From cc5ed29824ed4008de841656aabb892e085c054e Mon Sep 17 00:00:00 2001
From: Shad Storhaug
Date: Thu, 13 Aug 2020 04:16:44 +0700
Subject: [PATCH 01/13] Lucene.Net.Diagnostics: Moved AssertionException from
 test framework to Lucene.Net assembly

---
 .../Support/Diagnostics/AssertionException.cs | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename src/{Lucene.Net.TestFramework => Lucene.Net}/Support/Diagnostics/AssertionException.cs (100%)

diff --git a/src/Lucene.Net.TestFramework/Support/Diagnostics/AssertionException.cs b/src/Lucene.Net/Support/Diagnostics/AssertionException.cs
similarity index 100%
rename from src/Lucene.Net.TestFramework/Support/Diagnostics/AssertionException.cs
rename to src/Lucene.Net/Support/Diagnostics/AssertionException.cs

From b27412d3897abb46746f9bab963ad1fcc8f422c5 Mon Sep 17 00:00:00 2001
From: Shad Storhaug
Date: Thu, 13 Aug 2020 17:00:10 +0700
Subject: [PATCH 02/13] Lucene.Net.Diagnostics: Added static Debugging.Assert()
 overloads to allow assertions to be turned on and off in the Release build

---
 .../Support/Diagnostics/Debugging.cs | 57 ++++++++++++++++++-
 1 file changed, 54 insertions(+), 3 deletions(-)

diff --git a/src/Lucene.Net/Support/Diagnostics/Debugging.cs b/src/Lucene.Net/Support/Diagnostics/Debugging.cs
index ecde1664a3..e4ce4672fc 100644
--- a/src/Lucene.Net/Support/Diagnostics/Debugging.cs
+++ b/src/Lucene.Net/Support/Diagnostics/Debugging.cs
@@ -1,4 +1,6 @@
 using Lucene.Net.Util;
+using System;
+using System.Runtime.CompilerServices;
 
 namespace Lucene.Net.Diagnostics
 {
@@ -19,9 +21,9 @@ namespace Lucene.Net.Diagnostics
      * limitations under the License.
      */
 
-    // LUCENENET: This can only be named Debug if we merge it with the Debug
-    // class from Lucene.Net.TestFramework because it is in the same namespace.
-    // But that class is dependent upon AssertionException, which is only for testing.
+    /// <summary>
+    /// Provides a set of methods that help debug your code.
+    /// </summary>
     internal static class Debugging
     {
         /// <summary>
@@ -33,5 +35,54 @@ internal static class Debugging
         /// <see cref="Index.TermVectorsConsumer"/>, and <see cref="Index.TermVectorsConsumerPerField"/>.
         /// </summary>
         public static bool AssertsEnabled { get; set; } = SystemProperties.GetPropertyAsBoolean("assert", false);
+
+        ///// <summary>
+        ///// Checks for a condition; if the condition is false, throws an <see cref="AssertionException"/>.
+        ///// </summary>
+        ///// <param name="condition">The conditional expression to evaluate. If the condition is true, no exception is thrown.</param>
+        //[MethodImpl(MethodImplOptions.AggressiveInlining)]
+        //public static void Assert(bool condition)
+        //{
+        //    if (AssertsEnabled && !condition)
+        //        throw new AssertionException();
+        //}
+
+        ///// <summary>
+        ///// Checks for a condition; if the <paramref name="condition"/> is false, throws an <see cref="AssertionException"/> with the specified <paramref name="messageFactory"/>.
+        ///// </summary>
+        ///// <param name="condition">The conditional expression to evaluate. If the condition is true, no exception is thrown.</param>
+        ///// <param name="messageFactory">A delegate to build the message to use.</param>
+        //[MethodImpl(MethodImplOptions.AggressiveInlining)]
+        //public static void Assert(bool condition, Func<string> messageFactory)
+        //{
+        //    if (AssertsEnabled && !condition)
+        //        throw new AssertionException(messageFactory());
+        //}
+
+        /// <summary>
+        /// Checks for a condition; if the condition is false, throws an <see cref="AssertionException"/>.
+        /// </summary>
+        /// <param name="conditionFactory">A delegate that returns the conditional expression to evaluate. If the condition is true, no exception is thrown.</param>
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public static void Assert(Func<bool> conditionFactory)
+        {
+            if (AssertsEnabled && !conditionFactory())
+                throw new AssertionException();
+        }
+
+        /// <summary>
+        /// Checks for a condition if asserts are enabled; if the <paramref name="conditionFactory"/>
+        /// returns false, throws an <see cref="AssertionException"/> with the message returned
+        /// from the specified <paramref name="messageFactory"/>.
+        /// </summary>
+        /// <param name="conditionFactory">A delegate that returns the conditional expression to evaluate. If the condition returned from the factory is true, no exception is thrown.</param>
+        /// <param name="messageFactory">A delegate to build the message to use.</param>
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public static void Assert(Func<bool> conditionFactory, Func<string> messageFactory)
+        {
+            if (AssertsEnabled && !conditionFactory())
+                throw new AssertionException(messageFactory());
+        }
     }
 }

From f6b935defe03db1b59b56c57c6d8888ba887d7b6 Mon Sep 17 00:00:00 2001
From: Shad Storhaug
Date: Thu, 13 Aug 2020 16:58:23 +0700
Subject: [PATCH 03/13] Converted all non-test projects to use
 Lucene.Net.Diagnostics.Debugging.Assert() instead of
 System.Diagnostics.Debug.Assert()

---
 .../Analysis/CharFilter/BaseCharFilter.cs | 5 +-
 .../CharFilter/HTMLStripCharFilter.cs | 23 +--
 .../Analysis/CharFilter/MappingCharFilter.cs | 3 +-
 .../Analysis/CharFilter/NormalizeCharMap.cs | 6 +-
 .../Compound/CompoundWordTokenFilterBase.cs | 4 +-
 .../Analysis/Gl/GalicianStemmer.cs | 6 +-
 .../Analysis/Hunspell/Dictionary.cs | 10 +-
 .../Analysis/Hunspell/Stemmer.cs | 6 +-
 .../Miscellaneous/ASCIIFoldingFilter.cs | 6 +-
 .../Miscellaneous/SingleTokenTokenStream.cs | 5 +-
 .../Analysis/NGram/NGramTokenizer.cs | 5 +-
 .../Pattern/PatternCaptureGroupTokenFilter.cs | 3 +-
 .../Analysis/Pt/PortugueseStemmer.cs | 4 +-
 .../Analysis/Pt/RSLPStemmerBase.cs | 2 +-
 .../Analysis/Synonym/SynonymFilter.cs | 20 +--
 .../Analysis/Synonym/SynonymMap.cs | 7 +-
 .../Analysis/Util/CharArrayMap.cs | 6 +-
 .../Analysis/Util/CharTokenizer.cs | 8 +-
 .../Analysis/Util/CharacterUtils.cs | 21 +--
 .../Analysis/Util/RollingCharBuffer.cs | 23 +--
 .../Analysis/Util/SegmentingTokenizerBase.cs | 3 +-
 .../Analysis/Util/StemmerUtil.cs | 7 +-
 .../Analysis/Icu/ICUNormalizer2CharFilter.cs | 3 +-
 .../Analysis/Icu/Segmentation/ICUTokenizer.cs | 3 +-
 .../Icu/Segmentation/ICUTokenizerFactory.cs | 5 +-
 .../Dict/TokenInfoFST.cs | 5 +-
 .../GraphvizFormatter.cs | 4 +-
 .../JapaneseIterationMarkCharFilter.cs | 2 +-
 .../JapaneseTokenizer.cs | 36 ++--
 .../Tools/BinaryDictionaryWriter.cs | 25 +--
 .../Tools/ConnectionCostsBuilder.cs | 7 +-
 .../Tools/ConnectionCostsWriter.cs | 5 +-
 .../BeiderMorseFilter.cs | 4 +-
 .../ByTask/Utils/AnalyzerFactory.cs | 4 +-
 .../Quality/QualityStats.cs | 3 +-
 .../Quality/Trec/TrecJudge.cs | 3 +-
 .../BlockTerms/BlockTermsReader.cs | 33 ++--
 .../BlockTerms/BlockTermsWriter.cs | 14 +-
 .../BlockTerms/FixedGapTermsIndexReader.cs | 25 +--
 .../BlockTerms/FixedGapTermsIndexWriter.cs | 3 +-
 .../BlockTerms/VariableGapTermsIndexReader.cs | 4 +-
 .../BlockTerms/VariableGapTermsIndexWriter.cs | 4 +-
 .../Bloom/BloomFilteringPostingsFormat.cs | 3 +-
 src/Lucene.Net.Codecs/Bloom/FuzzySet.cs | 3 +-
 .../IntBlock/FixedIntBlockIndexInput.cs | 5 +-
 .../IntBlock/FixedIntBlockIndexOutput.cs | 4 +-
 .../IntBlock/VariableIntBlockIndexInput.cs | 3 +-
 .../IntBlock/VariableIntBlockIndexOutput.cs | 9 +-
 .../Memory/DirectDocValuesConsumer.cs | 5 +-
 .../Memory/DirectDocValuesProducer.cs | 3 +-
 .../Memory/DirectPostingsFormat.cs | 57 +++---
 .../Memory/FSTOrdTermsReader.cs | 13 +-
 .../Memory/FSTTermOutputs.cs | 9 +-
 .../Memory/FSTTermsReader.cs | 11 +-
 .../Memory/MemoryDocValuesConsumer.cs | 5 +-
 .../Memory/MemoryDocValuesProducer.cs | 3 +-
 .../Memory/MemoryPostingsFormat.cs | 27 +--
 .../Pulsing/PulsingPostingsFormat.cs | 3 +-
 .../Pulsing/PulsingPostingsReader.cs | 11 +-
 .../Pulsing/PulsingPostingsWriter.cs | 21 +--
 .../Sep/SepPostingsReader.cs | 13 +-
 .../Sep/SepPostingsWriter.cs | 11 +-
 .../Sep/SepSkipListReader.cs | 5 +-
 .../Sep/SepSkipListWriter.cs | 5
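A minimal usage sketch, not part of the series: how the two overloads above are intended to be called from code inside the Lucene.Net assembly (Debugging is internal, so this only compiles there). The class, method, and values below are hypothetical, chosen only to illustrate the deferred-evaluation behavior.

using System;
using Lucene.Net.Diagnostics;

internal static class DebuggingUsageSketch // hypothetical example type
{
    internal static void CheckBounds(int[] postings, int upto) // hypothetical inputs
    {
        // AssertsEnabled defaults to SystemProperties.GetPropertyAsBoolean("assert", false),
        // so a test host can switch the checks on without requiring a Debug build.
        Debugging.AssertsEnabled = true;

        // Condition-only overload: the lambda runs only while AssertsEnabled is
        // true; when assertions are off the call short-circuits immediately.
        Debugging.Assert(() => upto <= postings.Length);

        // Condition + message overload: the message lambda runs only when the
        // condition comes back false, so the string concatenation below is
        // never paid for on the happy path.
        Debugging.Assert(() => upto <= postings.Length,
            () => "upto=" + upto + " vs postings.Length=" + postings.Length);
    }
}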
+- .../SimpleText/SimpleTextDocValuesReader.cs | 47 ++--- .../SimpleText/SimpleTextDocValuesWriter.cs | 41 ++--- .../SimpleText/SimpleTextFieldInfosReader.cs | 31 ++-- .../SimpleText/SimpleTextFieldInfosWriter.cs | 5 +- .../SimpleText/SimpleTextFieldsReader.cs | 15 +- .../SimpleText/SimpleTextFieldsWriter.cs | 11 +- .../SimpleText/SimpleTextLiveDocsFormat.cs | 9 +- .../SimpleText/SimpleTextSegmentInfoReader.cs | 19 +- .../SimpleTextStoredFieldsReader.cs | 17 +- .../SimpleText/SimpleTextTermVectorsReader.cs | 37 ++-- .../SimpleText/SimpleTextTermVectorsWriter.cs | 7 +- .../ExpressionComparator.cs | 5 +- .../ScoreFunctionValues.cs | 3 +- src/Lucene.Net.Facet/DrillDownQuery.cs | 5 +- src/Lucene.Net.Facet/DrillSideways.cs | 3 +- src/Lucene.Net.Facet/DrillSidewaysScorer.cs | 8 +- src/Lucene.Net.Facet/FacetsConfig.cs | 5 +- .../Range/LongRangeCounter.cs | 13 +- src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs | 5 +- .../Directory/DirectoryTaxonomyWriter.cs | 7 +- .../Taxonomy/Directory/TaxonomyIndexArrays.cs | 5 +- src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs | 3 +- .../Taxonomy/FloatTaxonomyFacets.cs | 5 +- .../Taxonomy/TaxonomyReader.cs | 3 +- .../AbstractFirstPassGroupingCollector.cs | 13 +- .../BlockGroupingCollector.cs | 11 +- src/Lucene.Net.Grouping/SearchGroup.cs | 17 +- .../Term/TermGroupFacetCollector.cs | 7 +- .../MultiTermHighlighting.cs | 5 +- .../PostingsHighlight/Passage.cs | 5 +- .../PostingsHighlight/PostingsHighlighter.cs | 17 +- .../VectorHighlight/BaseFragListBuilder.cs | 5 +- .../VectorHighlight/FieldTermStack.cs | 5 +- src/Lucene.Net.Join/ToChildBlockJoinQuery.cs | 13 +- .../ToParentBlockJoinCollector.cs | 7 +- src/Lucene.Net.Join/ToParentBlockJoinQuery.cs | 7 +- .../MemoryIndex.MemoryIndexReader.cs | 11 +- src/Lucene.Net.Memory/MemoryIndex.cs | 15 +- src/Lucene.Net.Misc/Document/LazyDocument.cs | 9 +- .../Index/MultiPassIndexSplitter.cs | 3 +- src/Lucene.Net.Misc/Index/PKIndexSplitter.cs | 5 +- src/Lucene.Net.Misc/Index/Sorter/Sorter.cs | 7 +- .../Index/Sorter/SortingAtomicReader.cs | 5 +- .../Index/Sorter/SortingMergePolicy.cs | 4 +- src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs | 7 +- .../Util/Fst/UpToTwoPositiveIntOutputs.cs | 39 ++-- src/Lucene.Net.Queries/BooleanFilter.cs | 5 +- src/Lucene.Net.Queries/CommonTermsQuery.cs | 3 +- .../Processors/AnalyzerQueryNodeProcessor.cs | 11 +- .../Simple/SimpleQueryParser.cs | 5 +- .../IndexAndTaxonomyRevision.cs | 3 +- src/Lucene.Net.Replicator/IndexRevision.cs | 3 +- .../ReplicationClient.cs | 5 +- .../Queries/SortedSetSortField.cs | 5 +- .../Prefix/AbstractPrefixTreeFilter.cs | 3 +- .../AbstractVisitingPrefixTreeFilter.cs | 19 +- .../Prefix/ContainsPrefixTreeFilter.cs | 13 +- src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs | 5 +- .../Prefix/Tree/QuadPrefixTree.cs | 5 +- .../Prefix/Tree/SpatialPrefixTree.cs | 5 +- .../Prefix/WithinPrefixTreeFilter.cs | 5 +- .../Vector/DistanceValueSource.cs | 5 +- .../Suggest/Analyzing/AnalyzingSuggester.cs | 21 +-- .../Analyzing/BlendedInfixSuggester.cs | 3 +- .../Suggest/Analyzing/FSTUtil.cs | 11 +- .../Suggest/Analyzing/FreeTextSuggester.cs | 15 +- .../Suggest/Analyzing/SuggestStopFilter.cs | 3 +- .../Suggest/Fst/FSTCompletion.cs | 5 +- .../Suggest/Fst/WFSTCompletionLookup.cs | 11 +- .../Suggest/UnsortedInputIterator.cs | 9 +- src/Lucene.Net/Analysis/NumericTokenStream.cs | 3 +- .../Analysis/TokenStreamToAutomaton.cs | 5 +- src/Lucene.Net/Codecs/BlockTermState.cs | 4 +- src/Lucene.Net/Codecs/BlockTreeTermsReader.cs | 170 +++++++++--------- src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs | 71 ++++---- 
src/Lucene.Net/Codecs/CodecUtil.cs | 3 +- .../CompressingStoredFieldsIndexWriter.cs | 9 +- .../CompressingStoredFieldsReader.cs | 19 +- .../CompressingStoredFieldsWriter.cs | 17 +- .../CompressingTermVectorsReader.cs | 21 +-- .../CompressingTermVectorsWriter.cs | 39 ++-- .../Codecs/Compressing/CompressionMode.cs | 7 +- src/Lucene.Net/Codecs/Compressing/LZ4.cs | 19 +- src/Lucene.Net/Codecs/DocValuesConsumer.cs | 5 +- src/Lucene.Net/Codecs/FieldsConsumer.cs | 3 +- .../Codecs/Lucene3x/Lucene3xFields.cs | 41 ++--- .../Codecs/Lucene3x/Lucene3xNormsProducer.cs | 7 +- .../Lucene3x/Lucene3xSegmentInfoReader.cs | 7 +- .../Lucene3x/Lucene3xStoredFieldsReader.cs | 5 +- .../Lucene3x/Lucene3xTermVectorsReader.cs | 15 +- .../Codecs/Lucene3x/SegmentTermDocs.cs | 3 +- .../Codecs/Lucene3x/SegmentTermEnum.cs | 5 +- .../Codecs/Lucene3x/SegmentTermPositions.cs | 3 +- src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs | 9 +- .../Codecs/Lucene3x/TermInfosReader.cs | 11 +- src/Lucene.Net/Codecs/Lucene40/BitVector.cs | 28 +-- .../Codecs/Lucene40/Lucene40LiveDocsFormat.cs | 11 +- .../Codecs/Lucene40/Lucene40PostingsFormat.cs | 3 +- .../Codecs/Lucene40/Lucene40PostingsReader.cs | 37 ++-- .../Lucene40/Lucene40StoredFieldsReader.cs | 9 +- .../Lucene40/Lucene40StoredFieldsWriter.cs | 11 +- .../Lucene40/Lucene40TermVectorsReader.cs | 25 +-- .../Lucene40/Lucene40TermVectorsWriter.cs | 19 +- src/Lucene.Net/Codecs/Lucene41/ForUtil.cs | 23 +-- .../Codecs/Lucene41/Lucene41PostingsFormat.cs | 6 +- .../Codecs/Lucene41/Lucene41PostingsReader.cs | 23 +-- .../Codecs/Lucene41/Lucene41PostingsWriter.cs | 13 +- .../Codecs/Lucene41/Lucene41SkipReader.cs | 3 +- .../Codecs/Lucene42/Lucene42NormsConsumer.cs | 5 +- .../Lucene45/Lucene45DocValuesConsumer.cs | 5 +- .../Lucene46/Lucene46FieldInfosWriter.cs | 5 +- src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs | 3 +- .../Codecs/MultiLevelSkipListReader.cs | 3 +- .../Codecs/MultiLevelSkipListWriter.cs | 3 +- .../PerField/PerFieldDocValuesFormat.cs | 11 +- .../Codecs/PerField/PerFieldPostingsFormat.cs | 9 +- src/Lucene.Net/Codecs/PostingsConsumer.cs | 3 +- src/Lucene.Net/Codecs/TermVectorsWriter.cs | 17 +- src/Lucene.Net/Codecs/TermsConsumer.cs | 11 +- src/Lucene.Net/Index/AtomicReader.cs | 9 +- src/Lucene.Net/Index/AtomicReaderContext.cs | 3 +- src/Lucene.Net/Index/AutomatonTermsEnum.cs | 9 +- src/Lucene.Net/Index/BitsSlice.cs | 5 +- src/Lucene.Net/Index/BufferedUpdatesStream.cs | 49 ++--- src/Lucene.Net/Index/ByteSliceReader.cs | 17 +- src/Lucene.Net/Index/ByteSliceWriter.cs | 13 +- src/Lucene.Net/Index/CheckIndex.cs | 31 ++-- src/Lucene.Net/Index/CompositeReader.cs | 5 +- .../Index/CompositeReaderContext.cs | 5 +- .../Index/ConcurrentMergeScheduler.cs | 2 +- src/Lucene.Net/Index/DirectoryReader.cs | 7 +- src/Lucene.Net/Index/DocFieldProcessor.cs | 9 +- src/Lucene.Net/Index/DocTermOrds.cs | 21 +-- src/Lucene.Net/Index/DocValuesFieldUpdates.cs | 5 +- src/Lucene.Net/Index/DocValuesProcessor.cs | 5 +- src/Lucene.Net/Index/DocumentsWriter.cs | 39 ++-- .../Index/DocumentsWriterDeleteQueue.cs | 7 +- .../Index/DocumentsWriterFlushControl.cs | 73 ++++---- .../Index/DocumentsWriterFlushQueue.cs | 35 ++-- .../Index/DocumentsWriterPerThread.cs | 33 ++-- .../Index/DocumentsWriterPerThreadPool.cs | 25 +-- .../Index/DocumentsWriterStallControl.cs | 9 +- src/Lucene.Net/Index/FieldInfo.cs | 29 +-- src/Lucene.Net/Index/FieldInfos.cs | 11 +- src/Lucene.Net/Index/FilteredTermsEnum.cs | 7 +- src/Lucene.Net/Index/FlushPolicy.cs | 9 +- src/Lucene.Net/Index/FreqProxTermsWriter.cs | 4 +- 
.../Index/FreqProxTermsWriterPerField.cs | 51 +++--- src/Lucene.Net/Index/FrozenBufferedUpdates.cs | 8 +- src/Lucene.Net/Index/IndexFileDeleter.cs | 45 +++-- src/Lucene.Net/Index/IndexFileNames.cs | 6 +- .../Index/IndexFormatTooNewException.cs | 4 +- .../Index/IndexFormatTooOldException.cs | 6 +- src/Lucene.Net/Index/IndexWriter.cs | 145 ++++++++------- src/Lucene.Net/Index/LogMergePolicy.cs | 10 +- src/Lucene.Net/Index/MergePolicy.cs | 10 +- src/Lucene.Net/Index/MergeState.cs | 8 +- src/Lucene.Net/Index/MultiBits.cs | 14 +- src/Lucene.Net/Index/MultiDocValues.cs | 14 +- .../Index/MultiDocsAndPositionsEnum.cs | 6 +- src/Lucene.Net/Index/MultiDocsEnum.cs | 4 +- src/Lucene.Net/Index/MultiFields.cs | 14 +- src/Lucene.Net/Index/MultiTerms.cs | 4 +- src/Lucene.Net/Index/MultiTermsEnum.cs | 24 +-- src/Lucene.Net/Index/NormsConsumer.cs | 8 +- .../Index/NumericDocValuesFieldUpdates.cs | 4 +- src/Lucene.Net/Index/OrdTermState.cs | 4 +- .../Index/ParallelCompositeReader.cs | 4 +- src/Lucene.Net/Index/PrefixCodedTerms.cs | 4 +- src/Lucene.Net/Index/ReadersAndUpdates.cs | 50 +++--- src/Lucene.Net/Index/SegmentCoreReaders.cs | 8 +- src/Lucene.Net/Index/SegmentDocValues.cs | 6 +- src/Lucene.Net/Index/SegmentInfo.cs | 6 +- src/Lucene.Net/Index/SegmentInfos.cs | 14 +- src/Lucene.Net/Index/SegmentMerger.cs | 8 +- src/Lucene.Net/Index/SegmentReader.cs | 14 +- .../Index/SimpleMergedSegmentWarmer.cs | 4 +- .../Index/SingletonSortedSetDocValues.cs | 4 +- .../Index/SlowCompositeReaderWrapper.cs | 6 +- .../Index/SnapshotDeletionPolicy.cs | 4 +- .../Index/SortedDocValuesTermsEnum.cs | 8 +- src/Lucene.Net/Index/SortedDocValuesWriter.cs | 4 +- .../Index/SortedSetDocValuesTermsEnum.cs | 8 +- .../Index/SortedSetDocValuesWriter.cs | 6 +- .../Index/StandardDirectoryReader.cs | 8 +- src/Lucene.Net/Index/TermContext.cs | 14 +- src/Lucene.Net/Index/TermVectorsConsumer.cs | 13 +- .../Index/TermVectorsConsumerPerField.cs | 13 +- src/Lucene.Net/Index/TermsHashPerField.cs | 8 +- ...ThreadAffinityDocumentsWriterThreadPool.cs | 7 +- src/Lucene.Net/Lucene.Net.csproj | 1 + src/Lucene.Net/Search/CachingWrapperFilter.cs | 4 +- src/Lucene.Net/Search/CollectionStatistics.cs | 10 +- .../Search/ConstantScoreAutoRewrite.cs | 8 +- src/Lucene.Net/Search/ConstantScoreQuery.cs | 14 +- src/Lucene.Net/Search/DisjunctionScorer.cs | 6 +- src/Lucene.Net/Search/DocIdSetIterator.cs | 10 +- .../Search/DocTermOrdsRangeFilter.cs | 6 +- .../Search/DocTermOrdsRewriteMethod.cs | 6 +- src/Lucene.Net/Search/ExactPhraseScorer.cs | 4 +- src/Lucene.Net/Search/FieldCacheImpl.cs | 14 +- .../Search/FieldCacheRangeFilter.cs | 6 +- .../Search/FieldCacheRewriteMethod.cs | 6 +- src/Lucene.Net/Search/FieldComparator.cs | 14 +- src/Lucene.Net/Search/FieldValueHitQueue.cs | 10 +- src/Lucene.Net/Search/FilteredQuery.cs | 10 +- src/Lucene.Net/Search/FuzzyTermsEnum.cs | 6 +- src/Lucene.Net/Search/IndexSearcher.cs | 8 +- .../Search/MinShouldMatchSumScorer.cs | 6 +- src/Lucene.Net/Search/MultiPhraseQuery.cs | 12 +- .../Search/MultiTermQueryWrapperFilter.cs | 6 +- src/Lucene.Net/Search/NumericRangeQuery.cs | 16 +- src/Lucene.Net/Search/PhraseQuery.cs | 8 +- src/Lucene.Net/Search/QueryRescorer.cs | 4 +- src/Lucene.Net/Search/ReferenceManager.cs | 8 +- src/Lucene.Net/Search/ReqOptSumScorer.cs | 6 +- src/Lucene.Net/Search/ScoringRewrite.cs | 12 +- src/Lucene.Net/Search/SearcherManager.cs | 6 +- .../Search/Similarities/SimilarityBase.cs | 4 +- src/Lucene.Net/Search/SloppyPhraseScorer.cs | 6 +- src/Lucene.Net/Search/SortField.cs | 4 +- src/Lucene.Net/Search/SortRescorer.cs | 4 
+- .../Search/Spans/NearSpansOrdered.cs | 8 +- src/Lucene.Net/Search/Spans/SpanFirstQuery.cs | 4 +- .../Search/Spans/SpanPositionRangeQuery.cs | 4 +- src/Lucene.Net/Search/Spans/TermSpans.cs | 4 +- .../Search/TermCollectingRewrite.cs | 4 +- src/Lucene.Net/Search/TermQuery.cs | 14 +- src/Lucene.Net/Search/TermScorer.cs | 4 +- src/Lucene.Net/Search/TermStatistics.cs | 6 +- src/Lucene.Net/Search/TopDocs.cs | 12 +- src/Lucene.Net/Search/TopScoreDocCollector.cs | 14 +- src/Lucene.Net/Search/TopTermsRewrite.cs | 20 +-- src/Lucene.Net/Store/BaseDirectory.cs | 5 +- src/Lucene.Net/Store/BufferedIndexInput.cs | 4 +- src/Lucene.Net/Store/ByteArrayDataOutput.cs | 7 +- src/Lucene.Net/Store/ByteBufferIndexInput.cs | 10 +- src/Lucene.Net/Store/CompoundFileDirectory.cs | 10 +- src/Lucene.Net/Store/CompoundFileWriter.cs | 20 +-- src/Lucene.Net/Store/DataInput.cs | 4 +- src/Lucene.Net/Store/DataOutput.cs | 6 +- src/Lucene.Net/Store/IOContext.cs | 8 +- src/Lucene.Net/Store/IndexInput.cs | 2 +- src/Lucene.Net/Store/IndexOutput.cs | 2 +- src/Lucene.Net/Store/MMapDirectory.cs | 5 +- src/Lucene.Net/Store/NIOFSDirectory.cs | 6 +- src/Lucene.Net/Store/RAMOutputStream.cs | 4 +- .../Store/RateLimitedDirectoryWrapper.cs | 2 +- src/Lucene.Net/Store/SimpleFSDirectory.cs | 4 +- src/Lucene.Net/Support/BitArrayExtensions.cs | 2 +- src/Lucene.Net/Support/Collections.cs | 4 +- src/Lucene.Net/Util/ArrayUtil.cs | 43 +++-- src/Lucene.Net/Util/AttributeSource.cs | 5 +- src/Lucene.Net/Util/Automaton/Automaton.cs | 6 +- .../Util/Automaton/BasicOperations.cs | 10 +- .../Util/Automaton/CompiledAutomaton.cs | 10 +- .../Automaton/DaciukMihovAutomatonBuilder.cs | 20 +-- .../Automaton/Lev1ParametricDescription.cs | 6 +- .../Automaton/Lev1TParametricDescription.cs | 6 +- .../Automaton/Lev2ParametricDescription.cs | 6 +- .../Automaton/Lev2TParametricDescription.cs | 6 +- .../Util/Automaton/LevenshteinAutomata.cs | 4 +- src/Lucene.Net/Util/Automaton/SortedIntSet.cs | 4 +- src/Lucene.Net/Util/Automaton/State.cs | 4 +- src/Lucene.Net/Util/Automaton/Transition.cs | 9 +- src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs | 6 +- src/Lucene.Net/Util/BroadWord.cs | 6 +- src/Lucene.Net/Util/ByteBlockPool.cs | 4 +- src/Lucene.Net/Util/BytesRef.cs | 14 +- src/Lucene.Net/Util/BytesRefArray.cs | 4 +- src/Lucene.Net/Util/BytesRefHash.cs | 40 ++--- src/Lucene.Net/Util/CharsRef.cs | 6 +- src/Lucene.Net/Util/FilterIterator.cs | 4 +- src/Lucene.Net/Util/FixedBitSet.cs | 32 ++-- src/Lucene.Net/Util/Fst/Builder.cs | 46 ++--- .../Util/Fst/ByteSequenceOutputs.cs | 24 +-- src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs | 4 +- src/Lucene.Net/Util/Fst/BytesStore.cs | 20 +-- .../Util/Fst/CharSequenceOutputs.cs | 24 +-- src/Lucene.Net/Util/Fst/FST.cs | 80 ++++----- src/Lucene.Net/Util/Fst/FSTEnum.cs | 22 +-- src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs | 24 +-- src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs | 4 +- src/Lucene.Net/Util/Fst/NoOutputs.cs | 18 +- src/Lucene.Net/Util/Fst/NodeHash.cs | 5 +- src/Lucene.Net/Util/Fst/PairOutputs.cs | 20 +-- src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs | 26 +-- src/Lucene.Net/Util/Fst/Util.cs | 14 +- .../Util/IndexableBinaryStringTools.cs | 6 +- src/Lucene.Net/Util/InfoStream.cs | 4 +- src/Lucene.Net/Util/IntBlockPool.cs | 16 +- src/Lucene.Net/Util/IntsRef.cs | 6 +- src/Lucene.Net/Util/LongBitSet.cs | 32 ++-- src/Lucene.Net/Util/LongsRef.cs | 6 +- src/Lucene.Net/Util/MergedIterator.cs | 4 +- src/Lucene.Net/Util/OfflineSorter.cs | 13 +- src/Lucene.Net/Util/OpenBitSet.cs | 36 ++-- src/Lucene.Net/Util/PForDeltaDocIdSet.cs | 30 ++-- 
.../Packed/AbstractAppendingLongBuffer.cs | 12 +- .../Util/Packed/AbstractBlockPackedWriter.cs | 4 +- .../Util/Packed/AbstractPagedMutable.cs | 8 +- .../Util/Packed/BlockPackedReader.cs | 4 +- .../Util/Packed/BlockPackedReaderIterator.cs | 14 +- .../Util/Packed/BlockPackedWriter.cs | 4 +- src/Lucene.Net/Util/Packed/BulkOperation.cs | 6 +- .../Util/Packed/BulkOperationPacked.cs | 18 +- src/Lucene.Net/Util/Packed/Direct16.cs | 16 +- src/Lucene.Net/Util/Packed/Direct32.cs | 16 +- src/Lucene.Net/Util/Packed/Direct64.cs | 14 +- src/Lucene.Net/Util/Packed/Direct8.cs | 16 +- .../Util/Packed/EliasFanoDecoder.cs | 30 ++-- .../Util/Packed/EliasFanoEncoder.cs | 6 +- src/Lucene.Net/Util/Packed/GrowableWriter.cs | 4 +- .../Packed/MonotonicAppendingLongBuffer.cs | 4 +- .../Util/Packed/MonotonicBlockPackedReader.cs | 4 +- .../Util/Packed/MonotonicBlockPackedWriter.cs | 6 +- .../Util/Packed/Packed16ThreeBlocks.cs | 14 +- src/Lucene.Net/Util/Packed/Packed64.cs | 46 ++--- .../Util/Packed/Packed64SingleBlock.cs | 42 ++--- .../Util/Packed/Packed8ThreeBlocks.cs | 14 +- src/Lucene.Net/Util/Packed/PackedDataInput.cs | 4 +- .../Util/Packed/PackedDataOutput.cs | 4 +- src/Lucene.Net/Util/Packed/PackedInts.cs | 64 +++---- .../Util/Packed/PackedReaderIterator.cs | 10 +- src/Lucene.Net/Util/Packed/PackedWriter.cs | 8 +- src/Lucene.Net/Util/Packed/PagedMutable.cs | 4 +- src/Lucene.Net/Util/PagedBytes.cs | 16 +- src/Lucene.Net/Util/QueryBuilder.cs | 14 +- src/Lucene.Net/Util/RamUsageEstimator.cs | 16 +- .../Util/RecyclingByteBlockAllocator.cs | 8 +- .../Util/RecyclingIntBlockAllocator.cs | 8 +- src/Lucene.Net/Util/RollingBuffer.cs | 8 +- src/Lucene.Net/Util/SentinelIntSet.cs | 6 +- src/Lucene.Net/Util/Sorter.cs | 4 +- src/Lucene.Net/Util/TimSorter.cs | 22 +-- src/Lucene.Net/Util/UnicodeUtil.cs | 6 +- src/Lucene.Net/Util/WAH8DocIdSet.cs | 56 +++--- 401 files changed, 2540 insertions(+), 2384 deletions(-) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs index 755d4bed65..420822b480 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Support; +using Lucene.Net.Diagnostics; +using Lucene.Net.Support; using Lucene.Net.Util; using System.Diagnostics; using System.IO; @@ -113,7 +114,7 @@ protected virtual void AddOffCorrectMap(int off, int cumulativeDiff) } int offset = offsets[(size == 0) ? 
0 : size - 1]; - Debug.Assert(size == 0 || off >= offset, "Offset #" + size + "(" + off + ") is less than the last recorded offset " + offset + "\n" + Arrays.ToString(offsets) + "\n" + Arrays.ToString(diffs)); + Debugging.Assert(() => size == 0 || off >= offset, () => "Offset #" + size + "(" + off + ") is less than the last recorded offset " + offset + "\n" + Arrays.ToString(offsets) + "\n" + Arrays.ToString(diffs)); if (size == 0 || off != offsets[size - 1]) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs index d30bad7243..eb28c1f57f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs @@ -1,5 +1,6 @@ using J2N; using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; @@ -30952,7 +30953,7 @@ internal void Restart() /// internal int NextChar() { - Debug.Assert(!IsRead, "Attempting to read past the end of a segment."); + Debugging.Assert(() => !IsRead, () => "Attempting to read past the end of a segment."); return m_buf[pos++]; } @@ -31377,7 +31378,7 @@ private int NextChar() } catch (Exception /*e*/) { - Debug.Assert(false, "Exception parsing code point '" + decimalCharRef + "'"); + Debugging.Assert(() => false, () => "Exception parsing code point '" + decimalCharRef + "'"); } if (codePoint <= 0x10FFFF) { @@ -31637,7 +31638,7 @@ string hexCharRef } catch (Exception /*e*/) { - Debug.Assert(false, "Exception parsing hex code point '" + hexCharRef + "'"); + Debugging.Assert(() => false, () => "Exception parsing hex code point '" + hexCharRef + "'"); } if (codePoint <= 0x10FFFF) { @@ -31900,7 +31901,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debug.Assert(false, "Exception parsing high surrogate '" + Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(2, 6 - 2) + "'"); } try @@ -31909,7 +31910,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debug.Assert(false, "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); + Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); } // add (previously matched input length) + (this match length) - (substitution length) cumulativeDiff += inputSegment.Length + YyLength - 2; @@ -31931,7 +31932,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debug.Assert(false, "Exception parsing high surrogate '" + Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(2, 6 - 2) + "'"); } try @@ -31940,7 +31941,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debug.Assert(false, "Exception parsing low surrogate '" + Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(9, 14 - 9) + "'"); } if (char.IsLowSurrogate(lowSurrogate)) @@ -31972,7 +31973,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debug.Assert(false, "Exception parsing high surrogate '" + Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(1, 6 - 1) + "'"); } if (char.IsHighSurrogate(highSurrogate)) @@ -31985,7 +31986,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - 
Debug.Assert(false, "Exception parsing low surrogate '" + Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); } // add (previously matched input length) + (this match length) - (substitution length) @@ -32012,7 +32013,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debug.Assert(false, "Exception parsing high surrogate '" + Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(1, 6 - 1) + "'"); } if (char.IsHighSurrogate(highSurrogate)) @@ -32024,7 +32025,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debug.Assert(false, "Exception parsing low surrogate '" + Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(9, 14 - 9) + "'"); } if (char.IsLowSurrogate(lowSurrogate)) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs index 6f3c83bcaa..42d03df016 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using Lucene.Net.Util.Fst; using System; @@ -135,7 +136,7 @@ public override int Read() if (!FST.TargetHasArcs(arc)) { // Fast pass for single character match: - Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); lastMatchLen = 1; lastMatch = arc.Output; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs index 43347c9f66..3858445026 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs @@ -1,8 +1,8 @@ -using Lucene.Net.Util; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util; using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using JCG = J2N.Collections.Generic; @@ -54,7 +54,7 @@ private NormalizeCharMap(FST map) map.ReadFirstRealTargetArc(scratchArc.Target, scratchArc, fstReader); while (true) { - Debug.Assert(scratchArc.Label != FST.END_LABEL); + Debugging.Assert(() => scratchArc.Label != FST.END_LABEL); cachedRootArcs[Convert.ToChar((char)scratchArc.Label)] = (new FST.Arc()).CopyFrom(scratchArc); if (scratchArc.IsLast) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs index c6a2f3b2c9..518651ba14 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs @@ -1,10 +1,10 @@ using J2N.Text; using Lucene.Net.Analysis.TokenAttributes; using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Analysis.Compound { @@ -110,7 +110,7 @@ public override sealed bool IncrementToken() { if (m_tokens.Count > 0) { - Debug.Assert(current != null); + Debugging.Assert(() => current != null); CompoundToken token = m_tokens.Dequeue(); RestoreState(current); // keep all other attributes untouched 
m_termAtt.SetEmpty().Append(token.Text); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs index f2f5eebb19..7aa674ebd8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs @@ -1,6 +1,6 @@ -using System.Diagnostics; +using Lucene.Net.Analysis.Pt; +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using Lucene.Net.Analysis.Pt; namespace Lucene.Net.Analysis.Gl { @@ -47,7 +47,7 @@ static GalicianStemmer() /// new valid length, stemmed public virtual int Stem(char[] s, int len) { - Debug.Assert(s.Length >= len + 1, "this stemmer requires an oversized array of at least 1"); + Debugging.Assert(() => s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); len = plural.Apply(s, len); len = unification.Apply(s, len); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs index 70bc9c5cc2..f770e91df6 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs @@ -1,15 +1,15 @@ -using J2N.Collections.Generic.Extensions; +using J2N; +using J2N.Collections.Generic.Extensions; using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Support; using Lucene.Net.Support.IO; using Lucene.Net.Util; using Lucene.Net.Util.Automaton; using Lucene.Net.Util.Fst; -using J2N; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Text; @@ -375,7 +375,7 @@ private void ReadAffixFile(Stream affixStream, Encoding decoder) strip.CopyTo(0, stripData, currentOffset, strip.Length - 0); currentOffset += strip.Length; } - Debug.Assert(currentIndex == seenStrips.Count); + Debugging.Assert(() => currentIndex == seenStrips.Count); stripOffsets[currentIndex] = currentOffset; } @@ -424,7 +424,7 @@ private void ParseAffix(JCG.SortedDictionary> affixes, stri for (int i = 0; i < numLines; i++) { - Debug.Assert(affixWriter.Position == currentAffix << 3); + Debugging.Assert(() => affixWriter.Position == currentAffix << 3); string line = reader.ReadLine(); string[] ruleArgs = whitespacePattern.Split(line).TrimEnd(); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs index 975eb95807..0b40b56818 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs @@ -1,10 +1,10 @@ using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Util; using Lucene.Net.Util.Automaton; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; @@ -210,7 +210,7 @@ private IList Stem(char[] word, int length, int previous, int prevFlag // cross check incoming continuation class (flag of previous affix) against list. 
dictionary.flagLookup.Get(append, scratch); char[] appendFlags = Dictionary.DecodeFlags(scratch); - Debug.Assert(prevFlag >= 0); + Debugging.Assert(() => prevFlag >= 0); compatible = HasCrossCheckedFlag((char)prevFlag, appendFlags, false); } else @@ -279,7 +279,7 @@ private IList Stem(char[] word, int length, int previous, int prevFlag // cross check incoming continuation class (flag of previous affix) against list. dictionary.flagLookup.Get(append, scratch); char[] appendFlags = Dictionary.DecodeFlags(scratch); - Debug.Assert(prevFlag >= 0); + Debugging.Assert(() => prevFlag >= 0); compatible = HasCrossCheckedFlag((char)prevFlag, appendFlags, previousWasPrefix); } else diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs index ad8e650058..d47ed475ab 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs @@ -1,5 +1,5 @@ -using System.Diagnostics; -using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; namespace Lucene.Net.Analysis.Miscellaneous @@ -92,7 +92,7 @@ public override bool IncrementToken() { if (state != null) { - Debug.Assert(preserveOriginal, "state should only be captured if preserveOriginal is true"); + Debugging.Assert(() => preserveOriginal, () => "state should only be captured if preserveOriginal is true"); RestoreState(state); posIncAttr.PositionIncrement = 0; state = null; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs index 92748b466d..4855e8dcc7 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using System.Diagnostics; namespace Lucene.Net.Analysis.Miscellaneous @@ -34,11 +35,11 @@ public sealed class SingleTokenTokenStream : TokenStream public SingleTokenTokenStream(Token token) : base(Token.TOKEN_ATTRIBUTE_FACTORY) { - Debug.Assert(token != null); + Debugging.Assert(() => token != null); this.singleToken = (Token)token.Clone(); tokenAtt = AddAttribute(); - Debug.Assert(tokenAtt is Token); + Debugging.Assert(() => tokenAtt is Token); } public override sealed bool IncrementToken() diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs index 7f79f5d333..132720008c 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs @@ -1,6 +1,7 @@ using J2N; using Lucene.Net.Analysis.TokenAttributes; using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Diagnostics; @@ -231,7 +232,7 @@ public override sealed bool IncrementToken() { if (bufferStart + 1 + minGram > bufferEnd) { - Debug.Assert(exhausted); + Debugging.Assert(() => exhausted); return false; } Consume(); @@ -294,7 +295,7 @@ protected virtual bool IsTokenChar(int chr) public override sealed void End() { base.End(); - Debug.Assert(bufferStart <= bufferEnd); + Debugging.Assert(() => bufferStart <= bufferEnd); int endOffset = offset; for 
(int i = bufferStart; i < bufferEnd; ++i) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs index bd121580b5..8ca48d4c30 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System.Diagnostics; using System.Text.RegularExpressions; @@ -154,7 +155,7 @@ public override bool IncrementToken() { if (currentMatcher != -1 && NextCapture()) { - Debug.Assert(state != null); + Debugging.Assert(() => state != null); ClearAttributes(); RestoreState(state); int start = matchers[currentMatcher].Groups[currentGroup[currentMatcher]].Index; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs index 70b6ae09d6..cdd1eced4d 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Collections.Generic; namespace Lucene.Net.Analysis.Pt @@ -46,7 +46,7 @@ static PortugueseStemmer() /// new valid length, stemmed public virtual int Stem(char[] s, int len) { - Debug.Assert(s.Length >= len + 1, "this stemmer requires an oversized array of at least 1"); + Debugging.Assert(() => s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); len = plural.Apply(s, len); len = adverb.Apply(s, len); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs index e47e0c1377..942e6d5aa6 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs @@ -304,7 +304,7 @@ private static Step ParseStep(TextReader r, string header) { throw new Exception("Illegal Step header specified at line " /*+ r.LineNumber*/); // TODO Line number } - //Debug.Assert(headerPattern.GetGroupNumbers().Length == 4); + //Debugging.Assert(headerPattern.GetGroupNumbers().Length == 4); string name = matcher.Groups[1].Value; int min = int.Parse(matcher.Groups[2].Value, CultureInfo.InvariantCulture); int type = int.Parse(matcher.Groups[3].Value, CultureInfo.InvariantCulture); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs index 6b039eeef1..52adde9099 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs @@ -1,10 +1,10 @@ using J2N; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Util; using Lucene.Net.Util.Fst; using System; -using System.Diagnostics; using System.Globalization; namespace Lucene.Net.Analysis.Synonym @@ -176,7 +176,7 @@ public virtual void Reset() public virtual CharsRef PullNext() { - Debug.Assert(upto < count); + Debugging.Assert(() => upto < count); lastEndOffset = endOffsets[upto]; lastPosLength = posLengths[upto]; CharsRef result = outputs[upto++]; @@ -306,7 +306,7 @@ private void Capture() nextWrite = RollIncr(nextWrite); // Buffer head should never catch up to 
tail: - Debug.Assert(nextWrite != nextRead); + Debugging.Assert(() => nextWrite != nextRead); } /* @@ -325,7 +325,7 @@ private void Parse() { //System.out.println("\nS: parse"); - Debug.Assert(inputSkipCount == 0); + Debugging.Assert(() => inputSkipCount == 0); int curNextRead = nextRead; @@ -337,7 +337,7 @@ private void Parse() BytesRef pendingOutput = fst.Outputs.NoOutput; fst.GetFirstArc(scratchArc); - Debug.Assert(scratchArc.Output == fst.Outputs.NoOutput); + Debugging.Assert(() => scratchArc.Output == fst.Outputs.NoOutput); int tokenCount = 0; @@ -364,7 +364,7 @@ private void Parse() else { //System.out.println(" input.incrToken"); - Debug.Assert(futureInputs[nextWrite].consumed); + Debugging.Assert(() => futureInputs[nextWrite].consumed); // Not correct: a syn match whose output is longer // than its input can set future inputs keepOrig // to true: @@ -480,7 +480,7 @@ private void Parse() } else { - Debug.Assert(finished); + Debugging.Assert(() => finished); } //System.out.println(" parse done inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead + " nextWrite=" + nextWrite); @@ -510,7 +510,7 @@ private void AddOutput(BytesRef bytes, int matchInputLength, int matchEndOffset) int outputLen = chIDX - lastStart; // Caller is not allowed to have empty string in // the output: - Debug.Assert(outputLen > 0, "output contains empty string: " + scratchChars); + Debugging.Assert(() => outputLen > 0, () => "output contains empty string: " + scratchChars); int endOffset; int posLen; if (chIDX == chEnd && lastStart == scratchChars.Offset) @@ -536,7 +536,7 @@ private void AddOutput(BytesRef bytes, int matchInputLength, int matchEndOffset) lastStart = 1 + chIDX; //System.out.println(" slot=" + outputUpto + " keepOrig=" + keepOrig); outputUpto = RollIncr(outputUpto); - Debug.Assert(futureOutputs[outputUpto].posIncr == 1, "outputUpto=" + outputUpto + " vs nextWrite=" + nextWrite); + Debugging.Assert(() => futureOutputs[outputUpto].posIncr == 1, () => "outputUpto=" + outputUpto + " vs nextWrite=" + nextWrite); } } } @@ -602,7 +602,7 @@ public override bool IncrementToken() { // Pass-through case: return token we just pulled // but didn't capture: - Debug.Assert(inputSkipCount == 1, "inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead); + Debugging.Assert(() => inputSkipCount == 1, () => "inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead); } input.Reset(); if (outputs.count > 0) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs index 1a1b68aa8d..99c743a35a 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Util; using Lucene.Net.Util.Fst; @@ -172,8 +173,8 @@ internal virtual void Add(CharsRef input, int numInputWords, CharsRef output, in throw new ArgumentException("output.length must be > 0 (got " + output.Length + ")"); } - Debug.Assert(!HasHoles(input), "input has holes: " + input); - Debug.Assert(!HasHoles(output), "output has holes: " + output); + Debugging.Assert(() => !HasHoles(input), () => "input has holes: " + input); + Debugging.Assert(() => !HasHoles(output), () => "output has holes: " + output); //System.out.println("fmap.add input=" + input + " numInputWords=" + numInputWords + " output=" + output + " numOutputWords=" + numOutputWords); 
UnicodeUtil.UTF16toUTF8(output.Chars, output.Offset, output.Length, utf8Scratch); @@ -280,7 +281,7 @@ public virtual SynonymMap Build() scratch.Grow(estimatedSize); scratchOutput.Reset(scratch.Bytes, scratch.Offset, scratch.Bytes.Length); - Debug.Assert(scratch.Offset == 0); + Debugging.Assert(() => scratch.Offset == 0); // now write our output data: int count = 0; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs index 7438ab9131..434d2c420a 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs @@ -1,12 +1,12 @@ using J2N; -using J2N.Text; using J2N.Globalization; +using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; -using System.Diagnostics; using System.Globalization; using System.Text; @@ -632,7 +632,7 @@ public virtual void PutAll(IEnumerable> collection) private void Rehash() { - Debug.Assert(keys.Length == values.Length); + Debugging.Assert(() => keys.Length == values.Length); int newSize = 2 * keys.Length; char[][] oldkeys = keys; MapValue[] oldvalues = values; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs index cfb9ec6c40..9669dd5522 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs @@ -1,8 +1,8 @@ using J2N; -using System.Diagnostics; -using System.IO; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; +using System.IO; namespace Lucene.Net.Analysis.Util { @@ -169,7 +169,7 @@ public override sealed bool IncrementToken() { if (length == 0) // start of token { - Debug.Assert(start == -1); + Debugging.Assert(() => start == -1); start = offset + bufferIndex - charCount; end = start; } // check if a supplementary could run out of bounds @@ -191,7 +191,7 @@ public override sealed bool IncrementToken() } termAtt.Length = length; - Debug.Assert(start != -1); + Debugging.Assert(() => start != -1); offsetAtt.SetOffset(CorrectOffset(start), finalOffset = CorrectOffset(end)); return true; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs index aa22177792..9c2f5054b1 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs @@ -1,5 +1,6 @@ using J2N; using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util; using System; @@ -175,8 +176,8 @@ public static CharacterBuffer NewCharacterBuffer(int bufferSize) /// the number of characters in the buffer to lower case public virtual void ToLower(char[] buffer, int offset, int length) // LUCENENET specific - marked virtual so we can override the default { - Debug.Assert(buffer.Length >= length); - Debug.Assert(offset <= 0 && offset <= buffer.Length); + Debugging.Assert(() => buffer.Length >= length); + Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); // Slight optimization, eliminating a few method calls internally CultureInfo.InvariantCulture.TextInfo @@ -206,8 +207,8 @@ public static CharacterBuffer NewCharacterBuffer(int bufferSize) /// the number of characters in the buffer to lower case public virtual 
void ToUpper(char[] buffer, int offset, int length) // LUCENENET specific - marked virtual so we can override the default { - Debug.Assert(buffer.Length >= length); - Debug.Assert(offset <= 0 && offset <= buffer.Length); + Debugging.Assert(() => buffer.Length >= length); + Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); // Slight optimization, eliminating a few method calls internally CultureInfo.InvariantCulture.TextInfo @@ -348,7 +349,7 @@ public override int CodePointAt(char[] chars, int offset, int limit) public override bool Fill(CharacterBuffer buffer, TextReader reader, int numChars) { - Debug.Assert(buffer.Buffer.Length >= 2); + Debugging.Assert(() => buffer.Buffer.Length >= 2); if (numChars < 2 || numChars > buffer.Buffer.Length) { throw new ArgumentException("numChars must be >= 2 and <= the buffer size"); @@ -469,7 +470,7 @@ public override int CodePointAt(char[] chars, int offset, int limit) public override bool Fill(CharacterBuffer buffer, TextReader reader, int numChars) { - Debug.Assert(buffer.Buffer.Length >= 1); + Debugging.Assert(() => buffer.Buffer.Length >= 1); if (numChars < 1 || numChars > buffer.Buffer.Length) { throw new ArgumentException("numChars must be >= 1 and <= the buffer size"); @@ -532,8 +533,8 @@ private class Java4CharacterUtilsBWCompatibility : Java4CharacterUtils { public override void ToLower(char[] buffer, int offset, int limit) { - Debug.Assert(buffer.Length >= limit); - Debug.Assert(offset <= 0 && offset <= buffer.Length); + Debugging.Assert(() => buffer.Length >= limit); + Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); for (int i = offset; i < limit;) { @@ -545,8 +546,8 @@ public override void ToLower(char[] buffer, int offset, int limit) public override void ToUpper(char[] buffer, int offset, int limit) { - Debug.Assert(buffer.Length >= limit); - Debug.Assert(offset <= 0 && offset <= buffer.Length); + Debugging.Assert(() => buffer.Length >= limit); + Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); for (int i = offset; i < limit;) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs index 3cf6f12bc1..e4a708698d 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs @@ -1,7 +1,8 @@ -using System; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util; +using System; using System.Diagnostics; using System.IO; -using Lucene.Net.Util; namespace Lucene.Net.Analysis.Util { @@ -107,10 +108,10 @@ public int Get(int pos) else { // Cannot read from future (except by 1): - Debug.Assert(pos < nextPos); + Debugging.Assert(() => pos < nextPos); // Cannot read from already freed past: - Debug.Assert(nextPos - pos <= count, "nextPos=" + nextPos + " pos=" + pos + " count=" + count); + Debugging.Assert(() => nextPos - pos <= count, () => "nextPos=" + nextPos + " pos=" + pos + " count=" + count); return buffer[GetIndex(pos)]; } @@ -129,15 +130,15 @@ private int GetIndex(int pos) { // Wrap: index += buffer.Length; - Debug.Assert(index >= 0); + Debugging.Assert(() => index >= 0); } return index; } public char[] Get(int posStart, int length) { - Debug.Assert(length > 0); - Debug.Assert(InBounds(posStart), "posStart=" + posStart + " length=" + length); + Debugging.Assert(() => length > 0); + Debugging.Assert(() => InBounds(posStart), () => "posStart=" + posStart + " length=" + length); //System.out.println(" buffer.Get posStart=" + 
posStart + " len=" + length); int startIndex = GetIndex(posStart); @@ -165,11 +166,11 @@ public char[] Get(int posStart, int length) /// public void FreeBefore(int pos) { - Debug.Assert(pos >= 0); - Debug.Assert(pos <= nextPos); + Debugging.Assert(() => pos >= 0); + Debugging.Assert(() => pos <= nextPos); int newCount = nextPos - pos; - Debug.Assert(newCount <= count, "newCount=" + newCount + " count=" + count); - Debug.Assert(newCount <= buffer.Length, "newCount=" + newCount + " buf.length=" + buffer.Length); + Debugging.Assert(() => newCount <= count, () => "newCount=" + newCount + " count=" + count); + Debugging.Assert(() => newCount <= buffer.Length, () => "newCount=" + newCount + " buf.length=" + buffer.Length); count = newCount; } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs index f4e848d6da..23cbfae930 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs @@ -1,6 +1,7 @@ #if FEATURE_BREAKITERATOR using ICU4N.Text; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using System; using System.Diagnostics; using System.IO; @@ -183,7 +184,7 @@ private void Refill() /// commons-io's readFully, but without bugs if offset != 0 private static int Read(TextReader input, char[] buffer, int offset, int length) { - Debug.Assert(length >= 0, "length must not be negative: " + length); + Debugging.Assert(() => length >= 0, () => "length must not be negative: " + length); int remaining = length; while (remaining > 0) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs index 9164e95360..9c8d61cddc 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Diagnostics; namespace Lucene.Net.Analysis.Util @@ -116,7 +117,7 @@ public static bool EndsWith(char[] s, int len, char[] suffix) /// length of input buffer after deletion public static int Delete(char[] s, int pos, int len) { - Debug.Assert(pos < len); + Debugging.Assert(() => pos < len); if (pos < len - 1) // don't arraycopy if asked to delete last character { Array.Copy(s, pos + 1, s, pos, len - pos - 1); @@ -134,7 +135,7 @@ public static int Delete(char[] s, int pos, int len) /// length of input buffer after deletion public static int DeleteN(char[] s, int pos, int len, int nChars) { - Debug.Assert(pos + nChars <= len); + Debugging.Assert(() => pos + nChars <= len); if (pos + nChars < len) // don't arraycopy if asked to delete the last characters { Array.Copy(s, pos + nChars, s, pos, len - pos - nChars); diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs index e04dc5f5e7..981e073186 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs @@ -9,6 +9,7 @@ using System.IO; using System.Text; using ExceptionToClassNameConventionAttribute = Lucene.Net.Support.ExceptionToClassNameConventionAttribute; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Analysis.Icu { @@ -127,7 +128,7 @@ private void ReadInputToBuffer() bool hasRemainingChars = 
CharacterUtils.GetInstance(LuceneVersion.LUCENE_CURRENT).Fill(tmpBuffer, m_input); #pragma warning restore 612, 618 - Debug.Assert(tmpBuffer.Offset == 0); + Debugging.Assert(() => tmpBuffer.Offset == 0); inputBuffer.Append(tmpBuffer.Buffer, 0, tmpBuffer.Length); if (hasRemainingChars == false) diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs index 2ae6ce6c34..213fa2f1ce 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs @@ -3,6 +3,7 @@ using ICU4N.Text; using Lucene.Net.Analysis.Icu.TokenAttributes; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Diagnostics; @@ -193,7 +194,7 @@ private void Refill() /// commons-io's readFully, but without bugs if offset != 0 private static int Read(TextReader input, char[] buffer, int offset, int length) { - Debug.Assert(length >= 0, "length must not be negative: " + length); + Debugging.Assert(() => length >= 0, () => "length must not be negative: " + length); int remaining = length; while (remaining > 0) diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs index ea933546b1..c4870896d1 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs @@ -3,6 +3,7 @@ using ICU4N.Globalization; using ICU4N.Text; using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util; using System; @@ -101,7 +102,7 @@ public ICUTokenizerFactory(IDictionary args) public virtual void Inform(IResourceLoader loader) { - Debug.Assert(tailored != null, "init must be called first!"); + Debugging.Assert(() => tailored != null, () => "init must be called first!"); if (tailored.Count == 0) { config = new DefaultICUTokenizerConfig(cjkAsWords, myanmarAsWords); @@ -161,7 +162,7 @@ private BreakIterator ParseRules(string filename, IResourceLoader loader) public override Tokenizer Create(AttributeSource.AttributeFactory factory, TextReader input) { - Debug.Assert(config != null, "inform must be called first!"); + Debugging.Assert(() => config != null, () => "inform must be called first!"); return new ICUTokenizer(factory, input, config); } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs index 0ae27801ba..04ca42272e 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Util.Fst; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util.Fst; using System.Diagnostics; namespace Lucene.Net.Analysis.Ja.Dict @@ -74,7 +75,7 @@ public TokenInfoFST(FST fst, bool fasterButMoreRam) { if (useCache && ch >= 0x3040 && ch <= cacheCeiling) { - Debug.Assert(ch != FST.END_LABEL); + Debugging.Assert(() => ch != FST.END_LABEL); FST.Arc result = rootCache[ch - 0x3040]; if (result == null) { diff --git a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs index bcbc81ded4..a1e1fa984a 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs @@ 
-89,8 +89,8 @@ private void SetBestPathMap(WrappedPositionArray positions, int startPos, Positi
 string toNodeID = GetNodeID(pos, bestIDX);
 string fromNodeID = GetNodeID(backPos, backIDX);
- Debug.Assert(!bestPathMap.ContainsKey(fromNodeID));
- Debug.Assert(!bestPathMap.Values.Contains(toNodeID));
+ Debugging.Assert(() => !bestPathMap.ContainsKey(fromNodeID));
+ Debugging.Assert(() => !bestPathMap.Values.Contains(toNodeID));
 bestPathMap[fromNodeID] = toNodeID;
 pos = backPos;
 bestIDX = backIDX;
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
index e615a05448..ad243f40eb 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
@@ -133,7 +133,7 @@ static JapaneseIterationMarkCharFilter()
 // Make katakana dakuten map from hiragana map
 char codePointDifference = (char)('\u30ab' - '\u304b'); // カ - か
- Debug.Assert(h2d.Length == k2d.Length);
+ Debugging.Assert(() => h2d.Length == k2d.Length);
 for (int i = 0; i < k2d.Length; i++)
 {
 k2d[i] = (char)(h2d[i] + codePointDifference);
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
index cfd9a9ab69..d9d2293586 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
@@ -314,7 +314,7 @@ private void Add(IDictionary dict, Position fromPosData, int endPos, int wordID,
 int leftID = dict.GetLeftId(wordID);
 int leastCost = int.MaxValue;
 int leastIDX = -1;
- Debug.Assert(fromPosData.count > 0);
+ Debugging.Assert(() => fromPosData.count > 0);
 for (int idx = 0; idx < fromPosData.count; idx++)
 {
 // Cost is path cost so far, plus word cost (added at
@@ -356,7 +356,7 @@ private void Add(IDictionary dict, Position fromPosData, int endPos, int wordID,
 }
 //positions.get(endPos).add(leastCost, dict.getRightId(wordID), fromPosData.pos, leastIDX, wordID, type);
- Debug.Assert(leftID == dict.GetRightId(wordID));
+ Debugging.Assert(() => leftID == dict.GetRightId(wordID));
 positions.Get(endPos).Add(leastCost, leftID, fromPosData.pos, leastIDX, wordID, type);
 }
@@ -387,7 +387,7 @@ public override bool IncrementToken()
 int position = token.Position;
 int length = token.Length;
 ClearAttributes();
- Debug.Assert(length > 0);
+ Debugging.Assert(() => length > 0);
 //System.out.println("off=" + token.getOffset() + " len=" + length + " vs " + token.getSurfaceForm().length);
 termAtt.CopyBuffer(token.SurfaceForm, token.Offset, length);
 offsetAtt.SetOffset(CorrectOffset(position), CorrectOffset(position + length));
@@ -402,7 +402,7 @@ public override bool IncrementToken()
 }
 else
 {
- Debug.Assert(token.Position > lastTokenPos);
+ Debugging.Assert(() => token.Position > lastTokenPos);
 posIncAtt.PositionIncrement = 1;
 posLengthAtt.PositionLength = 1;
 }
@@ -511,7 +511,7 @@ private void Parse()
 }
 // We will always have at least one live path:
- Debug.Assert(leastIDX != -1);
+ Debugging.Assert(() => leastIDX != -1);
 // Second pass: prune all but the best path:
 for (int pos2 = pos; pos2 < positions.GetNextPos(); pos2++)
@@ -544,7 +544,7 @@ private void Parse()
 if (pos != leastPosData.pos)
 {
 // We jumped into a future position:
- Debug.Assert(pos < leastPosData.pos);
+ Debugging.Assert(() => pos < leastPosData.pos);
 pos = leastPosData.pos;
 }
@@ -913,10 +913,10 @@ private void Backtrace(Position endPosData, int fromIDX)
 {
 //System.out.println("BT: back pos=" + pos + " bestIDX=" + bestIDX);
 Position posData = positions.Get(pos);
- Debug.Assert(bestIDX < posData.count);
+ Debugging.Assert(() => bestIDX < posData.count);
 int backPos = posData.backPos[bestIDX];
- Debug.Assert(backPos >= lastBackTracePos, "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos);
+ Debugging.Assert(() => backPos >= lastBackTracePos, () => "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos);
 int length = pos - backPos;
 JapaneseTokenizerType backType = posData.backType[bestIDX];
 int backID = posData.backID[bestIDX];
@@ -989,7 +989,7 @@ private void Backtrace(Position endPosData, int fromIDX)
 if (leastIDX != -1 && leastCost <= maxCost && posData.backPos[leastIDX] != backPos)
 {
 // We should have pruned the altToken from the graph:
- Debug.Assert(posData.backPos[leastIDX] != backPos);
+ Debugging.Assert(() => posData.backPos[leastIDX] != backPos);
 // Save the current compound token, to output when
 // this alternate path joins back:
@@ -1024,7 +1024,7 @@ private void Backtrace(Position endPosData, int fromIDX)
 }
 int offset = backPos - lastBackTracePos;
- Debug.Assert(offset >= 0);
+ Debugging.Assert(() => offset >= 0);
 if (altToken != null && altToken.Position >= backPos)
 {
@@ -1035,7 +1035,7 @@ private void Backtrace(Position endPosData, int fromIDX)
 // The pruning we did when we created the altToken
 // ensures that the back trace will align back with
 // the start of the altToken:
- Debug.Assert(altToken.Position == backPos, altToken.Position + " vs " + backPos);
+ Debugging.Assert(() => altToken.Position == backPos, () => altToken.Position + " vs " + backPos);
 // NOTE: not quite right: the compound token may
 // have had all punctuation back traced so far, but
@@ -1060,7 +1060,7 @@ private void Backtrace(Position endPosData, int fromIDX)
 {
 Console.WriteLine(" discard all-punctuation altToken=" + altToken);
 }
- Debug.Assert(discardPunctuation);
+ Debugging.Assert(() => discardPunctuation);
 altToken = null;
 }
@@ -1355,7 +1355,7 @@ public void Reset()
 {
 count = 0;
 // forwardCount naturally resets after it runs:
- Debug.Assert(forwardCount == 0, "pos=" + pos + " forwardCount=" + forwardCount);
+ Debugging.Assert(() => forwardCount == 0, () => "pos=" + pos + " forwardCount=" + forwardCount);
 }
 }
@@ -1432,13 +1432,13 @@ public Position Get(int pos)
 nextWrite = 0;
 }
 // Should have already been reset:
- Debug.Assert(positions[nextWrite].count == 0);
+ Debugging.Assert(() => positions[nextWrite].count == 0);
 positions[nextWrite++].pos = nextPos++;
 count++;
 }
- Debug.Assert(InBounds(pos));
+ Debugging.Assert(() => InBounds(pos));
 int index = GetIndex(pos);
- Debug.Assert(positions[index].pos == pos);
+ Debugging.Assert(() => positions[index].pos == pos);
 return positions[index];
 }
@@ -1466,8 +1466,8 @@ private int GetIndex(int pos)
 public void FreeBefore(int pos)
 {
 int toFree = count - (nextPos - pos);
- Debug.Assert(toFree >= 0);
- Debug.Assert(toFree <= count);
+ Debugging.Assert(() => toFree >= 0);
+ Debugging.Assert(() => toFree <= count);
 int index = nextWrite - count;
 if (index < 0)
 {
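
A note on the call shape used throughout this conversion: wrapping both the condition and the message in delegates defers all evaluation, so the condition only runs when assertions are enabled and the message string is only concatenated when the assert actually fires. A minimal before/after sketch, reusing the backPos check from the hunk above for illustration (not an additional change in this patch):

    // Old form: the message string is built on every call, even when
    // the condition holds and nothing is thrown.
    Debug.Assert(backPos >= lastBackTracePos, "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos);

    // New form: the condition delegate runs only when assertions are
    // enabled, and the message delegate runs only when the condition
    // is false, so the string is never built on the hot path.
    Debugging.Assert(() => backPos >= lastBackTracePos, () => "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos);

The delegates themselves still capture locals, so a closure allocation remains even when assertions are off; the win here is avoiding the string concatenation.

diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
index 4bc8ebfb0c..cfbfb3d6a1 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
@@ -1,6 +1,7 @@
 using J2N.IO;
 using Lucene.Net.Analysis.Ja.Dict;
 using Lucene.Net.Codecs;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
 using System;
@@ -61,7 +62,7 @@ public virtual int Put(string[] entry)
 for (int i = 4; i < 8; i++)
 {
 string part = entry[i];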
- Debug.Assert(part.Length > 0);
+ Debugging.Assert(() => part.Length > 0);
 if (!"*".Equals(part, StringComparison.Ordinal))
 {
 if (sb.Length > 0)
 {
@@ -118,8 +119,8 @@ public virtual int Put(string[] entry)
 flags |= BinaryDictionary.HAS_PRONUNCIATION;
 }
- Debug.Assert(leftId == rightId);
- Debug.Assert(leftId < 4096); // there are still unused bits
+ Debugging.Assert(() => leftId == rightId);
+ Debugging.Assert(() => leftId < 4096); // there are still unused bits
 // add pos mapping
 int toFill = 1 + leftId - posDict.Count;
 for (int i = 0; i < toFill; i++)
 {
@@ -128,7 +129,7 @@ public virtual int Put(string[] entry)
 }
 string existing = posDict[leftId];
- Debug.Assert(existing == null || existing.Equals(fullPOSData, StringComparison.Ordinal));
+ Debugging.Assert(() => existing == null || existing.Equals(fullPOSData, StringComparison.Ordinal));
 posDict[leftId] = fullPOSData;
 m_buffer.PutInt16((short)(leftId << 3 | flags));
@@ -136,7 +137,7 @@ public virtual int Put(string[] entry)
 if ((flags & BinaryDictionary.HAS_BASEFORM) != 0)
 {
- Debug.Assert(baseForm.Length < 16);
+ Debugging.Assert(() => baseForm.Length < 16);
 int shared = SharedPrefix(entry[0], baseForm);
 int suffix = baseForm.Length - shared;
 m_buffer.Put((byte)(shared << 4 | suffix));
@@ -237,11 +238,11 @@ public static int SharedPrefix(string left, string right)
 public virtual void AddMapping(int sourceId, int wordId)
 {
- Debug.Assert(wordId > lastWordId, "words out of order: " + wordId + " vs lastID: " + lastWordId);
+ Debugging.Assert(() => wordId > lastWordId, () => "words out of order: " + wordId + " vs lastID: " + lastWordId);
 if (sourceId > lastSourceId)
 {
- Debug.Assert(sourceId > lastSourceId, "source ids out of order: lastSourceId=" + lastSourceId + " vs sourceId=" + sourceId);
+ Debugging.Assert(() => sourceId > lastSourceId, () => "source ids out of order: lastSourceId=" + lastSourceId + " vs sourceId=" + sourceId);
 targetMapOffsets = ArrayUtil.Grow(targetMapOffsets, sourceId + 1);
 for (int i = lastSourceId + 1; i <= sourceId; i++)
 {
@@ -250,7 +251,7 @@ public virtual void AddMapping(int sourceId, int wordId)
 }
 else
 {
- Debug.Assert(sourceId == lastSourceId);
+ Debugging.Assert(() => sourceId == lastSourceId);
 }
 targetMap = ArrayUtil.Grow(targetMap, targetMapEndOffset + 1);
@@ -305,7 +306,7 @@ protected virtual void WriteTargetMap(string filename)
 for (int ofs = 0; ofs < targetMapEndOffset; ofs++)
 {
 int val = targetMap[ofs], delta = val - prev;
- Debug.Assert(delta >= 0);
+ Debugging.Assert(() => delta >= 0);
 if (ofs == targetMapOffsets[sourceId])
 {
 @out.WriteVInt32((delta << 1) | 0x01);
@@ -317,7 +318,7 @@ protected virtual void WriteTargetMap(string filename)
 }
 prev += delta;
 }
- Debug.Assert(sourceId == numSourceIds, "sourceId:" + sourceId + " != numSourceIds:" + numSourceIds);
+ Debugging.Assert(() => sourceId == numSourceIds, () => "sourceId:" + sourceId + " != numSourceIds:" + numSourceIds);
 }
 }
@@ -341,7 +342,7 @@ protected virtual void WritePosDict(string filename)
 else
 {
 string[] data = CSVUtil.Parse(s);
- Debug.Assert(data.Length == 3, "malformed pos/inflection: " + s);
+ Debugging.Assert(() => data.Length == 3, () => "malformed pos/inflection: " + s);
 @out.WriteString(data[0]);
 @out.WriteString(data[1]);
 @out.WriteString(data[2]);
@@ -370,7 +371,7 @@ protected virtual void WriteDictionary(string filename)
 @out.WriteByte(m_buffer.Get());
 }
- Debug.Assert(m_buffer.Remaining == 0L);
+ Debugging.Assert(() => m_buffer.Remaining == 0L);
 }
 }
 }
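
For readers following the WriteTargetMap hunk above: the writer packs a flag into the low bit of each varint, so a single VInt both carries the non-negative delta and marks where a new source id begins. A hypothetical decoder for one entry, shown only to illustrate the layout (the names are made up; this is not part of the patch):

    // raw = (delta << 1) | flag, as written by WriteTargetMap above.
    int raw = input.ReadVInt32();               // 'input' is an assumed DataInput
    bool startsNewSourceId = (raw & 0x01) != 0; // low bit: a new source id begins here
    int delta = (int)((uint)raw >> 1);          // remaining bits: the delta itself
    prev += delta;                              // running value, mirroring the writer's 'prev'

diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs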
index 1d7dbb0bc3..d7acc77169 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
@@ -1,4 +1,5 @@
 using J2N.Text;
+using Lucene.Net.Diagnostics;
 using System.Diagnostics;
 using System.Globalization;
 using System.IO;
@@ -41,12 +42,12 @@ public static ConnectionCostsWriter Build(string filename)
 string line = streamReader.ReadLine();
 string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd();
- Debug.Assert(dimensions.Length == 2);
+ Debugging.Assert(() => dimensions.Length == 2);
 int forwardSize = int.Parse(dimensions[0], CultureInfo.InvariantCulture);
 int backwardSize = int.Parse(dimensions[1], CultureInfo.InvariantCulture);
- Debug.Assert(forwardSize > 0 && backwardSize > 0);
+ Debugging.Assert(() => forwardSize > 0 && backwardSize > 0);
 ConnectionCostsWriter costs = new ConnectionCostsWriter(forwardSize, backwardSize);
@@ -54,7 +55,7 @@ public static ConnectionCostsWriter Build(string filename)
 {
 string[] fields = whiteSpaceRegex.Split(line).TrimEnd();
- Debug.Assert(fields.Length == 3);
+ Debugging.Assert(() => fields.Length == 3);
 int forwardId = int.Parse(fields[0], CultureInfo.InvariantCulture);
 int backwardId = int.Parse(fields[1], CultureInfo.InvariantCulture);
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
index 91e4df8eff..9eea786bc0 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
@@ -1,5 +1,6 @@
 using Lucene.Net.Analysis.Ja.Dict;
 using Lucene.Net.Codecs;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using System.Diagnostics;
 using System.IO;
@@ -62,10 +63,10 @@ public void Write(string baseDir)
 @out.WriteVInt32(forwardSize);
 @out.WriteVInt32(backwardSize);
 int last = 0;
- Debug.Assert(costs.Length == backwardSize);
+ Debugging.Assert(() => costs.Length == backwardSize);
 foreach (short[] a in costs)
 {
- Debug.Assert(a.Length == forwardSize);
+ Debugging.Assert(() => a.Length == forwardSize);
 for (int i = 0; i < a.Length; i++)
 {
 int delta = (int)a[i] - last;
diff --git a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
index dc9cef7425..4a17a012ea 100644
--- a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
@@ -1,7 +1,7 @@
 // lucene version compatibility level: 4.8.1
 using Lucene.Net.Analysis.Phonetic.Language.Bm;
 using Lucene.Net.Analysis.TokenAttributes;
-using System.Diagnostics;
+using Lucene.Net.Diagnostics;
 using System.Text.RegularExpressions;

 namespace Lucene.Net.Analysis.Phonetic
@@ -85,7 +85,7 @@ public override bool IncrementToken()
 if (matcher.Success)
 {
- Debug.Assert(state != null && encoded != null);
+ Debugging.Assert(() => state != null && encoded != null);
 RestoreState(state);
 int start = matcher.Index;
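
For context on the three asserts in ConnectionCostsBuilder.Build: the connection-cost source file (matrix.def in the dictionary distribution, as best we can tell) opens with a two-field dimensions line and then carries one three-field row per connection. A stripped-down sketch of that shape under those assumptions; the shipped Build() adds culture-invariant parsing, trimming, and the actual cost matrix:

    // Hypothetical reader, for illustration only.
    // line 1:  "<forwardSize> <backwardSize>"
    // rest:    "<forwardId> <backwardId> <cost>" per connection
    // (assumes using System.IO; using System.Text.RegularExpressions;)
    using (var reader = new StreamReader(filename))
    {
        string[] dimensions = Regex.Split(reader.ReadLine(), @"\s+");
        Debugging.Assert(() => dimensions.Length == 2);

        string row;
        while ((row = reader.ReadLine()) != null)
        {
            string[] fields = Regex.Split(row, @"\s+");
            Debugging.Assert(() => fields.Length == 3); // forwardId, backwardId, cost
        }
    }

diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
index 0ac466c772..d962a24b08 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
@@ -1,7 +1,7 @@
 using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.Util;
+using Lucene.Net.Diagnostics;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.IO;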
 using System.Text;
@@ -42,7 +42,7 @@ public AnalyzerFactory(IList charFilterFactories, IList tokenFilterFactories)
 {
 this.charFilterFactories = charFilterFactories;
- Debug.Assert(null != tokenizerFactory);
+ Debugging.Assert(() => null != tokenizerFactory);
 this.tokenizerFactory = tokenizerFactory;
 this.tokenFilterFactories = tokenFilterFactories;
 }
diff --git a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
index 2ed4e120c0..5a357c8953 100644
--- a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
+++ b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
@@ -1,4 +1,5 @@
 using J2N.Collections.Generic.Extensions;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -241,7 +242,7 @@ public static QualityStats Average(QualityStats[] stats)
 }
 }
 }
- Debug.Assert(m > 0, "Fishy: no \"good\" queries!");
+ Debugging.Assert(() => m > 0, () => "Fishy: no \"good\" queries!");
 // take average: times go by all queries, other measures go by "good" queries only.
 avg.searchTime /= stats.Length;
 avg.docNamesExtractTime /= stats.Length;
diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
index c6b5d212cf..2f8ba27a26 100644
--- a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
@@ -1,4 +1,5 @@
 using J2N.Text;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -72,7 +73,7 @@ public TrecJudge(TextReader reader)
 st.MoveNext();
 bool relevant = !zero.Equals(st.Current, StringComparison.Ordinal);
 // LUCENENET: don't call st.NextToken() unless the condition fails.
- Debug.Assert(st.RemainingTokens == 0, "wrong format: " + line + " next: " + (st.MoveNext() ? st.Current : ""));
+ Debugging.Assert(() => st.RemainingTokens == 0, () => "wrong format: " + line + " next: " + (st.MoveNext() ? st.Current : ""));
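
The TrecJudge hunk above is a good illustration of why the message argument is a delegate rather than a string: the diagnostic text itself advances the tokenizer (st.MoveNext()), which would corrupt parsing if it executed on the success path. As a Func&lt;string&gt; it runs only when the assert fires, where the enumerator is being abandoned anyway. The same idea in isolation, with hypothetical names (not part of the patch):

    // 'tokens' is an enumerator the surrounding code is still consuming.
    // Built eagerly, the message would call MoveNext() on every pass;
    // deferred in a delegate, the side effect happens only on failure.
    Debugging.Assert(
        () => remainingTokens == 0,
        () => "wrong format: " + line + " next: " + (tokens.MoveNext() ? tokens.Current : ""));

diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
index f3c07dcdc1..e33c4a9ddc 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -126,7 +127,7 @@ public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldIn
 {
 int field = input.ReadVInt32();
 long numTerms = input.ReadVInt64();
- Debug.Assert(numTerms >= 0);
+ Debugging.Assert(() => numTerms >= 0);
 long termsStartPointer = input.ReadVInt64();
 FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
 long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ?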
-1 : input.ReadVInt64(); @@ -233,7 +234,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Debug.Assert(field != null); + Debugging.Assert(() => field != null); FieldReader result; fields.TryGetValue(field, out result); @@ -257,7 +258,7 @@ private class FieldReader : Terms public FieldReader(BlockTermsReader outerInstance, FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize) { - Debug.Assert(numTerms > 0); + Debugging.Assert(() => numTerms > 0); this.outerInstance = outerInstance; @@ -448,7 +449,7 @@ public override SeekStatus SeekCeil(BytesRef target) // Block must exist since, at least, the indexed term // is in the block: - Debug.Assert(result); + Debugging.Assert(() => result); indexIsCurrent = true; didIndexNext = false; @@ -536,7 +537,7 @@ public override SeekStatus SeekCeil(BytesRef target) // Target's prefix is before the common prefix // of this block, so we position to start of // block and return NOT_FOUND: - Debug.Assert(state.TermBlockOrd == 0); + Debugging.Assert(() => state.TermBlockOrd == 0); int suffix = termSuffixesReader.ReadVInt32(); term.Length = termBlockPrefix + suffix; @@ -641,7 +642,7 @@ public override SeekStatus SeekCeil(BytesRef target) // cross another index term (besides the first // one) while we are scanning: - Debug.Assert(indexIsCurrent); + Debugging.Assert(() => indexIsCurrent); if (!NextBlock()) { @@ -664,7 +665,7 @@ public override BytesRef Next() // works properly: if (seekPending) { - Debug.Assert(!indexIsCurrent); + Debugging.Assert(() => !indexIsCurrent); input.Seek(state.BlockFilePointer); int pendingSeekCount = state.TermBlockOrd; bool result = NextBlock(); @@ -674,12 +675,12 @@ public override BytesRef Next() // Block must exist since seek(TermState) was called w/ a // TermState previously returned by this enum when positioned // on a real term: - Debug.Assert(result); + Debugging.Assert(() => result); while (state.TermBlockOrd < pendingSeekCount) { BytesRef nextResult = _next(); - Debug.Assert(nextResult != null); + Debugging.Assert(() => nextResult != null); } seekPending = false; state.Ord = savOrd; @@ -768,8 +769,8 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef target, TermState otherState) { //System.out.println("BTR.seekExact termState target=" + target.utf8ToString() + " " + target + " this=" + this); - Debug.Assert(otherState != null && otherState is BlockTermState); - Debug.Assert(!doOrd || ((BlockTermState)otherState).Ord < outerInstance.numTerms); + Debugging.Assert(() => otherState != null && otherState is BlockTermState); + Debugging.Assert(() => !doOrd || ((BlockTermState)otherState).Ord < outerInstance.numTerms); state.CopyFrom(otherState); seekPending = true; indexIsCurrent = false; @@ -793,7 +794,7 @@ public override void SeekExact(long ord) throw new InvalidOperationException("terms index was not loaded"); } - Debug.Assert(ord < outerInstance.numTerms); + Debugging.Assert(() => ord < outerInstance.numTerms); // TODO: if ord is in same terms block and // after current ord, we should avoid this seek just @@ -802,7 +803,7 @@ public override void SeekExact(long ord) bool result = NextBlock(); // Block must exist since ord < numTerms: - Debug.Assert(result); + Debugging.Assert(() => result); indexIsCurrent = true; didIndexNext = false; @@ -810,7 +811,7 @@ public override void SeekExact(long ord) seekPending = false; state.Ord = 
indexEnum.Ord - 1; - Debug.Assert(state.Ord >= -1, "Ord=" + state.Ord); + Debugging.Assert(() => state.Ord >= -1, () => "Ord=" + state.Ord); term.CopyBytes(indexEnum.Term); // Now, scan: @@ -818,9 +819,9 @@ public override void SeekExact(long ord) while (left > 0) { BytesRef term = _next(); - Debug.Assert(term != null); + Debugging.Assert(() => term != null); left--; - Debug.Assert(indexIsCurrent); + Debugging.Assert(() => indexIsCurrent); } } diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs index 853fe157e3..63a82a984b 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs @@ -1,10 +1,10 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Codecs.BlockTerms { @@ -70,7 +70,7 @@ private class FieldMetaData public FieldMetaData(FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int int64sSize) { - Debug.Assert(numTerms > 0); + Debugging.Assert(() => numTerms > 0); FieldInfo = fieldInfo; TermsStartPointer = termsStartPointer; @@ -123,7 +123,7 @@ private void WriteHeader(IndexOutput output) public override TermsConsumer AddField(FieldInfo field) { //System.out.println("\nBTW.addField seg=" + segment + " field=" + field.name); - Debug.Assert(currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); + Debugging.Assert(() => currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); currentField = field; TermsIndexWriterBase.FieldWriter fieldIndexWriter = termsIndexWriter.AddField(field, m_output.GetFilePointer()); return new TermsWriter(this, fieldIndexWriter, field, postingsWriter); @@ -235,7 +235,7 @@ public override PostingsConsumer StartTerm(BytesRef text) public override void FinishTerm(BytesRef text, TermStats stats) { - Debug.Assert(stats.DocFreq > 0); + Debugging.Assert(() => stats.DocFreq > 0); //System.out.println("BTW: finishTerm term=" + fieldInfo.name + ":" + text.utf8ToString() + " " + text + " seg=" + segment + " df=" + stats.docFreq); bool isIndexTerm = fieldIndexWriter.CheckIndexTerm(text, stats); @@ -302,8 +302,8 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount private int SharedPrefix(BytesRef term1, BytesRef term2) { - Debug.Assert(term1.Offset == 0); - Debug.Assert(term2.Offset == 0); + Debugging.Assert(() => term1.Offset == 0); + Debugging.Assert(() => term2.Offset == 0); int pos1 = 0; int pos1End = pos1 + Math.Min(term1.Length, term2.Length); int pos2 = 0; @@ -359,7 +359,7 @@ private void FlushBlock() for (int termCount = 0; termCount < pendingCount; termCount++) { BlockTermState state = pendingTerms[termCount].State; - Debug.Assert(state != null); + Debugging.Assert(() => state != null); bytesWriter.WriteVInt32(state.DocFreq); if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY) { diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs index d28b331d90..bf5ba28a1f 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Support; @@ -69,7 +70,7 @@ public 
FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string seg { this.termComp = termComp; - Debug.Assert(indexDivisor == -1 || indexDivisor > 0); + Debugging.Assert(() => indexDivisor == -1 || indexDivisor > 0); input = dir.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION), context); @@ -100,7 +101,7 @@ public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string seg // In case terms index gets loaded, later, on demand totalIndexInterval = indexInterval * indexDivisor; } - Debug.Assert(totalIndexInterval > 0); + Debugging.Assert(() => totalIndexInterval > 0); SeekDir(input, dirOffset); @@ -189,7 +190,7 @@ public override long Seek(BytesRef target) { int lo = 0; // binary search int hi = fieldIndex.numIndexTerms - 1; - Debug.Assert(outerInstance.totalIndexInterval > 0, "totalIndexInterval=" + outerInstance.totalIndexInterval); + Debugging.Assert(() => outerInstance.totalIndexInterval > 0, () => "totalIndexInterval=" + outerInstance.totalIndexInterval); while (hi >= lo) { @@ -210,7 +211,7 @@ public override long Seek(BytesRef target) } else { - Debug.Assert(mid >= 0); + Debugging.Assert(() => mid >= 0); ord = mid * outerInstance.totalIndexInterval; return fieldIndex.termsStart + fieldIndex.termsDictOffsets.Get(mid); } @@ -218,7 +219,7 @@ public override long Seek(BytesRef target) if (hi < 0) { - Debug.Assert(hi == -1); + Debugging.Assert(() => hi == -1); hi = 0; } @@ -251,7 +252,7 @@ public override long Seek(long ord) { int idx = (int)(ord / outerInstance.totalIndexInterval); // caller must ensure ord is in bounds - Debug.Assert(idx < fieldIndex.numIndexTerms); + Debugging.Assert(() => idx < fieldIndex.numIndexTerms); long offset = fieldIndex.termOffsets.Get(idx); int length = (int)(fieldIndex.termOffsets.Get(1 + idx) - offset); outerInstance.termBytesReader.FillSlice(term, fieldIndex.termBytesStart + offset, length); @@ -327,11 +328,11 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS // -1 is passed to mean "don't load term index", but // if we are then later loaded it's overwritten with // a real value - Debug.Assert(outerInstance.outerInstance.indexDivisor > 0); + Debugging.Assert(() => outerInstance.outerInstance.indexDivisor > 0); this.numIndexTerms = 1 + (numIndexTerms - 1) / outerInstance.outerInstance.indexDivisor; - Debug.Assert(this.numIndexTerms > 0, "numIndexTerms=" + numIndexTerms + " indexDivisor=" + outerInstance.outerInstance.indexDivisor); + Debugging.Assert(() => this.numIndexTerms > 0, () => "numIndexTerms=" + numIndexTerms + " indexDivisor=" + outerInstance.outerInstance.indexDivisor); if (outerInstance.outerInstance.indexDivisor == 1) { @@ -344,11 +345,11 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS // records offsets into main terms dict file termsDictOffsets = PackedInt32s.GetReader(clone); - Debug.Assert(termsDictOffsets.Count == numIndexTerms); + Debugging.Assert(() => termsDictOffsets.Count == numIndexTerms); // records offsets into byte[] term data termOffsets = PackedInt32s.GetReader(clone); - Debug.Assert(termOffsets.Count == 1 + numIndexTerms); + Debugging.Assert(() => termOffsets.Count == 1 + numIndexTerms); } finally { @@ -397,8 +398,8 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS int numTermBytes = (int)(nextTermOffset - termOffset); clone.Seek(indexStart + termOffset); - Debug.Assert(indexStart + termOffset < clone.Length, "indexStart=" + indexStart + " 
termOffset=" + termOffset + " len=" + clone.Length); - Debug.Assert(indexStart + termOffset + numTermBytes < clone.Length); + Debugging.Assert(() => indexStart + termOffset < clone.Length, () => "indexStart=" + indexStart + " termOffset=" + termOffset + " len=" + clone.Length); + Debugging.Assert(() => indexStart + termOffset + numTermBytes < clone.Length); outerInstance.outerInstance.termBytes.Copy(clone, numTermBytes); termOffsetUpto += numTermBytes; diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs index 79caf43b76..d0f8f5d3a0 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; @@ -186,7 +187,7 @@ public override void Add(BytesRef text, TermStats stats, long termsFilePointer) lastTermsPointer = termsFilePointer; // save term length (in bytes) - Debug.Assert(indexedTermLength <= short.MaxValue); + Debugging.Assert(() => indexedTermLength <= short.MaxValue); termLengths[numIndexTerms] = (short)indexedTermLength; totTermLength += indexedTermLength; diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs index 29f226b23f..5fcfd0ee24 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Support; @@ -5,7 +6,6 @@ using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Codecs.BlockTerms { @@ -55,7 +55,7 @@ public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string input = dir.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION), new IOContext(context, true)); //this.segment = segment; // LUCENENET: Not used bool success = false; - Debug.Assert(indexDivisor == -1 || indexDivisor > 0); + Debugging.Assert(() => indexDivisor == -1 || indexDivisor > 0); try { diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs index ced4aa9634..58122454c7 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs @@ -1,10 +1,10 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Codecs.BlockTerms { @@ -292,7 +292,7 @@ public override void Add(BytesRef text, TermStats stats, long termsFilePointer) if (text.Length == 0) { // We already added empty string in ctor - Debug.Assert(termsFilePointer == startTermsFilePointer); + Debugging.Assert(() => termsFilePointer == startTermsFilePointer); return; } int lengthSave = text.Length; diff --git a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs index 3a99cda3f4..c6b18d313c 100644 --- a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs @@ -1,3 +1,4 @@ +using 
Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; @@ -379,7 +380,7 @@ public override TermsConsumer AddField(FieldInfo field) var bloomFilter = outerInstance._bloomFilterFactory.GetSetForField(_state, field); if (bloomFilter != null) { - Debug.Assert((_bloomFilters.ContainsKey(field) == false)); + Debugging.Assert(() => (_bloomFilters.ContainsKey(field) == false)); _bloomFilters.Add(field, bloomFilter); return new WrappedTermsConsumer(_delegateFieldsConsumer.AddField(field), bloomFilter); diff --git a/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs b/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs index 493328827b..5b7e8046c6 100644 --- a/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs +++ b/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Util; using System; @@ -227,7 +228,7 @@ public static FuzzySet Deserialize(DataInput input) private ContainsResult MayContainValue(int positiveHash) { - Debug.Assert((positiveHash >= 0)); + Debugging.Assert(() => (positiveHash >= 0)); // Bloom sizes are always base 2 and so can be ANDed for a fast modulo var pos = positiveHash & _bloomSize; diff --git a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs index ce02b7edba..400de2e714 100644 --- a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs @@ -1,4 +1,5 @@ using Lucene.Net.Codecs.Sep; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using System.Diagnostics; @@ -108,7 +109,7 @@ public Reader(IndexInput input, int[] pending, IBlockReader blockReader) internal virtual void Seek(long fp, int upto) { - Debug.Assert(upto < blockSize); + Debugging.Assert(() => upto < blockSize); if (seekPending || fp != lastBlockFP) { pendingFP = fp; @@ -172,7 +173,7 @@ public override void Read(DataInput indexIn, bool absolute) fp += indexIn.ReadVInt64(); } } - Debug.Assert(upto < outerInstance.m_blockSize); + Debugging.Assert(() => upto < outerInstance.m_blockSize); } public override void Seek(Int32IndexInput.Reader other) diff --git a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs index bc2a0c76ad..feb98e0ec4 100644 --- a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs @@ -1,6 +1,6 @@ using Lucene.Net.Codecs.Sep; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; -using System.Diagnostics; namespace Lucene.Net.Codecs.IntBlock { @@ -101,7 +101,7 @@ public override void Write(DataOutput indexOut, bool absolute) else if (fp == lastFP) { // same block - Debug.Assert(upto >= lastUpto); + Debugging.Assert(() => upto >= lastUpto); int uptoDelta = upto - lastUpto; indexOut.WriteVInt32(uptoDelta << 1 | 1); } diff --git a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs index 123cc87c46..89c956df2a 100644 --- a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs @@ -1,4 +1,5 @@ using Lucene.Net.Codecs.Sep; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Support; using System.Diagnostics; @@ -117,7 +118,7 @@ internal virtual void Seek(long fp, int upto) // TODO: should we do this in real-time, not lazy? 
pendingFP = fp; pendingUpto = upto; - Debug.Assert(pendingUpto >= 0, "pendingUpto=" + pendingUpto); + Debugging.Assert(() => pendingUpto >= 0, () => "pendingUpto=" + pendingUpto); seekPending = true; } diff --git a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs index 0b4eb5f16d..1ef5b83eeb 100644 --- a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs @@ -1,4 +1,5 @@ using Lucene.Net.Codecs.Sep; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using System.Diagnostics; @@ -106,7 +107,7 @@ public override void CopyFrom(Int32IndexOutput.Index other, bool copyLast) public override void Write(DataOutput indexOut, bool absolute) { - Debug.Assert(upto >= 0); + Debugging.Assert(() => upto >= 0); if (absolute) { indexOut.WriteVInt32(upto); @@ -115,7 +116,7 @@ public override void Write(DataOutput indexOut, bool absolute) else if (fp == lastFP) { // same block - Debug.Assert(upto >= lastUpto); + Debugging.Assert(() => upto >= lastUpto); int uptoDelta = upto - lastUpto; indexOut.WriteVInt32(uptoDelta << 1 | 1); } @@ -135,7 +136,7 @@ public override void Write(int v) hitExcDuringWrite = true; upto -= Add(v) - 1; hitExcDuringWrite = false; - Debug.Assert(upto >= 0); + Debugging.Assert(() => upto >= 0); } protected override void Dispose(bool disposing) @@ -151,7 +152,7 @@ protected override void Dispose(bool disposing) while (upto > stuffed) { upto -= Add(0) - 1; - Debug.Assert(upto >= 0); + Debugging.Assert(() => upto >= 0); stuffed += 1; } } diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs index 46839e0d23..51e8238c23 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; @@ -371,7 +372,7 @@ public bool MoveNext() } else { - Debug.Assert(false); + Debugging.Assert(() => false); return false; } } diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs index bf5fa2e26f..4cd0dda152 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs @@ -1,4 +1,5 @@ using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; @@ -551,7 +552,7 @@ private IBits GetMissingBits(int fieldNumber, long offset, long length) { var data = (IndexInput)this.data.Clone(); data.Seek(offset); - Debug.Assert(length % 8 == 0); + Debugging.Assert(() => length % 8 == 0); var bits = new long[(int)length >> 3]; for (var i = 0; i < bits.Length; i++) { diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs index b92fb41ab8..16a0e4d9f7 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Support; using System; using System.Collections.Generic; @@ -588,7 +589,7 @@ public DirectField(SegmentReadState state, string field, Terms termsIn, int minS upto++; } - Debug.Assert(upto == docFreq); + Debugging.Assert(() => 
upto == docFreq); ent = new HighFreqTerm(docs, freqs, positions, payloads, totalTermFreq); } @@ -624,7 +625,7 @@ public DirectField(SegmentReadState state, string field, Terms termsIn, int minS } } this.skipOffsets[numTerms] = skipOffset; - Debug.Assert(skipOffset == skipCount); + Debugging.Assert(() => skipOffset == skipCount); } /// Returns approximate RAM bytes used. @@ -737,7 +738,7 @@ private void SetSkips(int termOrd, byte[] termBytes) private void FinishSkips() { - Debug.Assert(count == terms.Length); + Debugging.Assert(() => count == terms.Length); int lastTermOffset = termOffsets[count - 1]; int lastTermLength = termOffsets[count] - lastTermOffset; @@ -971,7 +972,7 @@ public override void SeekExact(BytesRef term, TermState state) { termOrd = (int) ((OrdTermState) state).Ord; SetTerm(); - Debug.Assert(term.Equals(scratch)); + Debugging.Assert(() => term.Equals(scratch)); } public override BytesRef Term => scratch; @@ -1206,13 +1207,13 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, while (label > states[i].transitionMax) { states[i].transitionUpto++; - Debug.Assert(states[i].transitionUpto < states[i].transitions.Length); + Debugging.Assert(() => states[i].transitionUpto < states[i].transitions.Length); states[i].transitionMin = states[i].transitions[states[i].transitionUpto].Min; states[i].transitionMax = states[i].transitions[states[i].transitionUpto].Max; - Debug.Assert(states[i].transitionMin >= 0); - Debug.Assert(states[i].transitionMin <= 255); - Debug.Assert(states[i].transitionMax >= 0); - Debug.Assert(states[i].transitionMax <= 255); + Debugging.Assert(() => states[i].transitionMin >= 0); + Debugging.Assert(() => states[i].transitionMin <= 255); + Debugging.Assert(() => states[i].transitionMax >= 0); + Debugging.Assert(() => states[i].transitionMax <= 255); } // Skip forwards until we find a term matching @@ -1253,7 +1254,7 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, // System.out.println(" no match; already beyond; return termOrd=" + termOrd); // } stateUpto -= skipUpto; - Debug.Assert(stateUpto >= 0); + Debugging.Assert(() => stateUpto >= 0); return; } else if (label == (outerInstance.termBytes[termOffset_i + i] & 0xFF)) @@ -1268,7 +1269,7 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, int nextState = runAutomaton.Step(states[stateUpto].state, label); // Automaton is required to accept startTerm: - Debug.Assert(nextState != -1); + Debugging.Assert(() => nextState != -1); stateUpto++; states[stateUpto].changeOrd = outerInstance.skips[skipOffset + skipUpto++]; @@ -1299,12 +1300,12 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, while (termOrd < outerInstance.terms.Length && outerInstance.Compare(termOrd, startTerm) <= 0) { - Debug.Assert(termOrd == startTermOrd || + Debugging.Assert(() => termOrd == startTermOrd || outerInstance.skipOffsets[termOrd] == outerInstance.skipOffsets[termOrd + 1]); termOrd++; } - Debug.Assert(termOrd - startTermOrd < outerInstance.minSkipCount); + Debugging.Assert(() => termOrd - startTermOrd < outerInstance.minSkipCount); termOrd--; stateUpto -= skipUpto; // if (DEBUG) { @@ -1385,7 +1386,7 @@ public override BytesRef Next() if (termOrd == 0 && outerInstance.termOffsets[1] == 0) { // Special-case empty string: - Debug.Assert(stateUpto == 0); + Debugging.Assert(() => stateUpto == 0); // if (DEBUG) { // System.out.println(" visit empty string"); // } @@ -1434,9 +1435,9 @@ public override BytesRef Next() 
// System.out.println(" term=" + new BytesRef(termBytes, termOffset, termLength).utf8ToString() + " skips=" + Arrays.toString(skips)); // } - Debug.Assert(termOrd < state.changeOrd); + Debugging.Assert(() => termOrd < state.changeOrd); - Debug.Assert(stateUpto <= termLength, "term.length=" + termLength + "; stateUpto=" + stateUpto); + Debugging.Assert(() => stateUpto <= termLength, () => "term.length=" + termLength + "; stateUpto=" + stateUpto); int label = outerInstance.termBytes[termOffset + stateUpto] & 0xFF; while (label > state.transitionMax) @@ -1455,7 +1456,7 @@ public override BytesRef Next() } else { - Debug.Assert(state.changeOrd > termOrd); + Debugging.Assert(() => state.changeOrd > termOrd); // if (DEBUG) { // System.out.println(" jumpend " + (state.changeOrd - termOrd)); // } @@ -1466,14 +1467,14 @@ public override BytesRef Next() } goto nextTermContinue; } - Debug.Assert(state.transitionUpto < state.transitions.Length, - " state.transitionUpto=" + state.transitionUpto + " vs " + state.transitions.Length); + Debugging.Assert(() => state.transitionUpto < state.transitions.Length, + () => " state.transitionUpto=" + state.transitionUpto + " vs " + state.transitions.Length); state.transitionMin = state.transitions[state.transitionUpto].Min; state.transitionMax = state.transitions[state.transitionUpto].Max; - Debug.Assert(state.transitionMin >= 0); - Debug.Assert(state.transitionMin <= 255); - Debug.Assert(state.transitionMax >= 0); - Debug.Assert(state.transitionMax <= 255); + Debugging.Assert(() => state.transitionMin >= 0); + Debugging.Assert(() => state.transitionMin <= 255); + Debugging.Assert(() => state.transitionMax >= 0); + Debugging.Assert(() => state.transitionMax <= 255); } int targetLabel = state.transitionMin; @@ -1602,7 +1603,7 @@ public override BytesRef Next() if (compiledAutomaton.CommonSuffixRef != null) { //System.out.println("suffix " + compiledAutomaton.commonSuffixRef.utf8ToString()); - Debug.Assert(compiledAutomaton.CommonSuffixRef.Offset == 0); + Debugging.Assert(() => compiledAutomaton.CommonSuffixRef.Offset == 0); if (termLength < compiledAutomaton.CommonSuffixRef.Length) { termOrd++; @@ -2001,7 +2002,7 @@ public override int NextDoc() if (upto < postings.Length) { freq = postings[upto + 1]; - Debug.Assert(freq > 0); + Debugging.Assert(() => freq > 0); return postings[upto]; } } @@ -2010,7 +2011,7 @@ public override int NextDoc() while (upto < postings.Length) { freq = postings[upto + 1]; - Debug.Assert(freq > 0); + Debugging.Assert(() => freq > 0); if (liveDocs.Get(postings[upto])) { return postings[upto]; @@ -2185,7 +2186,7 @@ public override int NextDoc() public override int NextPosition() { - Debug.Assert(skipPositions > 0); + Debugging.Assert(() => skipPositions > 0); skipPositions--; int pos = postings[upto++]; diff --git a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs index 20758484e0..55d733e6cd 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs @@ -9,6 +9,7 @@ using System.Diagnostics; using JCG = J2N.Collections.Generic; using BitSet = Lucene.Net.Util.OpenBitSet; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Codecs.Memory { @@ -165,7 +166,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Debug.Assert(field != null); + Debugging.Assert(() => field != null); TermsReader result; fields.TryGetValue(field, out result); return result; @@ -217,7 +218,7 @@ internal 
TermsReader(FSTOrdTermsReader outerInstance, FieldInfo fieldInfo, Index this.longsSize = longsSize; this.index = index; - Debug.Assert((numTerms & (~0xffffffffL)) == 0); + Debugging.Assert(() => (numTerms & (~0xffffffffL)) == 0); int numBlocks = (int)(numTerms + INTERVAL - 1) / INTERVAL; this.numSkipInfo = longsSize + 3; this.skipInfo = new long[numBlocks * numSkipInfo]; @@ -499,7 +500,7 @@ public override BytesRef Next() { seekPending = false; var status = SeekCeil(term); - Debug.Assert(status == SeekStatus.FOUND); // must positioned on valid term + Debugging.Assert(() => status == SeekStatus.FOUND); // must positioned on valid term } UpdateEnum(fstEnum.Next()); return term; @@ -630,7 +631,7 @@ internal override void DecodeMetaData() internal override void DecodeStats() { var arc = TopFrame().arc; - Debug.Assert(arc.NextFinalOutput == fstOutputs.NoOutput); + Debugging.Assert(() => arc.NextFinalOutput == fstOutputs.NoOutput); ord = arc.Output.Value; base.DecodeStats(); } @@ -698,7 +699,7 @@ private BytesRef DoSeekCeil(BytesRef target) { break; } - Debug.Assert(IsValid(frame)); // target must be fetched from automaton + Debugging.Assert(() => IsValid(frame)); // target must be fetched from automaton PushFrame(frame); upto++; } @@ -836,7 +837,7 @@ private void PushFrame(Frame frame) arc.Output = fstOutputs.Add(TopFrame().arc.Output, arc.Output); term = Grow(arc.Label); level++; - Debug.Assert(frame == stack[level]); + Debugging.Assert(() => frame == stack[level]); } private Frame PopFrame() diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs b/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs index 37a64b4591..93c61d11f4 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Support; +using Lucene.Net.Diagnostics; +using Lucene.Net.Support; using System.Diagnostics; namespace Lucene.Net.Codecs.Memory @@ -127,7 +128,7 @@ public override TermData Common(TermData t1, TermData t2) if (Equals(t1, NO_OUTPUT) || Equals(t2, NO_OUTPUT)) return NO_OUTPUT; - Debug.Assert(t1.longs.Length == t2.longs.Length); + Debugging.Assert(() => t1.longs.Length == t2.longs.Length); long[] min = t1.longs, max = t2.longs; int pos = 0; @@ -182,7 +183,7 @@ public override TermData Subtract(TermData t1, TermData t2) if (Equals(t2, NO_OUTPUT)) return t1; - Debug.Assert(t1.longs.Length == t2.longs.Length); + Debugging.Assert(() => t1.longs.Length == t2.longs.Length); int pos = 0; long diff = 0; @@ -219,7 +220,7 @@ public override TermData Add(TermData t1, TermData t2) if (Equals(t2, NO_OUTPUT)) return t1; - Debug.Assert(t1.longs.Length == t2.longs.Length); + Debugging.Assert(() => t1.longs.Length == t2.longs.Length); var pos = 0; var accum = new long[_longsSize]; diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs index c3ced65eeb..4eb4a3220a 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; @@ -165,7 +166,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Debug.Assert(field != null); + Debugging.Assert(() => field != null); TermsReader result; fields.TryGetValue(field, out result); return result; @@ -368,7 +369,7 @@ public override BytesRef Next() { seekPending = false; SeekStatus status = 
SeekCeil(term); - Debug.Assert(status == SeekStatus.FOUND); // must positioned on valid term + Debugging.Assert(() => status == SeekStatus.FOUND); // must positioned on valid term } UpdateEnum(fstEnum.Next()); return term; @@ -499,7 +500,7 @@ internal IntersectTermsEnum(FSTTermsReader.TermsReader outerInstance, CompiledAu internal override void DecodeMetaData() { - Debug.Assert(term != null); + Debugging.Assert(() => term != null); if (!decoded) { if (meta.bytes != null) @@ -610,7 +611,7 @@ private BytesRef DoSeekCeil(BytesRef target) { break; } - Debug.Assert(IsValid(frame)); // target must be fetched from automaton + Debugging.Assert(() => IsValid(frame)); // target must be fetched from automaton PushFrame(frame); upto++; } diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs index f21ecfab92..b7c595cc40 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Util.Fst; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util.Fst; using System; using System.Collections; using System.Collections.Generic; @@ -143,7 +144,7 @@ internal virtual void AddNumericField(FieldInfo field, IEnumerable values ++count; } - Debug.Assert(count == maxDoc); + Debugging.Assert(() => count == maxDoc); } if (missing) diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs index 64c8e98ecd..de1b5fadeb 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs @@ -1,4 +1,5 @@ using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; @@ -674,7 +675,7 @@ private IBits GetMissingBits(int fieldNumber, long offset, long length) { var data = (IndexInput)this.data.Clone(); data.Seek(offset); - Debug.Assert(length%8 == 0); + Debugging.Assert(() => length % 8 == 0); var bits = new long[(int) length >> 3]; for (var i = 0; i < bits.Length; i++) { diff --git a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs index 8e6ce5b4e1..8df0e00536 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; @@ -145,7 +146,7 @@ public PostingsWriter(MemoryPostingsFormat.TermsWriter outerInstance) public override void StartDoc(int docID, int termDocFreq) { int delta = docID - lastDocID; - Debug.Assert(docID == 0 || delta > 0); + Debugging.Assert(() => docID == 0 || delta > 0); lastDocID = docID; docCount++; @@ -160,7 +161,7 @@ public override void StartDoc(int docID, int termDocFreq) else { buffer.WriteVInt32(delta << 1); - Debug.Assert(termDocFreq > 0); + Debugging.Assert(() => termDocFreq > 0); buffer.WriteVInt32(termDocFreq); } @@ -170,12 +171,12 @@ public override void StartDoc(int docID, int termDocFreq) public override void AddPosition(int pos, BytesRef payload, int startOffset, int endOffset) { - Debug.Assert(payload == null || outerInstance.field.HasPayloads); + Debugging.Assert(() => payload == null || outerInstance.field.HasPayloads); //System.out.println(" addPos pos=" + pos + " payload=" + payload); int delta = pos - lastPos; - Debug.Assert(delta 
>= 0); + Debugging.Assert(() => delta >= 0); lastPos = pos; int payloadLen = 0; @@ -230,7 +231,7 @@ public override void FinishDoc() public virtual PostingsWriter Reset() { - Debug.Assert(buffer.GetFilePointer() == 0); + Debugging.Assert(() => buffer.GetFilePointer() == 0); lastDocID = 0; docCount = 0; lastPayloadLen = 0; @@ -254,9 +255,9 @@ public override PostingsConsumer StartTerm(BytesRef text) public override void FinishTerm(BytesRef text, TermStats stats) { - Debug.Assert(postingsWriter.docCount == stats.DocFreq); + Debugging.Assert(() => postingsWriter.docCount == stats.DocFreq); - Debug.Assert(buffer2.GetFilePointer() == 0); + Debugging.Assert(() => buffer2.GetFilePointer() == 0); buffer2.WriteVInt32(stats.DocFreq); if (field.IndexOptions != IndexOptions.DOCS_ONLY) @@ -401,7 +402,7 @@ public bool CanReuse(IndexOptions indexOptions, bool storePayloads) public FSTDocsEnum Reset(BytesRef bufferIn, IBits liveDocs, int numDocs) { - Debug.Assert(numDocs > 0); + Debugging.Assert(() => numDocs > 0); if (buffer.Length < bufferIn.Length) { buffer = ArrayUtil.Grow(buffer, bufferIn.Length); @@ -445,7 +446,7 @@ public override int NextDoc() else { freq = @in.ReadVInt32(); - Debug.Assert(freq > 0); + Debugging.Assert(() => freq > 0); } if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) @@ -554,7 +555,7 @@ public bool CanReuse(bool storePayloads, bool storeOffsets) public FSTDocsAndPositionsEnum Reset(BytesRef bufferIn, IBits liveDocs, int numDocs) { - Debug.Assert(numDocs > 0); + Debugging.Assert(() => numDocs > 0); // System.out.println("D&P reset bytes this=" + this); // for(int i=bufferIn.offset;i 0); + Debugging.Assert(() => freq > 0); } if (liveDocs == null || liveDocs.Get(accum)) @@ -654,7 +655,7 @@ public override int NextDoc() public override int NextPosition() { //System.out.println(" nextPos storePayloads=" + storePayloads + " this=" + this); - Debug.Assert(posPending > 0); + Debugging.Assert(() => posPending > 0); posPending--; if (!storePayloads) { diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs index 78d6db7a91..17ed5fb1a9 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Util; using System.Diagnostics; @@ -46,7 +47,7 @@ public PulsingPostingsFormat(PostingsBaseFormat wrappedPostingsBaseFormat, int f int minBlockSize, int maxBlockSize) : base() { - Debug.Assert(minBlockSize > 1); + Debugging.Assert(() => minBlockSize > 1); _freqCutoff = freqCutoff; _minBlockSize = minBlockSize; diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs index 77cccc3e9a..d43a4f7e48 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs @@ -1,4 +1,5 @@ using J2N.Runtime.CompilerServices; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Support; @@ -121,7 +122,7 @@ public override object Clone() } else { - Debug.Assert(WrappedTermState != null); + Debugging.Assert(() => WrappedTermState != null); clone.WrappedTermState = (BlockTermState)WrappedTermState.Clone(); clone.Absolute = Absolute; @@ -171,7 +172,7 @@ public override void DecodeTerm(long[] empty, DataInput input, FieldInfo fieldIn { var termState2 = (PulsingTermState) termState; - Debug.Assert(empty.Length == 
0); + Debugging.Assert(() => empty.Length == 0); termState2.Absolute = termState2.Absolute || absolute; // if we have positions, its total TF, otherwise its computed based on docFreq. @@ -339,7 +340,7 @@ public PulsingDocsEnum(FieldInfo fieldInfo) public virtual PulsingDocsEnum Reset(IBits liveDocs, PulsingTermState termState) { - Debug.Assert(termState.PostingsSize != -1); + Debugging.Assert(() => termState.PostingsSize != -1); // Must make a copy of termState's byte[] so that if // app does TermsEnum.next(), this DocsEnum is not affected @@ -481,7 +482,7 @@ internal bool CanReuse(FieldInfo fieldInfo) public virtual PulsingDocsAndPositionsEnum Reset(IBits liveDocs, PulsingTermState termState) { - Debug.Assert(termState.PostingsSize != -1); + Debugging.Assert(() => termState.PostingsSize != -1); if (_postingsBytes == null) { @@ -541,7 +542,7 @@ public override int Advance(int target) public override int NextPosition() { - Debug.Assert(_posPending > 0); + Debugging.Assert(() => _posPending > 0); _posPending--; diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs index 38034f796c..7179fd5c7d 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; @@ -149,7 +150,7 @@ public override BlockTermState NewTermState() public override void StartTerm() { - Debug.Assert(_pendingCount == 0); + Debugging.Assert(() => _pendingCount == 0); } // TODO: -- should we NOT reuse across fields? would @@ -174,7 +175,7 @@ public override int SetField(FieldInfo fieldInfo) public override void StartDoc(int docId, int termDocFreq) { - Debug.Assert(docId >= 0, "Got DocID=" + docId); + Debugging.Assert(() => docId >= 0, () => "Got DocID=" + docId); if (_pendingCount == _pending.Length) { @@ -184,7 +185,7 @@ public override void StartDoc(int docId, int termDocFreq) if (_pendingCount != -1) { - Debug.Assert(_pendingCount < _pending.Length); + Debugging.Assert(() => _pendingCount < _pending.Length); _currentDoc = _pending[_pendingCount]; _currentDoc.docID = docId; if (_indexOptions == IndexOptions.DOCS_ONLY) @@ -266,7 +267,7 @@ public override void FinishTerm(BlockTermState state) { var state2 = (PulsingTermState)state; - Debug.Assert(_pendingCount > 0 || _pendingCount == -1); + Debugging.Assert(() => _pendingCount > 0 || _pendingCount == -1); if (_pendingCount == -1) { @@ -317,7 +318,7 @@ public override void FinishTerm(BlockTermState state) for (var posIDX = 0; posIDX < doc.termFreq; posIDX++) { var pos = _pending[pendingIDX++]; - Debug.Assert(pos.docID == doc.docID); + Debugging.Assert(() => pos.docID == doc.docID); var posDelta = pos.pos - lastPos; lastPos = pos.pos; @@ -360,7 +361,7 @@ public override void FinishTerm(BlockTermState state) if (payloadLength > 0) { - Debug.Assert(_storePayloads); + Debugging.Assert(() => _storePayloads); _buffer.WriteBytes(pos.payload.Bytes, 0, pos.payload.Length); } } @@ -374,7 +375,7 @@ public override void FinishTerm(BlockTermState state) Position doc = _pending[posIdx]; int delta = doc.docID - lastDocId; - Debug.Assert(doc.termFreq != 0); + Debugging.Assert(() => doc.termFreq != 0); if (doc.termFreq == 1) { @@ -410,7 +411,7 @@ public override void EncodeTerm(long[] empty, DataOutput output, FieldInfo field bool abs) { var _state = (PulsingTermState)state; - Debug.Assert(empty.Length == 0); + Debugging.Assert(() => 
empty.Length == 0); _absolute = _absolute || abs; if (_state.bytes == null) { @@ -468,7 +469,7 @@ protected override void Dispose(bool disposing) /// private void Push() { - Debug.Assert(_pendingCount == _pending.Length); + Debugging.Assert(() => _pendingCount == _pending.Length); _wrappedPostingsWriter.StartTerm(); @@ -486,7 +487,7 @@ private void Push() } else if (doc.docID != pos.docID) { - Debug.Assert(pos.docID > doc.docID); + Debugging.Assert(() => pos.docID > doc.docID); _wrappedPostingsWriter.FinishDoc(); doc = pos; _wrappedPostingsWriter.StartDoc(doc.docID, doc.termFreq); diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs index cc6b4b446b..5943d169ea 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System.Diagnostics; @@ -262,7 +263,7 @@ public override DocsEnum Docs(FieldInfo fieldInfo, BlockTermState termState, IBi public override DocsAndPositionsEnum DocsAndPositions(FieldInfo fieldInfo, BlockTermState termState, IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { - Debug.Assert(fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + Debugging.Assert(() => fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); SepTermState termState_ = (SepTermState)termState; SepDocsAndPositionsEnum postingsEnum; if (reuse == null || !(reuse is SepDocsAndPositionsEnum)) @@ -691,7 +692,7 @@ public override int NextPosition() { // Payload length has changed payloadLength = posReader.Next(); - Debug.Assert(payloadLength >= 0); + Debugging.Assert(() => payloadLength >= 0); } pendingPosCount--; position = 0; @@ -706,7 +707,7 @@ public override int NextPosition() { // Payload length has changed payloadLength = posReader.Next(); - Debug.Assert(payloadLength >= 0); + Debugging.Assert(() => payloadLength >= 0); } position += (int)(((uint)code) >> 1); pendingPayloadBytes += payloadLength; @@ -718,7 +719,7 @@ public override int NextPosition() } pendingPosCount--; - Debug.Assert(pendingPosCount >= 0); + Debugging.Assert(() => pendingPosCount >= 0); return position; } @@ -740,7 +741,7 @@ public override BytesRef GetPayload() return payload; } - Debug.Assert(pendingPayloadBytes >= payloadLength); + Debugging.Assert(() => pendingPayloadBytes >= payloadLength); if (pendingPayloadBytes > payloadLength) { diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs index a9b78fbb21..5f75ab4067 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; @@ -261,10 +262,10 @@ public override void StartDoc(int docID, int termDocFreq) /// Add a new position & payload. 
public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { - Debug.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); int delta = position - lastPosition; - Debug.Assert(delta >= 0, "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) + Debugging.Assert(() => delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) lastPosition = position; if (storePayloads) @@ -316,8 +317,8 @@ public override void FinishTerm(BlockTermState state) { SepTermState state_ = (SepTermState)state; // TODO: -- wasteful we are counting this in two places? - Debug.Assert(state_.DocFreq > 0); - Debug.Assert(state_.DocFreq == df); + Debugging.Assert(() => state_.DocFreq > 0); + Debugging.Assert(() => state_.DocFreq == df); state_.DocIndex = docOut.GetIndex(); state_.DocIndex.CopyFrom(docIndex, false); diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs index d774121e2d..54528deae6 100644 --- a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs +++ b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Support; using System.Diagnostics; @@ -194,7 +195,7 @@ protected override void SetLastSkipData(int level) protected override int ReadSkipData(int level, IndexInput skipStream) { int delta; - Debug.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !currentFieldStoresPayloads); + Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !currentFieldStoresPayloads); if (currentFieldStoresPayloads) { // the current field stores payloads. 
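[Reviewer note] The hunks in this series are mechanical, but the shape of the replacement is easy to miss in the noise. Below is a minimal sketch of the pattern, assuming Assert(Func<bool>) and Assert(Func<bool>, Func<string>) overloads gated on Debugging.AssertsEnabled, as used throughout the hunks here (Debugging is internal, so this only compiles inside the Lucene.Net assembly). The PositionTracker type is hypothetical, modeled on the SepPostingsWriter.AddPosition hunk above:

    using Lucene.Net.Diagnostics;

    internal sealed class PositionTracker // hypothetical illustration, not part of this patch
    {
        private int lastPosition;

        public void Add(int position)
        {
            int delta = position - lastPosition;

            // Unlike Debug.Assert, this call is not compiled away in Release
            // builds; whether the check runs is decided at runtime by
            // Debugging.AssertsEnabled (the "assert" system property). The
            // condition delegate is invoked only when asserts are enabled, and
            // the message delegate only when the condition fails, so the
            // "position=..." string is never concatenated on the happy path.
            Debugging.Assert(() => delta >= 0,
                () => "position=" + position + " lastPosition=" + lastPosition);

            lastPosition = position;
        }
    }

Note the trade-off: both lambdas capture locals (position, delta), so each call still allocates closures even when AssertsEnabled is false, which is presumably why the converted asserts keep their conditions trivial on hot paths.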
diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs b/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs index 96a984ebed..3cd61c1673 100644 --- a/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs +++ b/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Support; using System.Diagnostics; @@ -177,7 +178,7 @@ protected override void WriteSkipData(int level, IndexOutput skipBuffer) // current payload length equals the length at the previous // skip point - Debug.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !curStorePayloads); + Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !curStorePayloads); if (curStorePayloads) { diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs index 586a5bb67e..1b53e2dd9b 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs @@ -1,4 +1,5 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; @@ -75,14 +76,14 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) { break; } - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.FIELD), scratch.Utf8ToString()); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.FIELD), () => scratch.Utf8ToString()); var fieldName = StripPrefix(SimpleTextDocValuesWriter.FIELD); var field = new OneField(); fields[fieldName] = field; ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.TYPE), scratch.Utf8ToString()); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.TYPE), () => scratch.Utf8ToString()); var dvType = (DocValuesType) @@ -91,11 +92,11 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) if (dvType == DocValuesType.NUMERIC) { ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.MINVALUE), - "got " + scratch.Utf8ToString() + " field=" + fieldName + " ext=" + ext); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MINVALUE), + () => "got " + scratch.Utf8ToString() + " field=" + fieldName + " ext=" + ext); field.MinValue = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.MINVALUE), CultureInfo.InvariantCulture); ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN)); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN)); field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN); field.DataStartFilePointer = data.GetFilePointer(); data.Seek(data.GetFilePointer() + (1 + field.Pattern.Length + 2)*maxDoc); @@ -103,10 +104,10 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) else if (dvType == DocValuesType.BINARY) { ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.MAXLENGTH)); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MAXLENGTH)); field.MaxLength = Convert.ToInt32(StripPrefix(SimpleTextDocValuesWriter.MAXLENGTH), CultureInfo.InvariantCulture); ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN)); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN)); field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN); field.DataStartFilePointer = data.GetFilePointer(); data.Seek(data.GetFilePointer() + (9 + field.Pattern.Length + field.MaxLength + 2)*maxDoc); @@ -114,16 
+115,16 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) else if (dvType == DocValuesType.SORTED || dvType == DocValuesType.SORTED_SET) { ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.NUMVALUES)); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.NUMVALUES)); field.NumValues = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.NUMVALUES), CultureInfo.InvariantCulture); ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.MAXLENGTH)); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MAXLENGTH)); field.MaxLength = Convert.ToInt32(StripPrefix(SimpleTextDocValuesWriter.MAXLENGTH), CultureInfo.InvariantCulture); ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN)); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN)); field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN); ReadLine(); - Debug.Assert(StartsWith(SimpleTextDocValuesWriter.ORDPATTERN)); + Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.ORDPATTERN)); field.OrdPattern = StripPrefix(SimpleTextDocValuesWriter.ORDPATTERN); field.DataStartFilePointer = data.GetFilePointer(); data.Seek(data.GetFilePointer() + (9 + field.Pattern.Length + field.MaxLength)*field.NumValues + @@ -137,16 +138,16 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) // We should only be called from above if at least one // field has DVs: - Debug.Assert(fields.Count > 0); + Debugging.Assert(() => fields.Count > 0); } public override NumericDocValues GetNumeric(FieldInfo fieldInfo) { var field = fields[fieldInfo.Name]; - Debug.Assert(field != null); + Debugging.Assert(() => field != null); // SegmentCoreReaders already verifies this field is valid: - Debug.Assert(field != null, "field=" + fieldInfo.Name + " fields=" + fields); + Debugging.Assert(() => field != null, () => "field=" + fieldInfo.Name + " fields=" + fields); var @in = (IndexInput)data.Clone(); var scratch = new BytesRef(); @@ -242,7 +243,7 @@ public bool Get(int index) public override BinaryDocValues GetBinary(FieldInfo fieldInfo) { var field = fields[fieldInfo.Name]; - Debug.Assert(field != null); + Debugging.Assert(() => field != null); var input = (IndexInput)data.Clone(); var scratch = new BytesRef(); @@ -276,7 +277,7 @@ public override void Get(int docId, BytesRef result) _input.Seek(_field.DataStartFilePointer + (9 + _field.Pattern.Length + _field.MaxLength + 2) * docId); SimpleTextUtil.ReadLine(_input, _scratch); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH)); int len; try { @@ -333,7 +334,7 @@ public bool Get(int index) { _input.Seek(_field.DataStartFilePointer + (9 + _field.Pattern.Length + _field.MaxLength + 2) * index); SimpleTextUtil.ReadLine(_input, _scratch); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH)); int len; try { @@ -366,7 +367,7 @@ public override SortedDocValues GetSorted(FieldInfo fieldInfo) var field = fields[fieldInfo.Name]; // SegmentCoreReaders already verifies this field is valid: - Debug.Assert(field != null); + Debugging.Assert(() => field != null); var input = (IndexInput)data.Clone(); var scratch = new BytesRef(); @@ -435,8 +436,8 @@ public override void LookupOrd(int ord, BytesRef result) } _input.Seek(_field.DataStartFilePointer + ord * (9 + 
_field.Pattern.Length + _field.MaxLength)); SimpleTextUtil.ReadLine(_input, _scratch); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), - "got " + _scratch.Utf8ToString() + " in=" + _input); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), + () => "got " + _scratch.Utf8ToString() + " in=" + _input); int len; try { @@ -471,7 +472,7 @@ public override SortedSetDocValues GetSortedSet(FieldInfo fieldInfo) // SegmentCoreReaders already verifies this field is // valid: - Debug.Assert(field != null); + Debugging.Assert(() => field != null); var input = (IndexInput) data.Clone(); var scratch = new BytesRef(); @@ -540,8 +541,8 @@ public override void LookupOrd(long ord, BytesRef result) _input.Seek(_field.DataStartFilePointer + ord * (9 + _field.Pattern.Length + _field.MaxLength)); SimpleTextUtil.ReadLine(_input, _scratch); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), - "got " + _scratch.Utf8ToString() + " in=" + _input); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), + () => "got " + _scratch.Utf8ToString() + " in=" + _input); int len; try { diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs index 1c544ea43d..856baa2384 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; @@ -68,15 +69,15 @@ internal SimpleTextDocValuesWriter(SegmentWriteState state, string ext) /// private bool FieldSeen(string field) { - Debug.Assert(!_fieldsSeen.Contains(field), "field \"" + field + "\" was added more than once during flush"); + Debugging.Assert(() => !_fieldsSeen.Contains(field), () => "field \"" + field + "\" was added more than once during flush"); _fieldsSeen.Add(field); return true; } public override void AddNumericField(FieldInfo field, IEnumerable values) { - Debug.Assert(FieldSeen(field.Name)); - Debug.Assert(field.DocValuesType == DocValuesType.NUMERIC || + Debugging.Assert(() => FieldSeen(field.Name)); + Debugging.Assert(() => field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC); WriteFieldEntry(field, DocValuesType.NUMERIC); @@ -117,26 +118,26 @@ public override void AddNumericField(FieldInfo field, IEnumerable values) { long value = n.GetValueOrDefault(); - Debug.Assert(value >= minValue); + Debugging.Assert(() => value >= minValue); var delta = (decimal)value - (decimal)minValue; // LUCENENET specific - use decimal rather than BigInteger string s = delta.ToString(patternString, CultureInfo.InvariantCulture); - Debug.Assert(s.Length == patternString.Length); + Debugging.Assert(() => s.Length == patternString.Length); SimpleTextUtil.Write(data, s, scratch); SimpleTextUtil.WriteNewline(data); SimpleTextUtil.Write(data, n == null ? 
"F" : "T", scratch); SimpleTextUtil.WriteNewline(data); numDocsWritten++; - Debug.Assert(numDocsWritten <= numDocs); + Debugging.Assert(() => numDocsWritten <= numDocs); } - Debug.Assert(numDocs == numDocsWritten, "numDocs=" + numDocs + " numDocsWritten=" + numDocsWritten); + Debugging.Assert(() => numDocs == numDocsWritten, () => "numDocs=" + numDocs + " numDocsWritten=" + numDocsWritten); } public override void AddBinaryField(FieldInfo field, IEnumerable values) { - Debug.Assert(FieldSeen(field.Name)); - Debug.Assert(field.DocValuesType == DocValuesType.BINARY); + Debugging.Assert(() => FieldSeen(field.Name)); + Debugging.Assert(() => field.DocValuesType == DocValuesType.BINARY); var maxLength = 0; foreach (var value in values) @@ -191,13 +192,13 @@ public override void AddBinaryField(FieldInfo field, IEnumerable value numDocsWritten++; } - Debug.Assert(numDocs == numDocsWritten); + Debugging.Assert(() => numDocs == numDocsWritten); } public override void AddSortedField(FieldInfo field, IEnumerable values, IEnumerable docToOrd) { - Debug.Assert(FieldSeen(field.Name)); - Debug.Assert(field.DocValuesType == DocValuesType.SORTED); + Debugging.Assert(() => FieldSeen(field.Name)); + Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED); WriteFieldEntry(field, DocValuesType.SORTED); int valueCount = 0; @@ -267,10 +268,10 @@ public override void AddSortedField(FieldInfo field, IEnumerable value } SimpleTextUtil.WriteNewline(data); valuesSeen++; - Debug.Assert(valuesSeen <= valueCount); + Debugging.Assert(() => valuesSeen <= valueCount); } - Debug.Assert(valuesSeen == valueCount); + Debugging.Assert(() => valuesSeen == valueCount); foreach (var ord in docToOrd) { @@ -282,8 +283,8 @@ public override void AddSortedField(FieldInfo field, IEnumerable value public override void AddSortedSetField(FieldInfo field, IEnumerable values, IEnumerable docToOrdCount, IEnumerable ords) { - Debug.Assert(FieldSeen(field.Name)); - Debug.Assert(field.DocValuesType == DocValuesType.SORTED_SET); + Debugging.Assert(() => FieldSeen(field.Name)); + Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED_SET); WriteFieldEntry(field, DocValuesType.SORTED_SET); long valueCount = 0; @@ -374,10 +375,10 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va } SimpleTextUtil.WriteNewline(data); valuesSeen++; - Debug.Assert(valuesSeen <= valueCount); + Debugging.Assert(() => valuesSeen <= valueCount); } - Debug.Assert(valuesSeen == valueCount); + Debugging.Assert(() => valuesSeen == valueCount); using (var ordStream = ords.GetEnumerator()) { @@ -425,7 +426,7 @@ protected override void Dispose(bool disposing) var success = false; try { - Debug.Assert(_fieldsSeen.Count > 0); + Debugging.Assert(() => _fieldsSeen.Count > 0); // java : sheisty to do this here? 
SimpleTextUtil.Write(data, END); SimpleTextUtil.WriteNewline(data); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs index ea658b009c..9c50ef3a21 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; @@ -56,29 +57,29 @@ public override FieldInfos Read(Directory directory, string segmentName, string { SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMFIELDS)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMFIELDS)); var size = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUMFIELDS.Length, scratch), CultureInfo.InvariantCulture); var infos = new FieldInfo[size]; for (var i = 0; i < size; i++) { SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NAME)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NAME)); string name = ReadString(SimpleTextFieldInfosWriter.NAME.Length, scratch); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMBER)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMBER)); int fieldNumber = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUMBER.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ISINDEXED)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ISINDEXED)); bool isIndexed = Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.ISINDEXED.Length, scratch), CultureInfo.InvariantCulture); IndexOptions indexOptions; if (isIndexed) { SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.INDEXOPTIONS)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.INDEXOPTIONS)); indexOptions = (IndexOptions)Enum.Parse(typeof(IndexOptions), ReadString(SimpleTextFieldInfosWriter.INDEXOPTIONS.Length, scratch)); } @@ -88,46 +89,46 @@ public override FieldInfos Read(Directory directory, string segmentName, string } SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.STORETV)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.STORETV)); bool storeTermVector = Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.STORETV.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.PAYLOADS)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.PAYLOADS)); bool storePayloads = Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.PAYLOADS.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS)); bool omitNorms = 
!Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.NORMS.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS_TYPE)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS_TYPE)); string nrmType = ReadString(SimpleTextFieldInfosWriter.NORMS_TYPE.Length, scratch); Index.DocValuesType normsType = DocValuesType(nrmType); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES)); string dvType = ReadString(SimpleTextFieldInfosWriter.DOCVALUES.Length, scratch); Index.DocValuesType docValuesType = DocValuesType(dvType); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES_GEN)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES_GEN)); long dvGen = Convert.ToInt64(ReadString(SimpleTextFieldInfosWriter.DOCVALUES_GEN.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUM_ATTS)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUM_ATTS)); int numAtts = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUM_ATTS.Length, scratch), CultureInfo.InvariantCulture); IDictionary atts = new Dictionary(); for (int j = 0; j < numAtts; j++) { SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_KEY)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_KEY)); string key = ReadString(SimpleTextFieldInfosWriter.ATT_KEY.Length, scratch); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_VALUE)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_VALUE)); string value = ReadString(SimpleTextFieldInfosWriter.ATT_VALUE.Length, scratch); atts[key] = value; } diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs index 82735dcbdb..02b8cb2418 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs @@ -1,4 +1,5 @@ -using System.Collections.Generic; +using Lucene.Net.Diagnostics; +using System.Collections.Generic; using System.Diagnostics; using System.Globalization; @@ -91,7 +92,7 @@ public override void Write(Directory directory, string segmentName, string segme if (fi.IsIndexed) { - Debug.Assert(fi.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); + Debugging.Assert(() => fi.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); SimpleTextUtil.Write(output, INDEXOPTIONS); SimpleTextUtil.Write(output, fi.IndexOptions != IndexOptions.NONE ? 
fi.IndexOptions.ToString() : string.Empty, diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs index 33b8bb47b9..04f652277a 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; @@ -313,8 +314,8 @@ public override int NextDoc() } else { - Debug.Assert( - StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || + Debugging.Assert( + () => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || // LUCENENET TODO: This assert fails sometimes, which in turns causes _scratch.Utf8ToString() to throw an index out of range exception StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END) /*, "scratch=" + _scratch.Utf8ToString()*/); @@ -444,7 +445,7 @@ public override int NextDoc() } else { - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END)); if (!first && (_liveDocs == null || _liveDocs.Get(_docId))) @@ -470,7 +471,7 @@ public override int NextPosition() if (_readPositions) { SimpleTextUtil.ReadLine(_in, _scratch); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS), "got line=" + _scratch.Utf8ToString()); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS), () => "got line=" + _scratch.Utf8ToString()); UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.POS.Length, _scratch.Length - SimpleTextFieldsWriter.POS.Length, _scratchUtf162); pos = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length); @@ -483,12 +484,12 @@ public override int NextPosition() if (_readOffsets) { SimpleTextUtil.ReadLine(_in, _scratch); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET), "got line=" + _scratch.Utf8ToString()); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET), () => "got line=" + _scratch.Utf8ToString()); UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.START_OFFSET.Length, _scratch.Length - SimpleTextFieldsWriter.START_OFFSET.Length, _scratchUtf162); _startOffset = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length); SimpleTextUtil.ReadLine(_in, _scratch); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET), "got line=" + _scratch.Utf8ToString()); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET), () => "got line=" + _scratch.Utf8ToString()); UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.END_OFFSET.Length, _scratch.Length - SimpleTextFieldsWriter.END_OFFSET.Length, _scratchUtf162); _endOffset = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs 
b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs index 6e67e7cfab..91562fc1fa 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; @@ -162,9 +163,9 @@ public override void AddPosition(int position, BytesRef payload, int startOffset if (_writeOffsets) { - Debug.Assert(endOffset >= startOffset); - Debug.Assert(startOffset >= _lastStartOffset, - "startOffset=" + startOffset + " lastStartOffset=" + _lastStartOffset); + Debugging.Assert(() => endOffset >= startOffset); + Debugging.Assert(() => startOffset >= _lastStartOffset, + () => "startOffset=" + startOffset + " lastStartOffset=" + _lastStartOffset); _lastStartOffset = startOffset; _outerInstance.Write(START_OFFSET); _outerInstance.Write(Convert.ToString(startOffset, CultureInfo.InvariantCulture)); @@ -176,7 +177,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset if (payload != null && payload.Length > 0) { - Debug.Assert(payload.Length != 0); + Debugging.Assert(() => payload.Length != 0); _outerInstance.Write(PAYLOAD); _outerInstance.Write(payload); _outerInstance.Newline(); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs index 8df76e5007..b7245f7318 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; @@ -66,7 +67,7 @@ public override IMutableBits NewLiveDocs(IBits existing) public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOContext context) { - Debug.Assert(info.HasDeletions); + Debugging.Assert(() => info.HasDeletions); var scratch = new BytesRef(); var scratchUtf16 = new CharsRef(); @@ -79,7 +80,7 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont input = dir.OpenChecksumInput(fileName, context); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SIZE)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SIZE)); var size = ParseInt32At(scratch, SIZE.Length, scratchUtf16); var bits = new BitSet(size); @@ -87,7 +88,7 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont SimpleTextUtil.ReadLine(input, scratch); while (!scratch.Equals(END)) { - Debug.Assert(StringHelper.StartsWith(scratch, DOC)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, DOC)); var docid = ParseInt32At(scratch, DOC.Length, scratchUtf16); bits.Set(docid); SimpleTextUtil.ReadLine(input, scratch); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs index aa44774542..8be96c18c3 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; @@ -52,43 +53,43 @@ public override SegmentInfo Read(Directory directory, string segmentName, IOCont try { 
SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_VERSION)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_VERSION)); string version = ReadString(SimpleTextSegmentInfoWriter.SI_VERSION.Length, scratch); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DOCCOUNT)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DOCCOUNT)); int docCount = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_DOCCOUNT.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_USECOMPOUND)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_USECOMPOUND)); bool isCompoundFile = Convert.ToBoolean(ReadString(SimpleTextSegmentInfoWriter.SI_USECOMPOUND.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_DIAG)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_DIAG)); int numDiag = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_NUM_DIAG.Length, scratch), CultureInfo.InvariantCulture); IDictionary diagnostics = new Dictionary(); for (int i = 0; i < numDiag; i++) { SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_KEY)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_KEY)); string key = ReadString(SimpleTextSegmentInfoWriter.SI_DIAG_KEY.Length, scratch); SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_VALUE)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_VALUE)); string value = ReadString(SimpleTextSegmentInfoWriter.SI_DIAG_VALUE.Length, scratch); diagnostics[key] = value; } SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_FILES)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_FILES)); int numFiles = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_NUM_FILES.Length, scratch), CultureInfo.InvariantCulture); var files = new JCG.HashSet(); for (int i = 0; i < numFiles; i++) { SimpleTextUtil.ReadLine(input, scratch); - Debug.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_FILE)); + Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_FILE)); string fileName = ReadString(SimpleTextSegmentInfoWriter.SI_FILE.Length, scratch); files.Add(fileName); } diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs index 2a113a6395..ac3e8d7727 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Diagnostics; using System.Globalization; using System.Reflection; @@ -112,26 +113,26 @@ private void ReadIndex(int size) } } SimpleTextUtil.CheckFooter(input); - Debug.Assert(upto == _offsets.Length); + 
Debugging.Assert(() => upto == _offsets.Length); } public override void VisitDocument(int n, StoredFieldVisitor visitor) { _input.Seek(_offsets[n]); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NUM)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NUM)); var numFields = ParseInt32At(SimpleTextStoredFieldsWriter.NUM.Length); for (var i = 0; i < numFields; i++) { ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.FIELD)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.FIELD)); int fieldNumber = ParseInt32At(SimpleTextStoredFieldsWriter.FIELD.Length); FieldInfo fieldInfo = _fieldInfos.FieldInfo(fieldNumber); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NAME)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NAME)); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.TYPE)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.TYPE)); BytesRef type; if (EqualsAt(SimpleTextStoredFieldsWriter.TYPE_STRING, _scratch, SimpleTextStoredFieldsWriter.TYPE.Length)) @@ -170,7 +171,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor) break; case StoredFieldVisitor.Status.NO: ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE)); break; case StoredFieldVisitor.Status.STOP: return; @@ -181,7 +182,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor) private void ReadField(BytesRef type, FieldInfo fieldInfo, StoredFieldVisitor visitor) { ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE)); if (Equals(type, SimpleTextStoredFieldsWriter.TYPE_STRING)) { visitor.StringField(fieldInfo, diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs index 1fe22523f9..778219ea01 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using System; using System.Collections.Generic; using System.Diagnostics; @@ -108,7 +109,7 @@ private void ReadIndex(int maxDoc) } } SimpleTextUtil.CheckFooter(input); - Debug.Assert(upto == _offsets.Length); + Debugging.Assert(() => upto == _offsets.Length); } public override Fields Get(int doc) @@ -118,7 +119,7 @@ public override Fields Get(int doc) _input.Seek(_offsets[doc]); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.NUMFIELDS)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.NUMFIELDS)); var numFields = ParseInt32At(SimpleTextTermVectorsWriter.NUMFIELDS.Length); if (numFields == 0) { @@ -127,28 +128,28 @@ public override Fields Get(int doc) for (var i = 0; i < numFields; i++) { ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELD)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELD)); // skip fieldNumber: 
ParseInt32At(SimpleTextTermVectorsWriter.FIELD.Length); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDNAME)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDNAME)); var fieldName = ReadString(SimpleTextTermVectorsWriter.FIELDNAME.Length, _scratch); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPOSITIONS)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPOSITIONS)); var positions = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDPOSITIONS.Length, _scratch), CultureInfo.InvariantCulture); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDOFFSETS)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDOFFSETS)); var offsets = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDOFFSETS.Length, _scratch), CultureInfo.InvariantCulture); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPAYLOADS)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPAYLOADS)); var payloads = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDPAYLOADS.Length, _scratch), CultureInfo.InvariantCulture); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDTERMCOUNT)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDTERMCOUNT)); var termCount = ParseInt32At(SimpleTextTermVectorsWriter.FIELDTERMCOUNT.Length); var terms = new SimpleTVTerms(offsets, positions, payloads); @@ -157,7 +158,7 @@ public override Fields Get(int doc) for (var j = 0; j < termCount; j++) { ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMTEXT)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMTEXT)); var term = new BytesRef(); var termLength = _scratch.Length - SimpleTextTermVectorsWriter.TERMTEXT.Length; term.Grow(termLength); @@ -168,7 +169,7 @@ public override Fields Get(int doc) terms.terms.Add(term, postings); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMFREQ)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMFREQ)); postings.freq = ParseInt32At(SimpleTextTermVectorsWriter.TERMFREQ.Length); if (!positions && !offsets) continue; @@ -193,12 +194,12 @@ public override Fields Get(int doc) if (positions) { ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.POSITION)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.POSITION)); postings.positions[k] = ParseInt32At(SimpleTextTermVectorsWriter.POSITION.Length); if (payloads) { ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.PAYLOAD)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.PAYLOAD)); if (_scratch.Length - SimpleTextTermVectorsWriter.PAYLOAD.Length == 0) { postings.payloads[k] = null; @@ -216,11 +217,11 @@ public override Fields Get(int doc) if (!offsets) continue; ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.STARTOFFSET)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.STARTOFFSET)); postings.startOffsets[k] = 
ParseInt32At(SimpleTextTermVectorsWriter.STARTOFFSET.Length); ReadLine(); - Debug.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.ENDOFFSET)); + Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.ENDOFFSET)); postings.endOffsets[k] = ParseInt32At(SimpleTextTermVectorsWriter.ENDOFFSET.Length); } } @@ -443,7 +444,7 @@ public override int Freq { get { - Debug.Assert(_freqRenamed != -1); + Debugging.Assert(() => _freqRenamed != -1); return _freqRenamed; } } @@ -494,7 +495,7 @@ public override int Freq if (_positions != null) return _positions.Length; - Debug.Assert(_startOffsets != null); + Debugging.Assert(() => _startOffsets != null); return _startOffsets.Length; } } @@ -539,7 +540,7 @@ public override BytesRef GetPayload() public override int NextPosition() { - //Debug.Assert((_positions != null && _nextPos < _positions.Length) || + //Debugging.Assert((_positions != null && _nextPos < _positions.Length) || // _startOffsets != null && _nextPos < _startOffsets.Length); // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs index 8ea3c91ffc..011029e3ce 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; @@ -142,7 +143,7 @@ public override void StartTerm(BytesRef term, int freq) public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - Debug.Assert(_positions || _offsets); + Debugging.Assert(() => _positions || _offsets); if (_positions) { @@ -155,7 +156,7 @@ public override void AddPosition(int position, int startOffset, int endOffset, B Write(PAYLOAD); if (payload != null) { - Debug.Assert(payload.Length > 0); + Debugging.Assert(() => payload.Length > 0); Write(payload); } NewLine(); diff --git a/src/Lucene.Net.Expressions/ExpressionComparator.cs b/src/Lucene.Net.Expressions/ExpressionComparator.cs index 96483d7c65..e187adfd7d 100644 --- a/src/Lucene.Net.Expressions/ExpressionComparator.cs +++ b/src/Lucene.Net.Expressions/ExpressionComparator.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Queries.Function; using Lucene.Net.Search; @@ -48,11 +49,11 @@ public override void SetScorer(Scorer scorer) base.SetScorer(scorer); // TODO: might be cleaner to lazy-init 'source' and set scorer after? 
- Debug.Assert(readerContext != null); + Debugging.Assert(() => readerContext != null); try { var context = new Dictionary(); - Debug.Assert(scorer != null); + Debugging.Assert(() => scorer != null); context["scorer"] = scorer; scores = source.GetValues(context, readerContext); } diff --git a/src/Lucene.Net.Expressions/ScoreFunctionValues.cs b/src/Lucene.Net.Expressions/ScoreFunctionValues.cs index 8f66680a79..dd1b052704 100644 --- a/src/Lucene.Net.Expressions/ScoreFunctionValues.cs +++ b/src/Lucene.Net.Expressions/ScoreFunctionValues.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Queries.Function; using Lucene.Net.Queries.Function.DocValues; using Lucene.Net.Search; @@ -42,7 +43,7 @@ public override double DoubleVal(int document) { try { - Debug.Assert(document == scorer.DocID); + Debugging.Assert(() => document == scorer.DocID); return scorer.GetScore(); } catch (IOException exception) diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs index d7f472c851..65e9c52d85 100644 --- a/src/Lucene.Net.Facet/DrillDownQuery.cs +++ b/src/Lucene.Net.Facet/DrillDownQuery.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Support; +using Lucene.Net.Diagnostics; +using Lucene.Net.Support; using System; using System.Collections.Generic; using System.Diagnostics; @@ -85,7 +86,7 @@ internal DrillDownQuery(FacetsConfig config, Filter filter, DrillDownQuery other { throw new ArgumentException("cannot apply filter unless baseQuery isn't null; pass ConstantScoreQuery instead"); } - Debug.Assert(clauses.Length == 1 + other.drillDownDims.Count, clauses.Length + " vs " + (1 + other.drillDownDims.Count)); + Debugging.Assert(() => clauses.Length == 1 + other.drillDownDims.Count, () => clauses.Length + " vs " + (1 + other.drillDownDims.Count)); drillDownDims.PutAll(other.drillDownDims); query.Add(new FilteredQuery(clauses[0].Query, filter), Occur.MUST); for (int i = 1; i < clauses.Length; i++) diff --git a/src/Lucene.Net.Facet/DrillSideways.cs b/src/Lucene.Net.Facet/DrillSideways.cs index 9939adad5b..65d4c10135 100644 --- a/src/Lucene.Net.Facet/DrillSideways.cs +++ b/src/Lucene.Net.Facet/DrillSideways.cs @@ -1,4 +1,5 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Facet.SortedSet; using Lucene.Net.Facet.Taxonomy; using Lucene.Net.Search; @@ -174,7 +175,7 @@ public virtual DrillSidewaysResult Search(DrillDownQuery query, ICollector hitCo } else { - Debug.Assert(clauses.Length == 1 + drillDownDims.Count); + Debugging.Assert(() => clauses.Length == 1 + drillDownDims.Count); baseQuery = clauses[0].Query; startClause = 1; } diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs index 64e4603402..23ed7d6f5f 100644 --- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs +++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs @@ -1,4 +1,4 @@ -// Lucene version compatibility level: 4.10.4 +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -89,7 +89,7 @@ public override bool Score(ICollector collector, int maxDoc) // TODO: if we ever allow null baseScorer ... it will // mean we DO score docs out of order ... 
hmm, or if we // change up the order of the conjuntions below - Debug.Assert(baseScorer != null); + Debugging.Assert(() => baseScorer != null); // some scorers, eg ReqExlScorer, can hit NPE if cost is called after nextDoc long baseQueryCost = baseScorer.GetCost(); @@ -395,7 +395,7 @@ private void DoDrillDownAdvanceScoring(ICollector collector, DocIdSetIterator[] while (slot0 < CHUNK && (slot0 = seen.NextSetBit(slot0)) != -1) { int ddDocID = docIDs[slot0]; - Debug.Assert(ddDocID != -1); + Debugging.Assert(() => ddDocID != -1); int baseDocID = baseScorer.DocID; if (baseDocID < ddDocID) @@ -550,7 +550,7 @@ private void DoUnionScoring(ICollector collector, DocIdSetIterator[] disis, ICol //} // Mark slot as valid: - Debug.Assert(docIDs[slot] != docID, "slot=" + slot + " docID=" + docID); + Debugging.Assert(() => docIDs[slot] != docID, () => "slot=" + slot + " docID=" + docID); docIDs[slot] = docID; scores[slot] = baseScorer.GetScore(); filledSlots[filledCount++] = slot; diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs index ed85c56cf9..2f5610c285 100644 --- a/src/Lucene.Net.Facet/FacetsConfig.cs +++ b/src/Lucene.Net.Facet/FacetsConfig.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Support; +using Lucene.Net.Diagnostics; +using Lucene.Net.Support; using System; using System.Collections.Concurrent; using System.Collections.Generic; @@ -693,7 +694,7 @@ public static string[] StringToPath(string s) } } parts.Add(new string(buffer, 0, upto)); - Debug.Assert(!lastEscape); + Debugging.Assert(() => !lastEscape); return parts.ToArray(); } } diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs index 02cddec7f5..fa124e8c9d 100644 --- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs +++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs @@ -1,4 +1,5 @@ -using System.Collections.Generic; +using Lucene.Net.Diagnostics; +using System.Collections.Generic; using System.Diagnostics; using System.Text; @@ -119,7 +120,7 @@ public Int64RangeCounter(Int64Range[] ranges) } else { - Debug.Assert(flags == 2); + Debugging.Assert(() => flags == 2); // This point is only the end of an interval; attach // it to last interval: elementaryIntervals.Add(new InclusiveRange(prev, v)); @@ -274,7 +275,7 @@ private sealed class InclusiveRange public InclusiveRange(long start, long end) { - Debug.Assert(end >= start); + Debugging.Assert(() => end >= start); this.Start = start; this.End = end; } @@ -348,7 +349,7 @@ internal void AddOutputs(int index, Int64Range range) } else if (left != null) { - Debug.Assert(right != null); + Debugging.Assert(() => right != null); // Recurse: left.AddOutputs(index, range); right.AddOutputs(index, range); @@ -360,7 +361,7 @@ internal void ToString(StringBuilder sb, int depth) Indent(sb, depth); if (left == null) { - Debug.Assert(right == null); + Debugging.Assert(() => right == null); sb.Append("leaf: " + start + " to " + end); } else @@ -376,7 +377,7 @@ internal void ToString(StringBuilder sb, int depth) if (left != null) { - Debug.Assert(right != null); + Debugging.Assert(() => right != null); left.ToString(sb, depth + 1); right.ToString(sb, depth + 1); } diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs index cd78c69e92..9033c19cf0 100644 --- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs +++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs @@ -1,4 +1,5 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using 
System.Diagnostics; @@ -64,7 +65,7 @@ private CategoryPath(CategoryPath copyFrom, int prefixLen) // while the code which calls this method is safe, at some point a test // tripped on AIOOBE in toString, but we failed to reproduce. adding the // assert as a safety check. - Debug.Assert(prefixLen > 0 && prefixLen <= copyFrom.Components.Length, "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); + Debugging.Assert(() => prefixLen > 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); this.Components = copyFrom.Components; Length = prefixLen; } @@ -74,7 +75,7 @@ private CategoryPath(CategoryPath copyFrom, int prefixLen) /// public CategoryPath(params string[] components) { - Debug.Assert(components.Length > 0, "use CategoryPath.EMPTY to create an empty path"); + Debugging.Assert(() => components.Length > 0, () => "use CategoryPath.EMPTY to create an empty path"); foreach (string comp in components) { if (string.IsNullOrEmpty(comp)) diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs index 1eeae986f2..d9846c81f5 100644 --- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs +++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs @@ -1,5 +1,6 @@ using J2N.Threading.Atomic; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; using Lucene.Net.Store; @@ -189,7 +190,7 @@ public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode, // verify (to some extent) that merge policy in effect would preserve category docids if (indexWriter != null) { - Debug.Assert(!(indexWriter.Config.MergePolicy is TieredMergePolicy), "for preserving category docids, merging none-adjacent segments is not allowed"); + Debugging.Assert(() => !(indexWriter.Config.MergePolicy is TieredMergePolicy), () => "for preserving category docids, merging none-adjacent segments is not allowed"); } // after we opened the writer, and the index is locked, it's safe to check @@ -825,7 +826,7 @@ public virtual void SetCacheMissesUntilFill(int i) FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(t.Utf8ToString())); docsEnum = termsEnum.Docs(null, docsEnum, DocsFlags.NONE); bool res = cache.Put(cp, docsEnum.NextDoc() + ctx.DocBase); - Debug.Assert(!res, "entries should not have been evicted from the cache"); + Debugging.Assert(() => !res, () => "entries should not have been evicted from the cache"); } else { @@ -906,7 +907,7 @@ public virtual int GetParent(int ordinal) } int[] parents = GetTaxoArrays().Parents; - Debug.Assert(ordinal < parents.Length, "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !"); + Debugging.Assert(() => ordinal < parents.Length, () => "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !"); return parents[ordinal]; } diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs index 14f9b7c016..10170b98f9 100644 --- a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs +++ b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using 
Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Support; using System; using System.Diagnostics; @@ -76,7 +77,7 @@ public TaxonomyIndexArrays(IndexReader reader) public TaxonomyIndexArrays(IndexReader reader, TaxonomyIndexArrays copyFrom) { - Debug.Assert(copyFrom != null); + Debugging.Assert(() => copyFrom != null); // note that copyParents.length may be equal to reader.maxDoc(). this is not a bug // it may be caused if e.g. the taxonomy segments were merged, and so an updated diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs index 57dfdc44ea..3c8aaa1e23 100644 --- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs +++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs @@ -1,4 +1,5 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Diagnostics; @@ -67,7 +68,7 @@ private FacetLabel(FacetLabel copyFrom, int prefixLen) // while the code which calls this method is safe, at some point a test // tripped on AIOOBE in toString, but we failed to reproduce. adding the // assert as a safety check. - Debug.Assert(prefixLen >= 0 && prefixLen <= copyFrom.Components.Length, "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); + Debugging.Assert(() => prefixLen >= 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); this.Components = copyFrom.Components; Length = prefixLen; } diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs index de8db1a185..ca0976dbda 100644 --- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs +++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections.Generic; using System.Diagnostics; @@ -57,7 +58,7 @@ protected virtual void Rollup() if (ft.IsHierarchical && ft.IsMultiValued == false) { int dimRootOrd = m_taxoReader.GetOrdinal(new FacetLabel(dim)); - Debug.Assert(dimRootOrd > 0); + Debugging.Assert(() => dimRootOrd > 0); m_values[dimRootOrd] += Rollup(m_children[dimRootOrd]); } } diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs index 255bb6eef1..de702f8f79 100644 --- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs +++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs @@ -1,4 +1,5 @@ using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -142,7 +143,7 @@ protected TaxonomyReader() // LUCENENET specific - marked protected instead of p public static T OpenIfChanged(T oldTaxoReader) where T : TaxonomyReader { T newTaxoReader = (T)oldTaxoReader.DoOpenIfChanged(); - Debug.Assert(newTaxoReader != oldTaxoReader); + Debugging.Assert(() => newTaxoReader != oldTaxoReader); return newTaxoReader; } diff --git a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs index 2d1839a2bf..fbc983ad35 100644 --- a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using System; using 
System.Collections.Generic; using System.Diagnostics; @@ -247,7 +248,7 @@ public virtual void Collect(int doc) bottomGroup = m_orderedGroups.Last(); m_orderedGroups.Remove(bottomGroup); } - Debug.Assert(m_orderedGroups.Count == topNGroups - 1); + Debugging.Assert(() => m_orderedGroups.Count == topNGroups - 1); groupMap.Remove(bottomGroup.GroupValue); @@ -262,7 +263,7 @@ public virtual void Collect(int doc) groupMap[bottomGroup.GroupValue] = bottomGroup; m_orderedGroups.Add(bottomGroup); - Debug.Assert(m_orderedGroups.Count == topNGroups); + Debugging.Assert(() => m_orderedGroups.Count == topNGroups); int lastComparerSlot = m_orderedGroups.Last().ComparerSlot; foreach (FieldComparer fc in comparers) @@ -314,7 +315,7 @@ public virtual void Collect(int doc) prevLast = m_orderedGroups.Last(); m_orderedGroups.Remove(group); } - Debug.Assert(m_orderedGroups.Count == topNGroups - 1); + Debugging.Assert(() => m_orderedGroups.Count == topNGroups - 1); } else { @@ -332,7 +333,7 @@ public virtual void Collect(int doc) if (m_orderedGroups != null) { m_orderedGroups.Add(group); - Debug.Assert(m_orderedGroups.Count == topNGroups); + Debugging.Assert(() => m_orderedGroups.Count == topNGroups); var newLast = m_orderedGroups.Last(); // If we changed the value of the last group, or changed which group was last, then update bottom: if (group == newLast || prevLast != newLast) @@ -375,7 +376,7 @@ private void BuildSortedSet() var comparer = new BuildSortedSetComparer(this); m_orderedGroups = new JCG.SortedSet>(comparer); m_orderedGroups.UnionWith(groupMap.Values); - Debug.Assert(m_orderedGroups.Count > 0); + Debugging.Assert(() => m_orderedGroups.Count > 0); foreach (FieldComparer fc in comparers) { diff --git a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs index 6def6b0c40..c5e14e8123 100644 --- a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Util; using System; using System.Collections.Generic; @@ -152,8 +153,8 @@ protected internal override bool LessThan(OneGroup group1, OneGroup group2) { //System.out.println(" ltcheck"); - Debug.Assert(group1 != group2); - Debug.Assert(group1.comparerSlot != group2.comparerSlot); + Debugging.Assert(() => group1 != group2); + Debugging.Assert(() => group1.comparerSlot != group2.comparerSlot); int numComparers = outerInstance.comparers.Length; for (int compIDX = 0; compIDX < numComparers; compIDX++) @@ -220,7 +221,7 @@ private void ProcessGroup() { // Replace bottom element in PQ and then updateTop OneGroup og = groupQueue.Top; - Debug.Assert(og != null); + Debugging.Assert(() => og != null); og.count = subDocUpto; og.topGroupDoc = docBase + topGroupDoc; // Swap pending docs @@ -520,7 +521,7 @@ public virtual void Collect(int doc) { if (subDocUpto == 1) { - Debug.Assert(!queueFull); + Debugging.Assert(() => !queueFull); //System.out.println(" init copy to bottomSlot=" + bottomSlot); foreach (FieldComparer fc in comparers) diff --git a/src/Lucene.Net.Grouping/SearchGroup.cs b/src/Lucene.Net.Grouping/SearchGroup.cs index 2f3a8aae35..540bdd5676 100644 --- a/src/Lucene.Net.Grouping/SearchGroup.cs +++ b/src/Lucene.Net.Grouping/SearchGroup.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Support; +using Lucene.Net.Diagnostics; +using Lucene.Net.Support; using System; using System.Collections.Generic; using System.Diagnostics; @@ -105,12 +106,12 @@ public 
ShardIter(IEnumerable> shard, int shardIndex) { this.shardIndex = shardIndex; iter = shard.GetEnumerator(); - //Debug.Assert(iter.hasNext()); // No reasonable way to do this in .NET + //Debugging.Assert(iter.hasNext()); // No reasonable way to do this in .NET } public ISearchGroup Next() { - //Debug.Assert(iter.hasNext()); // No reasonable way to do this in .NET + //Debugging.Assert(iter.hasNext()); // No reasonable way to do this in .NET ISearchGroup group = iter.Current; if (group.SortValues == null) { @@ -185,12 +186,12 @@ private bool NeverEquals(object other) { if (groupValue == null) { - Debug.Assert(otherMergedGroup.groupValue != null); + Debugging.Assert(() => otherMergedGroup.groupValue != null); } else { - Debug.Assert(!groupValueIsValueType + Debugging.Assert(() => !groupValueIsValueType ? JCG.EqualityComparer.Default.Equals(groupValue, otherMergedGroup.groupValue) // LUCENENET specific - use J2N.Collections.StructuralEqualityComparer.Default.Equals() if we have a reference type @@ -205,7 +206,7 @@ public override bool Equals(object other) { // We never have another MergedGroup instance with // same groupValue - Debug.Assert(NeverEquals(other)); + Debugging.Assert(() => NeverEquals(other)); if (other is MergedGroup otherMergedGroup) { @@ -293,7 +294,7 @@ public virtual int Compare(MergedGroup group, MergedGroup other) } // Tie break by min shard index: - Debug.Assert(group.MinShardIndex != other.MinShardIndex); + Debugging.Assert(() => group.MinShardIndex != other.MinShardIndex); return group.MinShardIndex - other.MinShardIndex; } } @@ -326,7 +327,7 @@ private void UpdateNextGroup(int topN, ShardIter shard) //System.out.println(" new"); mergedGroup = new MergedGroup(group.GroupValue); mergedGroup.MinShardIndex = shard.ShardIndex; - Debug.Assert(group.SortValues != null); + Debugging.Assert(() => group.SortValues != null); mergedGroup.TopValues = group.SortValues; groupsSeen[group.GroupValue] = mergedGroup; mergedGroup.IsInQueue = true; diff --git a/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs b/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs index 12161e2087..5b4ddbd25e 100644 --- a/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs +++ b/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Util; using System.Collections.Generic; using System.Diagnostics; @@ -174,7 +175,7 @@ public override void SetNextReader(AtomicReaderContext context) BytesRef facetEndPrefix = BytesRef.DeepCopyOf(m_facetPrefix); facetEndPrefix.Append(UnicodeUtil.BIG_TERM); m_endFacetOrd = facetFieldTermsIndex.LookupTerm(facetEndPrefix); - Debug.Assert(m_endFacetOrd < 0); + Debugging.Assert(() => m_endFacetOrd < 0); m_endFacetOrd = -m_endFacetOrd - 1; // Points to the ord one higher than facetEndPrefix } else @@ -202,7 +203,7 @@ internal SegmentResult(int[] counts, int total, TermsEnum tenum, int startFacetO this.m_mergePos = startFacetOrd == -1 ? 1 : startFacetOrd + 1; if (m_mergePos < m_maxTermPos) { - Debug.Assert(tenum != null); + Debugging.Assert(() => tenum != null); tenum.SeekExact(startFacetOrd == -1 ? 
0 : startFacetOrd); m_mergeTerm = tenum.Term; } diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs index ed286fca1e..37ed39207b 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs @@ -2,6 +2,7 @@ using J2N; using Lucene.Net.Analysis; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Search.Spans; using Lucene.Net.Util; @@ -291,7 +292,7 @@ public override int StartOffset { get { - Debug.Assert(currentStartOffset >= 0); + Debugging.Assert(() => currentStartOffset >= 0); return currentStartOffset; } } @@ -300,7 +301,7 @@ public override int EndOffset { get { - Debug.Assert(currentEndOffset >= 0); + Debugging.Assert(() => currentEndOffset >= 0); return currentEndOffset; } } diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs index 751cf95fb3..920c9d1d7f 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs @@ -1,4 +1,5 @@ #if FEATURE_BREAKITERATOR +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System.Collections.Generic; using System.Diagnostics; @@ -42,7 +43,7 @@ public sealed class Passage internal void AddMatch(int startOffset, int endOffset, BytesRef term) { - Debug.Assert(startOffset >= this.startOffset && startOffset <= this.endOffset); + Debugging.Assert(() => startOffset >= this.startOffset && startOffset <= this.endOffset); if (numMatches == matchStarts.Length) { int newLength = ArrayUtil.Oversize(numMatches + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF); @@ -56,7 +57,7 @@ internal void AddMatch(int startOffset, int endOffset, BytesRef term) matchEnds = newMatchEnds; matchTerms = newMatchTerms; } - Debug.Assert(matchStarts.Length == matchEnds.Length && matchEnds.Length == matchTerms.Length); + Debugging.Assert(() => matchStarts.Length == matchEnds.Length && matchEnds.Length == matchTerms.Length); matchStarts[numMatches] = startOffset; matchEnds[numMatches] = endOffset; matchTerms[numMatches] = term; diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs index b894dbe424..7244c75b6c 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs @@ -14,6 +14,7 @@ using System.IO; using System.Text; using JCG = J2N.Collections.Generic; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Search.PostingsHighlight { @@ -541,7 +542,7 @@ private IDictionary HighlightField(string field, string[] contents, AtomicReaderContext subContext = leaves[leaf]; AtomicReader r = subContext.AtomicReader; - Debug.Assert(leaf >= lastLeaf); // increasing order + Debugging.Assert(() => leaf >= lastLeaf); // increasing order // if the segment has changed, we must initialize new enums. if (leaf != lastLeaf) @@ -670,7 +671,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength // LUCENE-5166: this hit would span the content limit... however more valid // hits may exist (they are sorted by start). so we pretend like we never // saw this term, it won't cause a passage to be added to passageQueue or anything. 
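// ---- Illustrative sketch (not part of the patch) ----
// The conversion pattern applied throughout the hunks above: both the condition and
// the failure message move into delegates, so neither is evaluated unless assertions
// are enabled at runtime. Unlike System.Diagnostics.Debug.Assert, which is compiled
// away entirely in Release builds, the delegate form can still fire in Release.
// Assumes the snippet compiles where Lucene.Net.Diagnostics.Debugging is visible;
// CheckLeafOrder and its message text are hypothetical, for illustration only.
using Lucene.Net.Diagnostics;

internal static class AssertConversionSketch
{
    internal static void CheckLeafOrder(int leaf, int lastLeaf)
    {
        // Before: the message string is built eagerly, and the whole call
        // vanishes from Release builds.
        //System.Diagnostics.Debug.Assert(leaf >= lastLeaf, "leaf=" + leaf + " lastLeaf=" + lastLeaf);

        // After: the condition runs only when asserts are enabled, and the
        // message is built only if the condition actually fails.
        Debugging.Assert(() => leaf >= lastLeaf, () => "leaf=" + leaf + " lastLeaf=" + lastLeaf);
    }
}
// ---- End sketch ----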
- Debug.Assert(EMPTY.StartOffset == int.MaxValue); + Debugging.Assert(() => EMPTY.StartOffset == int.MaxValue); if (start < contentLength && end > contentLength) { continue; @@ -713,7 +714,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength return passages; } // advance breakiterator - Debug.Assert(BreakIterator.Done < 0); + Debugging.Assert(() => BreakIterator.Done < 0); current.startOffset = Math.Max(bi.Preceding(start + 1), 0); current.endOffset = Math.Min(bi.Next(), contentLength); } @@ -726,7 +727,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength { // multitermquery match, pull from payload term = off.dp.GetPayload(); - Debug.Assert(term != null); + Debugging.Assert(() => term != null); } current.AddMatch(start, end, term); if (off.pos == dp.Freq) @@ -750,7 +751,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength } // Dead code but compiler disagrees: - Debug.Assert(false); + Debugging.Assert(() => false); return null; } @@ -765,7 +766,7 @@ protected virtual Passage[] GetEmptyHighlight(string fieldName, BreakIterator bi // BreakIterator should be un-next'd: List passages = new List(); int pos = bi.Current; - Debug.Assert(pos == 0); + Debugging.Assert(() => pos == 0); while (passages.Count < maxPassages) { int next = bi.Next(); @@ -882,7 +883,7 @@ private class LimitedStoredFieldVisitor : StoredFieldVisitor public LimitedStoredFieldVisitor(string[] fields, char[] valueSeparators, int maxLength) { - Debug.Assert(fields.Length == valueSeparators.Length); + Debugging.Assert(() => fields.Length == valueSeparators.Length); this.fields = fields; this.valueSeparators = valueSeparators; this.maxLength = maxLength; @@ -895,7 +896,7 @@ public LimitedStoredFieldVisitor(string[] fields, char[] valueSeparators, int ma public override void StringField(Index.FieldInfo fieldInfo, string value) { - Debug.Assert(currentField >= 0); + Debugging.Assert(() => currentField >= 0); StringBuilder builder = builders[currentField]; if (builder.Length > 0 && builder.Length < maxLength) { diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs index a191c1191d..60f133f943 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; +using System; using System.Collections.Generic; using System.Diagnostics; using WeightedPhraseInfo = Lucene.Net.Search.VectorHighlight.FieldPhraseList.WeightedPhraseInfo; @@ -141,7 +142,7 @@ public IteratorQueue(IEnumerator iter) { this.iter = iter; T removeTop = RemoveTop(); - Debug.Assert( removeTop == null); + Debugging.Assert(() => removeTop == null); } public T Top() diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs index 44a9a576d5..1b5798dcf5 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Util; using System; using System.Collections.Generic; @@ -139,7 +140,7 @@ public FieldTermStack(IndexReader reader, int docId, string fieldName, FieldQuer TermInfo current = termList[i]; if (current.Position == currentPos) { - Debug.Assert(previous != null); + 
Debugging.Assert(() => previous != null); previous.SetNext(current); previous = current; //iterator.Remove(); diff --git a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs index 5ed0ed5be7..02974b5eaa 100644 --- a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Util; using System; @@ -245,7 +246,7 @@ public override int NextDoc() } } - Debug.Assert(_childDoc < _parentDoc, "childDoc=" + _childDoc + " parentDoc=" + _parentDoc); + Debugging.Assert(() => _childDoc < _parentDoc, () => "childDoc=" + _childDoc + " parentDoc=" + _parentDoc); _childDoc++; if (_acceptDocs != null && !_acceptDocs.Get(_childDoc)) { @@ -279,7 +280,7 @@ public override float GetScore() public override int Advance(int childTarget) { - Debug.Assert(childTarget >= _parentBits.Length || !_parentBits.Get(childTarget)); + Debugging.Assert(() => childTarget >= _parentBits.Length || !_parentBits.Get(childTarget)); //System.out.println("Q.advance childTarget=" + childTarget); if (childTarget == NO_MORE_DOCS) @@ -288,14 +289,14 @@ public override int Advance(int childTarget) return _childDoc = _parentDoc = NO_MORE_DOCS; } - Debug.Assert(_childDoc == -1 || childTarget != _parentDoc, "childTarget=" + childTarget); + Debugging.Assert(() => _childDoc == -1 || childTarget != _parentDoc, () => "childTarget=" + childTarget); if (_childDoc == -1 || childTarget > _parentDoc) { // Advance to new parent: _parentDoc = _parentScorer.Advance(childTarget); ValidateParentDoc(); //System.out.println(" advance to parentDoc=" + parentDoc); - Debug.Assert(_parentDoc > childTarget); + Debugging.Assert(() => _parentDoc > childTarget); if (_parentDoc == NO_MORE_DOCS) { //System.out.println(" END"); @@ -311,7 +312,7 @@ public override int Advance(int childTarget) childTarget = Math.Max(childTarget, firstChild); } - Debug.Assert(childTarget < _parentDoc); + Debugging.Assert(() => childTarget < _parentDoc); // Advance within children of current parent: _childDoc = childTarget; diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs index ed90514da5..9cd5bee258 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Search.Grouping; using Lucene.Net.Support; @@ -283,7 +284,7 @@ private void CopyGroups(OneGroup og) og.counts[scorerIDX] = joinScorer.ChildCount; //System.out.println(" count=" + og.counts[scorerIDX]); og.docs[scorerIDX] = joinScorer.SwapChildDocs(og.docs[scorerIDX]); - Debug.Assert(og.docs[scorerIDX].Length >= og.counts[scorerIDX], "length=" + og.docs[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); + Debugging.Assert(() => og.docs[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.docs[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); //System.out.println(" len=" + og.docs[scorerIDX].length); /* for(int idx=0;idx= og.counts[scorerIDX], "length=" + og.scores[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); + Debugging.Assert(() => og.scores[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.scores[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); } } else diff --git 
a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs index 4d51c530d7..01df4a512d 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Util; using System; @@ -283,7 +284,7 @@ public override int NextDoc() } //System.out.println(" parentDoc=" + parentDoc); - Debug.Assert(_parentDoc != -1); + Debugging.Assert(() => _parentDoc != -1); //System.out.println(" nextChildDoc=" + nextChildDoc); if (_acceptDocs != null && !_acceptDocs.Get(_parentDoc)) @@ -401,7 +402,7 @@ public override int Advance(int parentTarget) _prevParentDoc = _parentBits.PrevSetBit(parentTarget - 1); //System.out.println(" rolled back to prevParentDoc=" + prevParentDoc + " vs parentDoc=" + parentDoc); - Debug.Assert(_prevParentDoc >= _parentDoc); + Debugging.Assert(() => _prevParentDoc >= _parentDoc); if (_prevParentDoc > _nextChildDoc) { _nextChildDoc = _childScorer.Advance(_prevParentDoc); diff --git a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs index 2bf296e397..d1a97ee632 100644 --- a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs @@ -1,4 +1,5 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Search; using Lucene.Net.Search.Similarities; using Lucene.Net.Util; @@ -249,7 +250,7 @@ internal int BinarySearch(BytesRef b, BytesRef bytesRef, int low, int high, Byte return mid; } } - Debug.Assert(comparer.Compare(bytesRef, b) != 0); + Debugging.Assert(() => comparer.Compare(bytesRef, b) != 0); return -(low + 1); } @@ -284,7 +285,7 @@ public override SeekStatus SeekCeil(BytesRef text) public override void SeekExact(long ord) { - Debug.Assert(ord < info.terms.Count); + Debugging.Assert(() => ord < info.terms.Count); termUpto = (int)ord; } @@ -331,7 +332,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - Debug.Assert(state != null); + Debugging.Assert(() => state != null); this.SeekExact(((OrdTermState)state).Ord); } @@ -449,8 +450,8 @@ public override int Advance(int target) public override int NextPosition() { - Debug.Assert(posUpto++ < freq); - Debug.Assert(!sliceReader.IsEndOfSlice, " stores offsets : " + startOffset); + Debugging.Assert(() => posUpto++ < freq); + Debugging.Assert(() => !sliceReader.IsEndOfSlice, () => " stores offsets : " + startOffset); if (outerInstance.outerInstance.storeOffsets) { int pos = sliceReader.ReadInt32(); diff --git a/src/Lucene.Net.Memory/MemoryIndex.cs b/src/Lucene.Net.Memory/MemoryIndex.cs index 9c91f32c09..f38d804bf9 100644 --- a/src/Lucene.Net.Memory/MemoryIndex.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.cs @@ -1,5 +1,6 @@ using Lucene.Net.Analysis; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Search; using Lucene.Net.Util; using System; @@ -216,7 +217,7 @@ internal MemoryIndex(bool storeOffsets, long maxReusedBytes) this.bytesUsed = Counter.NewCounter(); int maxBufferedByteBlocks = (int)((maxReusedBytes / 2) / ByteBlockPool.BYTE_BLOCK_SIZE); int maxBufferedIntBlocks = (int)((maxReusedBytes - (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE)) / (Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32)); - 
Debug.Assert((maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32) <= maxReusedBytes); + Debugging.Assert(() => (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32) <= maxReusedBytes); byteBlockPool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, maxBufferedByteBlocks, bytesUsed)); intBlockPool = new Int32BlockPool(new RecyclingInt32BlockAllocator(Int32BlockPool.INT32_BLOCK_SIZE, maxBufferedIntBlocks, bytesUsed)); postingsWriter = new Int32BlockPool.SliceWriter(intBlockPool); @@ -738,9 +739,9 @@ public override int[] Init() start = new int[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_INT32)]; end = new int[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_INT32)]; freq = new int[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_INT32)]; - Debug.Assert(start.Length >= ord.Length); - Debug.Assert(end.Length >= ord.Length); - Debug.Assert(freq.Length >= ord.Length); + Debugging.Assert(() => start.Length >= ord.Length); + Debugging.Assert(() => end.Length >= ord.Length); + Debugging.Assert(() => freq.Length >= ord.Length); return ord; } @@ -753,9 +754,9 @@ public override int[] Grow() end = ArrayUtil.Grow(end, ord.Length); freq = ArrayUtil.Grow(freq, ord.Length); } - Debug.Assert(start.Length >= ord.Length); - Debug.Assert(end.Length >= ord.Length); - Debug.Assert(freq.Length >= ord.Length); + Debugging.Assert(() => start.Length >= ord.Length); + Debugging.Assert(() => end.Length >= ord.Length); + Debugging.Assert(() => freq.Length >= ord.Length); return ord; } diff --git a/src/Lucene.Net.Misc/Document/LazyDocument.cs b/src/Lucene.Net.Misc/Document/LazyDocument.cs index 598561a9ea..72baf7a1c2 100644 --- a/src/Lucene.Net.Misc/Document/LazyDocument.cs +++ b/src/Lucene.Net.Misc/Document/LazyDocument.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Util; using System; @@ -119,8 +120,8 @@ private void FetchRealValues(string name, int fieldNum) fields.TryGetValue(fieldNum, out lazyValues); IIndexableField[] realValues = d.GetFields(name); - Debug.Assert(realValues.Length <= lazyValues.Count, - "More lazy values then real values for field: " + name); + Debugging.Assert(() => realValues.Length <= lazyValues.Count, + () => "More lazy values than real values for field: " + name); for (int i = 0; i < lazyValues.Count; i++) { @@ -163,8 +164,8 @@ internal virtual IIndexableField GetRealValue() { outerInstance.FetchRealValues(name, fieldNum); } - Debug.Assert(HasBeenLoaded, "field value was not lazy loaded"); - Debug.Assert(realValue.Name.Equals(Name, StringComparison.Ordinal), "realvalue name != name: " + realValue.Name + " != " + Name); + Debugging.Assert(() => HasBeenLoaded, () => "field value was not lazy loaded"); + Debugging.Assert(() => realValue.Name.Equals(Name, StringComparison.Ordinal), () => "realvalue name != name: " + realValue.Name + " != " + Name); return realValue; } diff --git a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs index 66d40b3f48..1b85aa0f02 100644 --- a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs @@ -1,4 +1,5 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Util;
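// ---- Illustrative sketch (not part of the patch) ----
// The parallel-array invariant asserted in the MemoryIndex Init()/Grow() hunks above:
// after the ordinal array grows, the companion arrays must be at least as long.
// ArrayUtil.Grow over-allocates (via ArrayUtil.Oversize), so the asserts are cheap
// postcondition guards that can now also run in Release builds when assertions are
// enabled. GrowParallel is a hypothetical helper name for illustration.
using Lucene.Net.Diagnostics;
using Lucene.Net.Util;

internal static class ParallelArraySketch
{
    internal static int[] GrowParallel(int[] ord, ref int[] start)
    {
        ord = ArrayUtil.Grow(ord, ord.Length + 1);
        if (start.Length < ord.Length)
        {
            start = ArrayUtil.Grow(start, ord.Length);
        }
        Debugging.Assert(() => start.Length >= ord.Length);
        return ord;
    }
}
// ---- End sketch ----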
using System; @@ -296,7 +297,7 @@ public void UndeleteAll() if (m_input.HasDeletions) { IBits oldLiveDocs = m_input.LiveDocs; - Debug.Assert(oldLiveDocs != null); + Debugging.Assert(() => oldLiveDocs != null); // this loop is a little bit ineffective, as Bits has no nextSetBit(): for (int i = 0; i < maxDoc; i++) { diff --git a/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs b/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs index 4576f50b63..6377c8cd23 100644 --- a/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Search; +using Lucene.Net.Diagnostics; +using Lucene.Net.Search; using Lucene.Net.Store; using Lucene.Net.Util; using System.Collections.Generic; @@ -155,7 +156,7 @@ public DocumentFilteredAtomicIndexReader(AtomicReaderContext context, Filter pre if (m_input.HasDeletions) { IBits oldLiveDocs = m_input.LiveDocs; - Debug.Assert(oldLiveDocs != null); + Debugging.Assert(() => oldLiveDocs != null); DocIdSetIterator it = bits.GetIterator(); for (int i = it.NextDoc(); i < maxDoc; i = it.NextDoc()) { diff --git a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs index 84a406f00e..b812d2a21c 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Search; +using Lucene.Net.Diagnostics; +using Lucene.Net.Search; using Lucene.Net.Util; using Lucene.Net.Util.Packed; using System; @@ -82,8 +83,8 @@ internal static bool IsConsistent(DocMap docMap) { int newID = docMap.OldToNew(i); int oldID = docMap.NewToOld(newID); - Debug.Assert(newID >= 0 && newID < maxDoc, "doc IDs must be in [0-" + maxDoc + "[, got " + newID); - Debug.Assert(i == oldID, "mapping is inconsistent: " + i + " --oldToNew--> " + newID + " --newToOld--> " + oldID); + Debugging.Assert(() => newID >= 0 && newID < maxDoc, () => "doc IDs must be in [0-" + maxDoc + "[, got " + newID); + Debugging.Assert(() => i == oldID, () => "mapping is inconsistent: " + i + " --oldToNew--> " + newID + " --newToOld--> " + oldID); if (i != oldID || newID < 0 || newID >= maxDoc) { return false; diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs index 7d87a0eb19..0ff309ee81 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Search; +using Lucene.Net.Diagnostics; +using Lucene.Net.Search; using Lucene.Net.Store; using Lucene.Net.Support; using Lucene.Net.Util; @@ -760,7 +761,7 @@ internal static AtomicReader Wrap(AtomicReader reader, Sorter.DocMap docMap) { throw new ArgumentException("reader.MaxDoc should be equal to docMap.Count, got" + reader.MaxDoc + " != " + docMap.Count); } - Debug.Assert(Sorter.IsConsistent(docMap)); + Debugging.Assert(() => Sorter.IsConsistent(docMap)); return new SortingAtomicReader(reader, docMap); } diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs index 469290178b..58719d32c3 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs @@ -1,11 +1,11 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Search; using Lucene.Net.Store; using Lucene.Net.Util; using Lucene.Net.Util.Packed; using System; using System.Collections.Generic; -using System.Diagnostics; namespace 
Lucene.Net.Index.Sorter { @@ -134,7 +134,7 @@ public override MergePolicy.DocMap GetDocMap(MergeState mergeState) { return base.GetDocMap(mergeState); } - Debug.Assert(mergeState.DocMaps.Length == 1); // we returned a singleton reader + Debugging.Assert(() => mergeState.DocMaps.Length == 1); // we returned a singleton reader MonotonicAppendingInt64Buffer deletes = GetDeletes(unsortedReaders); return new DocMapAnonymousInnerClassHelper(this, mergeState, deletes); } diff --git a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs index d77ffb6e7c..91dc58bffe 100644 --- a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs +++ b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Store; +using Lucene.Net.Diagnostics; +using Lucene.Net.Store; using System.Collections; using System.Collections.Generic; using System.Diagnostics; @@ -87,7 +88,7 @@ public override object Subtract(object @object, object inc) public override object Add(object prefix, object output) { - Debug.Assert(!(prefix is IList)); + Debugging.Assert(() => !(prefix is IList)); if (!(output is IList)) { return outputs.Add((T)prefix, (T)output); @@ -106,7 +107,7 @@ public override object Add(object prefix, object output) public override void Write(object output, DataOutput @out) { - Debug.Assert(!(output is IList)); + Debugging.Assert(() => !(output is IList)); outputs.Write((T)output, @out); } diff --git a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs index 78954fdd63..d6ca39f4e0 100644 --- a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs +++ b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Store; +using Lucene.Net.Diagnostics; +using Lucene.Net.Store; using System; using System.Diagnostics; using System.Runtime.CompilerServices; @@ -69,8 +70,8 @@ public TwoInt64s(long first, long second) { this.first = first; this.second = second; - Debug.Assert(first >= 0); - Debug.Assert(second >= 0); + Debugging.Assert(() => first >= 0); + Debugging.Assert(() => second >= 0); } public override string ToString() @@ -133,8 +134,8 @@ public TwoInt64s Get(long first, long second) public override object Common(object output1, object output2) { - Debug.Assert(Valid(output1, false)); - Debug.Assert(Valid(output2, false)); + Debugging.Assert(() => Valid(output1, false)); + Debugging.Assert(() => Valid(output2, false)); long? output1_ = (long?)output1; long? output2_ = (long?)output2; if (output1_ == NO_OUTPUT || output2_ == NO_OUTPUT) @@ -143,8 +144,8 @@ public override object Common(object output1, object output2) } else if (doShare) { - Debug.Assert(output1_ > 0); - Debug.Assert(output2_ > 0); + Debugging.Assert(() => output1_ > 0); + Debugging.Assert(() => output2_ > 0); return Math.Min(output1_.GetValueOrDefault(), output2_.GetValueOrDefault()); } else if (output1_.Equals(output2_)) @@ -159,11 +160,11 @@ public override object Common(object output1, object output2) public override object Subtract(object output, object inc) { - Debug.Assert(Valid(output, false)); - Debug.Assert(Valid(inc, false)); + Debugging.Assert(() => Valid(output, false)); + Debugging.Assert(() => Valid(inc, false)); long? output2 = (long?)output; long? 
inc2 = (long?)inc; - Debug.Assert(output2 >= inc2); + Debugging.Assert(() => output2 >= inc2); if (inc2 == NO_OUTPUT) { @@ -181,8 +182,8 @@ public override object Subtract(object output, object inc) public override object Add(object prefix, object output) { - Debug.Assert(Valid(prefix, false)); - Debug.Assert(Valid(output, true)); + Debugging.Assert(() => Valid(prefix, false)); + Debugging.Assert(() => Valid(output, true)); long? prefix2 = (long?)prefix; if (output is long?) { @@ -210,7 +211,7 @@ public override object Add(object prefix, object output) public override void Write(object output, DataOutput @out) { - Debug.Assert(Valid(output, true)); + Debugging.Assert(() => Valid(output, true)); if (output is long?) { long? output2 = (long?)output; @@ -251,9 +252,9 @@ public override object Read(DataInput @in) private bool Valid(long? o) { - Debug.Assert(o != null); - Debug.Assert(o is long?); - Debug.Assert(o == NO_OUTPUT || o > 0); + Debugging.Assert(() => o != null); + Debugging.Assert(() => o is long?); + Debugging.Assert(() => o == NO_OUTPUT || o > 0); return true; } @@ -262,7 +263,7 @@ private bool Valid(object o, bool allowDouble) { if (!allowDouble) { - Debug.Assert(o is long?); + Debugging.Assert(() => o is long?); return Valid((long?)o); } else if (o is TwoInt64s) @@ -285,8 +286,8 @@ public override string OutputToString(object output) [MethodImpl(MethodImplOptions.NoInlining)] public override object Merge(object first, object second) { - Debug.Assert(Valid(first, false)); - Debug.Assert(Valid(second, false)); + Debugging.Assert(() => Valid(first, false)); + Debugging.Assert(() => Valid(second, false)); return new TwoInt64s(((long?)first).GetValueOrDefault(), ((long?)second).GetValueOrDefault()); } } diff --git a/src/Lucene.Net.Queries/BooleanFilter.cs b/src/Lucene.Net.Queries/BooleanFilter.cs index 716b2a007f..00744e12f8 100644 --- a/src/Lucene.Net.Queries/BooleanFilter.cs +++ b/src/Lucene.Net.Queries/BooleanFilter.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Util; using System.Collections; @@ -77,7 +78,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo { if (res == null) { - Debug.Assert(!hasShouldClauses); + Debugging.Assert(() => !hasShouldClauses); res = new FixedBitSet(reader.MaxDoc); res.Set(0, reader.MaxDoc); // NOTE: may set bits on deleted docs } diff --git a/src/Lucene.Net.Queries/CommonTermsQuery.cs b/src/Lucene.Net.Queries/CommonTermsQuery.cs index 212345d9ad..db42ad7072 100644 --- a/src/Lucene.Net.Queries/CommonTermsQuery.cs +++ b/src/Lucene.Net.Queries/CommonTermsQuery.cs @@ -1,4 +1,5 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Util; @@ -278,7 +279,7 @@ public virtual void CollectTermContext(IndexReader reader, IList termsEnum != null); if (termsEnum == TermsEnum.EMPTY) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs index 502b89ca8a..ea7d8449e0 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs @@ -1,6 +1,7 @@ using J2N.Text; using Lucene.Net.Analysis; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using 
Lucene.Net.QueryParsers.Flexible.Core.Nodes; using Lucene.Net.QueryParsers.Flexible.Core.Processors; using Lucene.Net.QueryParsers.Flexible.Standard.Config; @@ -181,7 +182,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) { bool hasNext; hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -212,7 +213,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -239,7 +240,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -306,7 +307,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); if (posIncrAtt != null) { @@ -378,7 +379,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); if (posIncrAtt != null) diff --git a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs index 0969d82b00..1be52702c4 100644 --- a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs +++ b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Util; @@ -241,7 +242,7 @@ private void ParseSubQuery(State state) private void ConsumeSubQuery(State state) { - Debug.Assert((m_flags & Operator.PRECEDENCE_OPERATORS) != 0); + Debugging.Assert(() => (m_flags & Operator.PRECEDENCE_OPERATORS) != 0); int start = ++state.Index; int precedence = 1; bool escaped = false; @@ -314,7 +315,7 @@ private void ConsumeSubQuery(State state) private void ConsumePhrase(State state) { - Debug.Assert((m_flags & Operator.PHRASE_OPERATOR) != 0); + Debugging.Assert(() => (m_flags & Operator.PHRASE_OPERATOR) != 0); int start = ++state.Index; int copied = 0; bool escaped = false; diff --git a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs index 0864fc79d8..4c9be8f07f 100644 --- a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs +++ b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs @@ -1,4 +1,5 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Facet.Taxonomy.Directory; using Lucene.Net.Facet.Taxonomy.WriterCache; using Lucene.Net.Index; @@ -186,7 +187,7 @@ public virtual int CompareTo(IRevision other) /// public virtual Stream Open(string source, string fileName) { - Debug.Assert(source.Equals(INDEX_SOURCE, StringComparison.Ordinal) || source.Equals(TAXONOMY_SOURCE, StringComparison.Ordinal), string.Format("invalid source; expected=({0} or {1}) got={2}", INDEX_SOURCE, TAXONOMY_SOURCE, source)); + Debugging.Assert(() => source.Equals(INDEX_SOURCE, StringComparison.Ordinal) || source.Equals(TAXONOMY_SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected=({0} or {1}) got={2}", INDEX_SOURCE, 
TAXONOMY_SOURCE, source)); IndexCommit commit = source.Equals(INDEX_SOURCE, StringComparison.Ordinal) ? indexCommit : taxonomyCommit; return new IndexInputStream(commit.Directory.OpenInput(fileName, IOContext.READ_ONCE)); } diff --git a/src/Lucene.Net.Replicator/IndexRevision.cs b/src/Lucene.Net.Replicator/IndexRevision.cs index 1869acfe80..e8895a756d 100644 --- a/src/Lucene.Net.Replicator/IndexRevision.cs +++ b/src/Lucene.Net.Replicator/IndexRevision.cs @@ -1,4 +1,5 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using System; @@ -133,7 +134,7 @@ public virtual int CompareTo(IRevision other) public virtual Stream Open(string source, string fileName) { - Debug.Assert(source.Equals(SOURCE, StringComparison.Ordinal), string.Format("invalid source; expected={0} got={1}", SOURCE, source)); + Debugging.Assert(() => source.Equals(SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected={0} got={1}", SOURCE, source)); return new IndexInputStream(commit.Directory.OpenInput(fileName, IOContext.READ_ONCE)); } diff --git a/src/Lucene.Net.Replicator/ReplicationClient.cs b/src/Lucene.Net.Replicator/ReplicationClient.cs index 454ca58019..f958f52b4d 100644 --- a/src/Lucene.Net.Replicator/ReplicationClient.cs +++ b/src/Lucene.Net.Replicator/ReplicationClient.cs @@ -9,6 +9,7 @@ using System.Threading; using JCG = J2N.Collections.Generic; using Directory = Lucene.Net.Store.Directory; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Replicator { @@ -369,7 +370,7 @@ protected virtual IDictionary> RequiredFiles(IDictio // make sure to preserve revisionFiles order List res = new List(); string source = e.Key; - Debug.Assert(newRevisionFiles.ContainsKey(source), string.Format("source not found in newRevisionFiles: {0}", newRevisionFiles)); + Debugging.Assert(() => newRevisionFiles.ContainsKey(source), () => string.Format("source not found in newRevisionFiles: {0}", newRevisionFiles)); foreach (RevisionFile file in newRevisionFiles[source]) { if (!handlerFiles.Contains(file.FileName)) @@ -415,7 +416,7 @@ public virtual void StartUpdateThread(long intervalMillis, string threadName) updateThread = new ReplicationThread(intervalMillis, threadName, DoUpdate, HandleUpdateException, updateLock); updateThread.Start(); // we rely on isAlive to return true in isUpdateThreadAlive, assert to be on the safe side - Debug.Assert(updateThread.IsAlive, "updateThread started but not alive?"); + Debugging.Assert(() => updateThread.IsAlive, () => "updateThread started but not alive?"); } /// diff --git a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs index 4c08e54929..0a9c457e67 100644 --- a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs +++ b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Util; using System; @@ -190,7 +191,7 @@ protected override SortedDocValues GetSortedDocValues(AtomicReaderContext contex case Selector.MIDDLE_MAX: return new MiddleMaxValue(randomOrds); case Selector.MIN: default: - Debug.Assert(false); + Debugging.Assert(() => false); return null; } } diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs index a9230cf5fb..cfe132c606 100644 --- a/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs +++ 
b/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Spatial.Prefix.Tree; @@ -112,7 +113,7 @@ public BaseTermsEnumTraverser(AbstractPrefixTreeFilter outerInstance, AtomicRead protected virtual void CollectDocs(FixedBitSet bitSet) { //WARN: keep this specialization in sync - Debug.Assert(m_termsEnum != null); + Debugging.Assert(() => m_termsEnum != null); m_docsEnum = m_termsEnum.Docs(m_acceptDocs, m_docsEnum, DocsFlags.NONE); int docid; while ((docid = m_docsEnum.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS) diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs index 31f84bfe22..1eb27b7537 100644 --- a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Spatial.Prefix.Tree; @@ -51,7 +52,7 @@ public AbstractVisitingPrefixTreeFilter(IShape queryShape, string fieldName, Spa : base(queryShape, fieldName, grid, detailLevel) { this.m_prefixGridScanLevel = Math.Max(0, Math.Min(prefixGridScanLevel, grid.MaxLevels - 1)); - Debug.Assert(detailLevel <= grid.MaxLevels); + Debugging.Assert(() => detailLevel <= grid.MaxLevels); } public override bool Equals(object o) @@ -134,7 +135,7 @@ public VisitorTemplate(AbstractVisitingPrefixTreeFilter outerInstance, AtomicRea public virtual DocIdSet GetDocIdSet() { - Debug.Assert(curVNode == null, "Called more than once?"); + Debugging.Assert(() => curVNode == null, () => "Called more than once?"); if (m_termsEnum == null) { return null; @@ -168,7 +169,7 @@ public virtual DocIdSet GetDocIdSet() // LUCENENET IMPORTANT: Must not call this inline with Debug.Assert // because the compiler removes Debug.Assert statements in release mode!! 
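// ---- Illustrative sketch (not part of the patch) ----
// Why MoveNext() is hoisted into a local before the assert below: a side effect
// placed inside the condition delegate would be skipped whenever asserts are
// disabled, silently changing iteration behavior. The same hazard exists with
// Debug.Assert in Release builds, which is what the comment above warns about.
using System.Collections.Generic;
using Lucene.Net.Diagnostics;

internal static class SideEffectSketch
{
    internal static void Advance(IEnumerator<int> children)
    {
        // Wrong: MoveNext() would run only when asserts are enabled.
        //Debugging.Assert(() => children.MoveNext());

        // Right: perform the side effect unconditionally, assert on the result.
        bool hasNext = children.MoveNext();
        Debugging.Assert(() => hasNext);
    }
}
// ---- End sketch ----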
bool hasNext = curVNode.children.MoveNext(); - Debug.Assert(hasNext); + Debugging.Assert(() => hasNext); curVNode = curVNode.children.Current; } @@ -205,7 +206,7 @@ public virtual DocIdSet GetDocIdSet() if (compare > 0) { // leap frog (termsEnum is beyond where we would otherwise seek) - Debug.Assert(!m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetIterator(null).SeekExact(curVNodeTerm), "should be absent"); + Debugging.Assert(() => !m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetIterator(null).SeekExact(curVNodeTerm), () => "should be absent"); } else { @@ -249,7 +250,7 @@ public virtual DocIdSet GetDocIdSet() /// private void AddIntersectingChildren() { - Debug.Assert(thisTerm != null); + Debugging.Assert(() => thisTerm != null); Cell cell = curVNode.cell; if (cell.Level >= m_outerInstance.m_detailLevel) { @@ -260,7 +261,7 @@ private void AddIntersectingChildren() { //If the next indexed term just adds a leaf marker ('+') to cell, // then add all of those docs - Debug.Assert(StringHelper.StartsWith(thisTerm, curVNodeTerm));//TODO refactor to use method on curVNode.cell + Debugging.Assert(() => StringHelper.StartsWith(thisTerm, curVNodeTerm));//TODO refactor to use method on curVNode.cell scanCell = m_outerInstance.m_grid.GetCell(thisTerm.Bytes, thisTerm.Offset, thisTerm.Length, scanCell); if (scanCell.Level == cell.Level && scanCell.IsLeaf) { @@ -371,7 +372,7 @@ public void Dispose() public bool MoveNext() { - //Debug.Assert(cellIter.Current != null); + //Debugging.Assert(cellIter.Current != null); // LUCENENET NOTE: The consumer of this class calls // cellIter.MoveNext() before it is instantiated. @@ -489,9 +490,9 @@ internal VNode(VNode parent) internal virtual void Reset(Cell cell) { - Debug.Assert(cell != null); + Debugging.Assert(() => cell != null); this.cell = cell; - Debug.Assert(children == null); + Debugging.Assert(() => children == null); } } diff --git a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs index 0ece2349af..8464ec9853 100644 --- a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Spatial.Prefix.Tree; @@ -101,7 +102,7 @@ internal SmallDocSet Visit(Cell cell, IBits acceptContains) if (cell.Level != 0 && ((cell.ShapeRel == SpatialRelation.NOT_SET || cell.ShapeRel == SpatialRelation.WITHIN))) { subCellsFilter = null; - Debug.Assert(cell.Shape.Relate(outerInstance.m_queryShape) == SpatialRelation.WITHIN); + Debugging.Assert(() => cell.Shape.Relate(outerInstance.m_queryShape) == SpatialRelation.WITHIN); } ICollection subCells = cell.GetSubCells(subCellsFilter); foreach (Cell subCell in subCells) @@ -146,7 +147,7 @@ internal SmallDocSet Visit(Cell cell, IBits acceptContains) private bool SeekExact(Cell cell) { - Debug.Assert(new BytesRef(cell.GetTokenBytes()).CompareTo(termBytes) > 0); + Debugging.Assert(() => new BytesRef(cell.GetTokenBytes()).CompareTo(termBytes) > 0); this.termBytes.Bytes = cell.GetTokenBytes(); this.termBytes.Length = this.termBytes.Bytes.Length; if (m_termsEnum == null) @@ -156,7 +157,7 @@ private bool SeekExact(Cell cell) private SmallDocSet GetDocs(Cell cell, IBits acceptContains) { - Debug.Assert(new BytesRef(cell.GetTokenBytes()).Equals(termBytes)); + Debugging.Assert(() => new BytesRef(cell.GetTokenBytes()).Equals(termBytes)); return 
this.CollectDocs(acceptContains); } @@ -164,8 +165,8 @@ private SmallDocSet GetDocs(Cell cell, IBits acceptContains) private SmallDocSet GetLeafDocs(Cell leafCell, IBits acceptContains) { - Debug.Assert(new BytesRef(leafCell.GetTokenBytes()).Equals(termBytes)); - Debug.Assert(!leafCell.Equals(lastLeaf));//don't call for same leaf again + Debugging.Assert(() => new BytesRef(leafCell.GetTokenBytes()).Equals(termBytes)); + Debugging.Assert(() => !leafCell.Equals(lastLeaf));//don't call for same leaf again lastLeaf = leafCell; if (m_termsEnum == null) @@ -297,7 +298,7 @@ public override DocIdSetIterator GetIterator() } docs[d++] = v; } - Debug.Assert(d == intSet.Count); + Debugging.Assert(() => d == intSet.Count); int size = d; //sort them Array.Sort(docs, 0, size); diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs index 383cec8234..004be33381 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Spatial4n.Core.Shapes; using System; using System.Collections.Generic; @@ -103,7 +104,7 @@ protected internal Cell(SpatialPrefixTree outerInstance, byte[] bytes, int off, public virtual void Reset(byte[] bytes, int off, int len) { - Debug.Assert(Level != 0); + Debugging.Assert(() => Level != 0); token = null; m_shapeRel = SpatialRelation.NOT_SET; this.bytes = bytes; @@ -138,7 +139,7 @@ private void B_fixLeaf() /// Note: not supported at level 0. public virtual void SetLeaf() { - Debug.Assert(Level != 0); + Debugging.Assert(() => Level != 0); m_leaf = true; } diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs index 5f459829f2..4d399859a7 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Spatial4n.Core.Context; using Spatial4n.Core.Shapes; using System; @@ -170,7 +171,7 @@ private void Build( IShape shape, int maxLevel) { - Debug.Assert(str.Length == level); + Debugging.Assert(() => str.Length == level); double w = levelW[level] / 2; double h = levelH[level] / 2; @@ -196,7 +197,7 @@ private void CheckBattenberg( IShape shape, int maxLevel) { - Debug.Assert(str.Length == level); + Debugging.Assert(() => str.Length == level); double w = levelW[level] / 2; double h = levelH[level] / 2; diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs index 3857f2a389..34d923d989 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Spatial4n.Core.Context; using Spatial4n.Core.Shapes; using System; @@ -47,7 +48,7 @@ public abstract class SpatialPrefixTree public SpatialPrefixTree(SpatialContext ctx, int maxLevels) { - Debug.Assert(maxLevels > 0); + Debugging.Assert(() => maxLevels > 0); this.m_ctx = ctx; this.m_maxLevels = maxLevels; } @@ -268,7 +269,7 @@ public virtual IList GetCells(IPoint p, int detailLevel, bool inclParents) return new ReadOnlyCollection(new[] { cell }); } string endToken = cell.TokenString; - Debug.Assert(endToken.Length == detailLevel); + Debugging.Assert(() => endToken.Length == detailLevel); IList cells = new List(detailLevel); for (int i = 1; i < detailLevel; i++) { diff --git a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs 
b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs index 41b2ff7497..daf619c491 100644 --- a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Search; using Lucene.Net.Spatial.Prefix.Tree; @@ -202,8 +203,8 @@ protected internal override bool Visit(Cell cell) protected internal override void VisitLeaf(Cell cell) { //visitRelation is declared as a field, populated by visit() so we don't recompute it - Debug.Assert(m_outerInstance.m_detailLevel != cell.Level); - Debug.Assert(visitRelation == cell.Shape.Relate(m_outerInstance.m_queryShape)); + Debugging.Assert(() => m_outerInstance.m_detailLevel != cell.Level); + Debugging.Assert(() => visitRelation == cell.Shape.Relate(m_outerInstance.m_queryShape)); if (AllCellsIntersectQuery(cell, visitRelation)) { CollectDocs(inside); diff --git a/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs b/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs index 327093e63f..35270a8fd6 100644 --- a/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs +++ b/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Queries.Function; using Lucene.Net.Search; using Lucene.Net.Util; @@ -103,7 +104,7 @@ public override double DoubleVal(int doc) // make sure it has minX and area if (validX.Get(doc)) { - Debug.Assert(validY.Get(doc)); + Debugging.Assert(() => validY.Get(doc)); return calculator.Distance(outerInstance.from, ptX.Get(doc), ptY.Get(doc)) * outerInstance.multiplier; } return nullValue; diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs index 3ec11c5668..f8d6c753ea 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs @@ -1,5 +1,6 @@ using J2N.Collections.Generic.Extensions; using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Support; using Lucene.Net.Support.IO; @@ -269,7 +270,7 @@ private void ReplaceSep(Automaton a) IList newTransitions = new List(); foreach (Transition t in state.GetTransitions()) { - Debug.Assert(t.Min == t.Max); + Debugging.Assert(() => t.Min == t.Max); if (t.Min == TokenStreamToAutomaton.POS_SEP) { if (preserveSep) @@ -358,8 +359,8 @@ public int Compare(BytesRef a, BytesRef b) // Next by cost: long aCost = readerA.ReadInt32(); long bCost = readerB.ReadInt32(); - Debug.Assert(DecodeWeight(aCost) >= 0); - Debug.Assert(DecodeWeight(bCost) >= 0); + Debugging.Assert(() => DecodeWeight(aCost) >= 0); + Debugging.Assert(() => DecodeWeight(bCost) >= 0); if (aCost < bCost) { return -1; @@ -486,7 +487,7 @@ public override void Build(IInputIterator iterator) output.WriteBytes(surfaceForm.Bytes, surfaceForm.Offset, surfaceForm.Length); } - Debug.Assert(output.Position == requiredLength, output.Position + " vs " + requiredLength); + Debugging.Assert(() => output.Position == requiredLength, () => output.Position + " vs " + requiredLength); writer.Write(buffer, 0, output.Position); } @@ -660,7 +661,7 @@ private LookupResult GetLookupResult(long? 
output1, BytesRef output2, CharsRef s break; } } - Debug.Assert(sepIndex != -1); + Debugging.Assert(() => sepIndex != -1); spare.Grow(sepIndex); int payloadLen = output2.Length - sepIndex - 1; @@ -706,7 +707,7 @@ private bool SameSurfaceForm(BytesRef key, BytesRef output2) public override IList DoLookup(string key, IEnumerable contexts, bool onlyMorePopular, int num) { - Debug.Assert(num > 0); + Debugging.Assert(() => num > 0); if (onlyMorePopular) { @@ -798,7 +799,7 @@ public override IList DoLookup(string key, IEnumerable c } var completions = searcher.Search(); - Debug.Assert(completions.IsComplete); + Debugging.Assert(() => completions.IsComplete); // NOTE: this is rather inefficient: we enumerate // every matching "exactly the same analyzed form" @@ -841,7 +842,7 @@ public override IList DoLookup(string key, IEnumerable c } var completions2 = searcher2.Search(); - Debug.Assert(completions2.IsComplete); + Debugging.Assert(() => completions2.IsComplete); foreach (Util.Fst.Util.Result.Pair> completion in completions2) { @@ -919,7 +920,7 @@ protected override bool AcceptResult(Int32sRef input, PairOutputs results.Count == 1); return false; } else @@ -963,7 +964,7 @@ internal ISet ToFiniteStrings(BytesRef surfaceForm, TokenStreamToAuto ReplaceSep(automaton); automaton = ConvertAutomaton(automaton); - Debug.Assert(SpecialOperations.IsFinite(automaton)); + Debugging.Assert(() => SpecialOperations.IsFinite(automaton)); // Get all paths from the automaton (there can be // more than one path, eg if the analyzer created a diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs index 8d6713bb4d..d20b4461b0 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs @@ -8,6 +8,7 @@ using System.IO; using JCG = J2N.Collections.Generic; using Directory = Lucene.Net.Store.Directory; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Search.Suggest.Analyzing { @@ -144,7 +145,7 @@ protected override FieldType GetTextFieldType() { BinaryDocValues textDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, TEXT_FIELD_NAME); - Debug.Assert(textDV != null); + Debugging.Assert(() => textDV != null); // This will just be null if app didn't pass payloads to build(): // TODO: maybe just stored fields? they compress... 
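// ---- Illustrative sketch (not part of the patch) ----
// The two-delegate overload used in the FSTUtil hunks below defers message
// formatting: the condition delegate runs whenever asserts are enabled, but the
// message delegate (and its string concatenation) runs only if the condition fails.
// In tight loops over FST arcs this avoids the per-iteration allocations an eagerly
// built message string would incur. CheckArcLabel is a hypothetical helper name.
using Lucene.Net.Diagnostics;

internal static class MessageFactorySketch
{
    internal static void CheckArcLabel(int label, int min)
    {
        // Mirrors the converted assert in IntersectPrefixPaths below.
        Debugging.Assert(() => label >= min, () => label + " " + min);
    }
}
// ---- End sketch ----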
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs index 72736b2777..a676d8e375 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Util; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util; using Lucene.Net.Util.Automaton; using Lucene.Net.Util.Fst; using System.Collections.Generic; @@ -68,7 +69,7 @@ public Path(State state, FST.Arc fstNode, T output, Int32sRef input) /// public static IList> IntersectPrefixPaths(Automaton a, FST fst) { - Debug.Assert(a.IsDeterministic); + Debugging.Assert(() => a.IsDeterministic); IList> queue = new List>(); List> endNodes = new List>(); queue.Add(new Path(a.GetInitialState(), fst.GetFirstArc(new FST.Arc()), fst.Outputs.NoOutput, new Int32sRef())); @@ -119,8 +120,8 @@ public static IList> IntersectPrefixPaths(Automaton a, FST fst) FST.Arc nextArc = Lucene.Net.Util.Fst.Util.ReadCeilArc(min, fst, path.FstNode, scratchArc, fstReader); while (nextArc != null && nextArc.Label <= max) { - Debug.Assert(nextArc.Label <= max); - Debug.Assert(nextArc.Label >= min, nextArc.Label + " " + min); + Debugging.Assert(() => nextArc.Label <= max); + Debugging.Assert(() => nextArc.Label >= min, () => nextArc.Label + " " + min); Int32sRef newInput = new Int32sRef(currentInput.Length + 1); newInput.CopyInt32s(currentInput); newInput.Int32s[currentInput.Length] = nextArc.Label; @@ -129,7 +130,7 @@ public static IList> IntersectPrefixPaths(Automaton a, FST fst) .CopyFrom(nextArc), fst.Outputs.Add(path.Output, nextArc.Output), newInput)); int label = nextArc.Label; // used in assert nextArc = nextArc.IsLast ? null : fst.ReadNextRealArc(nextArc, fstReader); - Debug.Assert(nextArc == null || label < nextArc.Label, "last: " + label + " next: " + (nextArc == null ? "" : nextArc.Label.ToString())); + Debugging.Assert(() => nextArc == null || label < nextArc.Label, () => "last: " + label + " next: " + (nextArc == null ? "" : nextArc.Label.ToString())); } } } diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs index 2d058f3f3d..c53d8c0982 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs @@ -3,6 +3,7 @@ using Lucene.Net.Analysis.Shingle; using Lucene.Net.Analysis.TokenAttributes; using Lucene.Net.Codecs; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; @@ -555,7 +556,7 @@ public virtual IList DoLookup(string key, IEnumerable co // a separate dedicated att for this? int gramCount = posLenAtt.PositionLength; - Debug.Assert(gramCount <= grams); + Debugging.Assert(() => gramCount <= grams); // Safety: make sure the recalculated count "agrees": if (CountGrams(tokenBytes) != gramCount) @@ -681,7 +682,7 @@ public virtual IList DoLookup(string key, IEnumerable co { BytesRef context = new BytesRef(token.Bytes, token.Offset, i); long? 
output = Lucene.Net.Util.Fst.Util.Get(fst, Lucene.Net.Util.Fst.Util.ToInt32sRef(context, new Int32sRef())); - Debug.Assert(output != null); + Debugging.Assert(() => output != null); contextCount = DecodeWeight(output); lastTokenFragment = new BytesRef(token.Bytes, token.Offset + i + 1, token.Length - i - 1); break; @@ -698,7 +699,7 @@ public virtual IList DoLookup(string key, IEnumerable co { finalLastToken = BytesRef.DeepCopyOf(lastTokenFragment); } - Debug.Assert(finalLastToken.Offset == 0); + Debugging.Assert(() => finalLastToken.Offset == 0); CharsRef spare = new CharsRef(); @@ -725,7 +726,7 @@ public virtual IList DoLookup(string key, IEnumerable co searcher.AddStartPaths(arc, prefixOutput, true, new Int32sRef()); completions = searcher.Search(); - Debug.Assert(completions.IsComplete); + Debugging.Assert(() => completions.IsComplete); } catch (IOException bogus) { @@ -753,7 +754,7 @@ public virtual IList DoLookup(string key, IEnumerable co { if (token.Bytes[token.Offset + i] == separator) { - Debug.Assert(token.Length - i - 1 > 0); + Debugging.Assert(() => token.Length - i - 1 > 0); lastToken = new BytesRef(token.Bytes, token.Offset + i + 1, token.Length - i - 1); break; } @@ -771,7 +772,7 @@ public virtual IList DoLookup(string key, IEnumerable co // return numbers that are greater than long.MaxValue, which results in a negative long number. (long)(long.MaxValue * (decimal)backoff * ((decimal)DecodeWeight(completion.Output)) / contextCount)); results.Add(result); - Debug.Assert(results.Count == seen.Count); + Debugging.Assert(() => results.Count == seen.Count); //System.out.println(" add result=" + result); nextCompletionContinue:; } @@ -873,7 +874,7 @@ private long EncodeWeight(long ngramCount) //private long decodeWeight(Pair output) { private static long DecodeWeight(long? output) { - Debug.Assert(output != null); + Debugging.Assert(() => output != null); return (int)(long.MaxValue - output); // LUCENENET TODO: Perhaps a Java Lucene bug? Why cast to int when returning long? 
} diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs index 75524a2ffb..4455dfadac 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs @@ -1,6 +1,7 @@ using Lucene.Net.Analysis; using Lucene.Net.Analysis.TokenAttributes; using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using System.Diagnostics; namespace Lucene.Net.Search.Suggest.Analyzing @@ -114,7 +115,7 @@ public override bool IncrementToken() m_input.End(); endState = CaptureState(); int finalEndOffset = offsetAtt.EndOffset; - Debug.Assert(finalEndOffset >= endOffset); + Debugging.Assert(() => finalEndOffset >= endOffset); if (finalEndOffset > endOffset) { // OK there was a token separator after the diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs index 76bb1d978d..d9d974d31b 100644 --- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs +++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Util; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util; using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; @@ -406,7 +407,7 @@ private bool Collect(IList res, int num, int bucket, BytesRef output { output.Bytes = ArrayUtil.Grow(output.Bytes); } - Debug.Assert(output.Offset == 0); + Debugging.Assert(() => output.Offset == 0); output.Bytes[output.Length++] = (byte) arc.Label; FST.BytesReader fstReader = automaton.GetBytesReader(); automaton.ReadFirstTargetArc(arc, arc, fstReader); diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs index 36c6b8e5df..ad27c567a1 100644 --- a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs +++ b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Store; +using Lucene.Net.Diagnostics; +using Lucene.Net.Store; using Lucene.Net.Support; using Lucene.Net.Util; using Lucene.Net.Util.Fst; @@ -139,7 +140,7 @@ public override IList DoLookup(string key, IEnumerable c { throw new ArgumentException("this suggester doesn't support contexts"); } - Debug.Assert(num > 0); + Debugging.Assert(() => num > 0); if (onlyMorePopular) { @@ -189,7 +190,7 @@ public override IList DoLookup(string key, IEnumerable c try { completions = Lucene.Net.Util.Fst.Util.ShortestPaths(fst, arc, prefixOutput, weightComparer, num, !exactFirst); - Debug.Assert(completions.IsComplete); + Debugging.Assert(() => completions.IsComplete); } catch (IOException bogus) { @@ -212,7 +213,7 @@ public override IList DoLookup(string key, IEnumerable c private long? 
LookupPrefix(BytesRef scratch, FST.Arc arc) //Bogus { - Debug.Assert(0 == (long)fst.Outputs.NoOutput); + Debugging.Assert(() => 0 == (long)fst.Outputs.NoOutput); long output = 0; var bytesReader = fst.GetBytesReader(); @@ -293,7 +294,7 @@ internal WFSTInputIterator(WFSTCompletionLookup outerInstance, IInputIterator so : base(source) { this.outerInstance = outerInstance; - Debug.Assert(source.HasPayloads == false); + Debugging.Assert(() => source.HasPayloads == false); } protected internal override void Encode(OfflineSorter.ByteSequencesWriter writer, ByteArrayDataOutput output, byte[] buffer, BytesRef spare, BytesRef payload, ICollection contexts, long weight) diff --git a/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs b/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs index 4bd25a0fa3..b2c32d9430 100644 --- a/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs +++ b/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Util; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util; using System; using System.Collections.Generic; using System.Diagnostics; @@ -61,7 +62,7 @@ public override long Weight { get { - Debug.Assert(currentOrd == ords[m_curPos]); + Debugging.Assert(() => currentOrd == ords[m_curPos]); return m_freqs[currentOrd]; } } @@ -82,7 +83,7 @@ public override BytesRef Payload { if (HasPayloads && m_curPos < m_payloads.Length) { - Debug.Assert(currentOrd == ords[m_curPos]); + Debugging.Assert(() => currentOrd == ords[m_curPos]); return m_payloads.Get(payloadSpare, currentOrd); } return null; @@ -95,7 +96,7 @@ public override ICollection Contexts { if (HasContexts && m_curPos < m_contextSets.Count) { - Debug.Assert(currentOrd == ords[m_curPos]); + Debugging.Assert(() => currentOrd == ords[m_curPos]); return m_contextSets[currentOrd]; } return null; diff --git a/src/Lucene.Net/Analysis/NumericTokenStream.cs b/src/Lucene.Net/Analysis/NumericTokenStream.cs index 9902fffb65..fbff885d4b 100644 --- a/src/Lucene.Net/Analysis/NumericTokenStream.cs +++ b/src/Lucene.Net/Analysis/NumericTokenStream.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Diagnostics; @@ -177,7 +178,7 @@ public NumericTermAttribute() public void FillBytesRef() { - Debug.Assert(ValueSize == 64 || ValueSize == 32); + Debugging.Assert(() => ValueSize == 64 || ValueSize == 32); if (ValueSize == 64) { NumericUtils.Int64ToPrefixCoded(_value, Shift, _bytes); diff --git a/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs b/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs index 3dac999d07..93d15f24c5 100644 --- a/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs +++ b/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs @@ -1,5 +1,6 @@ using J2N; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Diagnostics; @@ -154,7 +155,7 @@ public virtual Automaton ToAutomaton(TokenStream @in) { posInc = 1; } - Debug.Assert(pos > -1 || posInc > 0); + Debugging.Assert(() => pos > -1 || posInc > 0); if (posInc > 0) { @@ -162,7 +163,7 @@ public virtual Automaton ToAutomaton(TokenStream @in) pos += posInc; posData = positions.Get(pos); - Debug.Assert(posData.leaving == null); + Debugging.Assert(() => posData.leaving == null); if (posData.arriving == null) { diff --git a/src/Lucene.Net/Codecs/BlockTermState.cs b/src/Lucene.Net/Codecs/BlockTermState.cs index e799921719..28161158e8 100644 --- 
a/src/Lucene.Net/Codecs/BlockTermState.cs +++ b/src/Lucene.Net/Codecs/BlockTermState.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Codecs { @@ -57,7 +57,7 @@ protected internal BlockTermState() public override void CopyFrom(TermState other) { - Debug.Assert(other is BlockTermState, "can not copy from " + other.GetType().Name); + Debugging.Assert(() => other is BlockTermState, () => "can not copy from " + other.GetType().Name); BlockTermState other2 = (BlockTermState)other; base.CopyFrom(other); DocFreq = other2.DocFreq; diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs index a654054377..2be06072df 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using Lucene.Net.Util.Fst; @@ -167,13 +168,13 @@ public BlockTreeTermsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo in { int field = @in.ReadVInt32(); long numTerms = @in.ReadVInt64(); - Debug.Assert(numTerms >= 0); + Debugging.Assert(() => numTerms >= 0); int numBytes = @in.ReadVInt32(); BytesRef rootCode = new BytesRef(new byte[numBytes]); @in.ReadBytes(rootCode.Bytes, 0, numBytes); rootCode.Length = numBytes; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - Debug.Assert(fieldInfo != null, "field=" + field); + Debugging.Assert(() => fieldInfo != null, () => "field=" + field); long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : @in.ReadVInt64(); long sumDocFreq = @in.ReadVInt64(); int docCount = @in.ReadVInt32(); @@ -291,7 +292,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Debug.Assert(field != null); + Debugging.Assert(() => field != null); FieldReader ret; fields.TryGetValue(field, out ret); return ret; @@ -478,7 +479,7 @@ internal virtual void EndBlock(FieldReader.SegmentTermsEnum.Frame frame) } endBlockCount++; long otherBytes = frame.fpEnd - frame.fp - frame.suffixesReader.Length - frame.statsReader.Length; - Debug.Assert(otherBytes > 0, "otherBytes=" + otherBytes + " frame.fp=" + frame.fp + " frame.fpEnd=" + frame.fpEnd); + Debugging.Assert(() => otherBytes > 0, () => "otherBytes=" + otherBytes + " frame.fp=" + frame.fp + " frame.fpEnd=" + frame.fpEnd); TotalBlockOtherBytes += otherBytes; } @@ -489,9 +490,9 @@ internal virtual void Term(BytesRef term) internal virtual void Finish() { - Debug.Assert(startBlockCount == endBlockCount, "startBlockCount=" + startBlockCount + " endBlockCount=" + endBlockCount); - Debug.Assert(TotalBlockCount == FloorSubBlockCount + NonFloorBlockCount, "floorSubBlockCount=" + FloorSubBlockCount + " nonFloorBlockCount=" + NonFloorBlockCount + " totalBlockCount=" + TotalBlockCount); - Debug.Assert(TotalBlockCount == MixedBlockCount + TermsOnlyBlockCount + SubBlocksOnlyBlockCount, "totalBlockCount=" + TotalBlockCount + " mixedBlockCount=" + MixedBlockCount + " subBlocksOnlyBlockCount=" + SubBlocksOnlyBlockCount + " termsOnlyBlockCount=" + TermsOnlyBlockCount); + Debugging.Assert(() => startBlockCount == endBlockCount, () => "startBlockCount=" + startBlockCount + " endBlockCount=" + endBlockCount); + Debugging.Assert(() => TotalBlockCount == FloorSubBlockCount + NonFloorBlockCount, () => "floorSubBlockCount=" + FloorSubBlockCount + " nonFloorBlockCount=" + NonFloorBlockCount + " totalBlockCount=" + TotalBlockCount); + Debugging.Assert(() => TotalBlockCount == 
MixedBlockCount + TermsOnlyBlockCount + SubBlocksOnlyBlockCount, () => "totalBlockCount=" + TotalBlockCount + " mixedBlockCount=" + MixedBlockCount + " subBlocksOnlyBlockCount=" + SubBlocksOnlyBlockCount + " termsOnlyBlockCount=" + TermsOnlyBlockCount); } public override string ToString() @@ -529,7 +530,7 @@ public override string ToString() @out.AppendLine(" " + prefix.ToString().PadLeft(2, ' ') + ": " + blockCount); } } - Debug.Assert(TotalBlockCount == total); + Debugging.Assert(() => TotalBlockCount == total); } return @out.ToString(); } @@ -560,7 +561,7 @@ public sealed class FieldReader : Terms internal FieldReader(BlockTreeTermsReader outerInstance, FieldInfo fieldInfo, long numTerms, BytesRef rootCode, long sumTotalTermFreq, long sumDocFreq, int docCount, long indexStartFP, int longsSize, IndexInput indexIn) { this.outerInstance = outerInstance; - Debug.Assert(numTerms > 0); + Debugging.Assert(() => numTerms > 0); this.fieldInfo = fieldInfo; //DEBUG = BlockTreeTermsReader.DEBUG && fieldInfo.name.Equals("id", StringComparison.Ordinal); this.numTerms = numTerms; @@ -760,7 +761,7 @@ public Frame(BlockTreeTermsReader.FieldReader.IntersectEnum outerInstance, int o internal void LoadNextFloorBlock() { - Debug.Assert(numFollowFloorBlocks > 0); + Debugging.Assert(() => numFollowFloorBlocks > 0); //if (DEBUG) System.out.println(" loadNextFloorBlock trans=" + transitions[transitionIndex]); do @@ -845,7 +846,7 @@ internal void Load(BytesRef frameIndexData) outerInstance.@in.Seek(fp); int code_ = outerInstance.@in.ReadVInt32(); entCount = (int)((uint)code_ >> 1); - Debug.Assert(entCount > 0); + Debugging.Assert(() => entCount > 0); isLastInFloor = (code_ & 1) != 0; // term suffixes: @@ -906,7 +907,7 @@ public bool Next() public bool NextLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - Debug.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; suffix = suffixesReader.ReadVInt32(); startBytePos = suffixesReader.Position; @@ -917,7 +918,7 @@ public bool NextLeaf() public bool NextNonLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - Debug.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; int code = suffixesReader.ReadVInt32(); suffix = (int)((uint)code >> 1); @@ -944,7 +945,7 @@ public void DecodeMetaData() // lazily catch up on metadata decode: int limit = TermBlockOrd; bool absolute = metaDataUpto == 0; - Debug.Assert(limit > 0); + Debugging.Assert(() => limit > 0); // TODO: better API would be "jump straight to term=N"??? while (metaDataUpto < limit) @@ -1021,7 +1022,7 @@ public IntersectEnum(BlockTreeTermsReader.FieldReader outerInstance, CompiledAut FST.Arc arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index!
- Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); // Special pushFrame since it's the first one: Frame f = stack[0]; @@ -1033,7 +1034,7 @@ public IntersectEnum(BlockTreeTermsReader.FieldReader outerInstance, CompiledAut f.Load(outerInstance.rootCode); // for assert: - Debug.Assert(SetSavedStartTerm(startTerm)); + Debugging.Assert(() => SetSavedStartTerm(startTerm)); currentFrame = f; if (startTerm != null) @@ -1067,7 +1068,7 @@ private Frame GetFrame(int ord) } stack = next; } - Debug.Assert(stack[ord].ord == ord); + Debugging.Assert(() => stack[ord].ord == ord); return stack[ord]; } @@ -1101,7 +1102,7 @@ private Frame PushFrame(int state) // possible: FST.Arc arc = currentFrame.arc; int idx = currentFrame.prefix; - Debug.Assert(currentFrame.suffix > 0); + Debugging.Assert(() => currentFrame.suffix > 0); BytesRef output = currentFrame.outputPrefix; while (idx < f.prefix) { @@ -1110,14 +1111,14 @@ private Frame PushFrame(int state) // case by using current arc as starting point, // passed to findTargetArc arc = outerInstance.index.FindTargetArc(target, arc, GetArc(1 + idx), fstReader); - Debug.Assert(arc != null); + Debugging.Assert(() => arc != null); output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); idx++; } f.arc = arc; f.outputPrefix = output; - Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); f.Load(outerInstance.outerInstance.fstOutputs.Add(output, arc.NextFinalOutput)); return f; } @@ -1168,7 +1169,7 @@ private int GetState() for (int idx = 0; idx < currentFrame.suffix; idx++) { state = runAutomaton.Step(state, currentFrame.suffixBytes[currentFrame.startBytePos + idx] & 0xff); - Debug.Assert(state != -1); + Debugging.Assert(() => state != -1); } return state; } @@ -1180,13 +1181,13 @@ private int GetState() private void SeekToStartTerm(BytesRef target) { //if (DEBUG) System.out.println("seek to startTerm=" + target.utf8ToString()); - Debug.Assert(currentFrame.ord == 0); + Debugging.Assert(() => currentFrame.ord == 0); if (term.Length < target.Length) { term.Bytes = ArrayUtil.Grow(term.Bytes, target.Length); } FST.Arc arc = arcs[0]; - Debug.Assert(arc == currentFrame.arc); + Debugging.Assert(() => arc == currentFrame.arc); for (int idx = 0; idx <= target.Length; idx++) { @@ -1264,7 +1265,7 @@ private void SeekToStartTerm(BytesRef target) } } - Debug.Assert(false); + Debugging.Assert(() => false); } public override BytesRef Next() @@ -1294,7 +1295,7 @@ public override BytesRef Next() } long lastFP = currentFrame.fpOrig; currentFrame = stack[currentFrame.ord - 1]; - Debug.Assert(currentFrame.lastSubFP == lastFP); + Debugging.Assert(() => currentFrame.lastSubFP == lastFP); //if (DEBUG) System.out.println("\n frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? 
"n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix); } } @@ -1348,7 +1349,7 @@ public override BytesRef Next() byte[] commonSuffixBytes = compiledAutomaton.CommonSuffixRef.Bytes; int lenInPrefix = compiledAutomaton.CommonSuffixRef.Length - currentFrame.suffix; - Debug.Assert(compiledAutomaton.CommonSuffixRef.Offset == 0); + Debugging.Assert(() => compiledAutomaton.CommonSuffixRef.Offset == 0); int suffixBytesPos; int commonSuffixBytesPos = 0; @@ -1359,7 +1360,7 @@ public override BytesRef Next() // test whether the prefix part matches: byte[] termBytes = term.Bytes; int termBytesPos = currentFrame.prefix - lenInPrefix; - Debug.Assert(termBytesPos >= 0); + Debugging.Assert(() => termBytesPos >= 0); int termBytesPosEnd = currentFrame.prefix; while (termBytesPos < termBytesPosEnd) { @@ -1427,10 +1428,7 @@ public override BytesRef Next() { CopyTerm(); //if (DEBUG) System.out.println(" term match to state=" + state + "; return term=" + brToString(term)); - if (!(savedStartTerm == null || term.CompareTo(savedStartTerm) > 0)) - { - Debug.Assert(false, "saveStartTerm=" + savedStartTerm.Utf8ToString() + " term=" + term.Utf8ToString()); - } + Debugging.Assert(() => savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, () => "saveStartTerm=" + savedStartTerm.Utf8ToString() + " term=" + term.Utf8ToString()); return term; } else @@ -1534,7 +1532,7 @@ public SegmentTermsEnum(BlockTreeTermsReader.FieldReader outerInstance) { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); } else { @@ -1582,7 +1580,7 @@ public Stats ComputeBlockStats() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); } else { @@ -1617,7 +1615,7 @@ public Stats ComputeBlockStats() } long lastFP = currentFrame.fpOrig; currentFrame = stack[currentFrame.ord - 1]; - Debug.Assert(lastFP == currentFrame.lastSubFP); + Debugging.Assert(() => lastFP == currentFrame.lastSubFP); // if (DEBUG) { // System.out.println(" reset validIndexPrefix=" + validIndexPrefix); // } @@ -1657,7 +1655,7 @@ public Stats ComputeBlockStats() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! 
- Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); } else { @@ -1684,7 +1682,7 @@ private Frame GetFrame(int ord) } stack = next; } - Debug.Assert(stack[ord].ord == ord); + Debugging.Assert(() => stack[ord].ord == ord); return stack[ord]; } @@ -1743,7 +1741,7 @@ internal Frame PushFrame(FST.Arc arc, long fp, int length) // System.out.println(" skip rewind!"); // } } - Debug.Assert(length == f.prefix); + Debugging.Assert(() => length == f.prefix); } else { @@ -1789,7 +1787,7 @@ public override bool SeekExact(BytesRef target) term.Bytes = ArrayUtil.Grow(term.Bytes, 1 + target.Length); } - Debug.Assert(ClearEOF()); + Debugging.Assert(() => ClearEOF()); FST.Arc arc; int targetUpto; @@ -1811,12 +1809,12 @@ public override bool SeekExact(BytesRef target) // } arc = arcs[0]; - Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); output = arc.Output; targetUpto = 0; Frame lastFrame = stack[0]; - Debug.Assert(validIndexPrefix <= term.Length); + Debugging.Assert(() => validIndexPrefix <= term.Length); int targetLimit = Math.Min(target.Length, validIndexPrefix); @@ -1840,7 +1838,7 @@ public override bool SeekExact(BytesRef target) //if (arc.label != (target.bytes[target.offset + targetUpto] & 0xFF)) { //System.out.println("FAIL: arc.label=" + (char) arc.label + " targetLabel=" + (char) (target.bytes[target.offset + targetUpto] & 0xFF)); //} - Debug.Assert(arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); + Debugging.Assert(() => arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -1907,7 +1905,7 @@ public override bool SeekExact(BytesRef target) else { // Target is exactly the same as current term - Debug.Assert(term.Length == target.Length); + Debugging.Assert(() => term.Length == target.Length); if (termExists) { // if (DEBUG) { @@ -1932,8 +1930,8 @@ public override bool SeekExact(BytesRef target) arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output (block) in the index! 
- Debug.Assert(arc.IsFinal); - Debug.Assert(arc.Output != null); + Debugging.Assert(() => arc.IsFinal); + Debugging.Assert(() => arc.Output != null); // if (DEBUG) { // System.out.println(" no seek state; push root frame"); @@ -2005,7 +2003,7 @@ public override bool SeekExact(BytesRef target) arc = nextArc; term.Bytes[targetUpto] = (byte)targetLabel; // Aggregate output as we go: - Debug.Assert(arc.Output != null); + Debugging.Assert(() => arc.Output != null); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -2073,7 +2071,7 @@ public override SeekStatus SeekCeil(BytesRef target) term.Bytes = ArrayUtil.Grow(term.Bytes, 1 + target.Length); } - Debug.Assert(ClearEOF()); + Debugging.Assert(() => ClearEOF()); //if (DEBUG) { //System.out.println("\nBTTR.seekCeil seg=" + segment + " target=" + fieldInfo.name + ":" + target.utf8ToString() + " " + target + " current=" + brToString(term) + " (exists?=" + termExists + ") validIndexPrefix= " + validIndexPrefix); @@ -2100,12 +2098,12 @@ public override SeekStatus SeekCeil(BytesRef target) //} arc = arcs[0]; - Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); output = arc.Output; targetUpto = 0; Frame lastFrame = stack[0]; - Debug.Assert(validIndexPrefix <= term.Length); + Debugging.Assert(() => validIndexPrefix <= term.Length); int targetLimit = Math.Min(target.Length, validIndexPrefix); @@ -2126,7 +2124,7 @@ public override SeekStatus SeekCeil(BytesRef target) break; } arc = arcs[1 + targetUpto]; - Debug.Assert(arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); + Debugging.Assert(() => arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); // TODO: we could save the outputs in local // byte[][] instead of making new objs every // seek; but, often the FST doesn't have any @@ -2195,7 +2193,7 @@ public override SeekStatus SeekCeil(BytesRef target) else { // Target is exactly the same as current term - Debug.Assert(term.Length == target.Length); + Debugging.Assert(() => term.Length == target.Length); if (termExists) { //if (DEBUG) { @@ -2217,8 +2215,8 @@ public override SeekStatus SeekCeil(BytesRef target) arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output (block) in the index!
- Debug.Assert(arc.IsFinal); - Debug.Assert(arc.Output != null); + Debugging.Assert(() => arc.IsFinal); + Debugging.Assert(() => arc.Output != null); //if (DEBUG) { //System.out.println(" no seek state; push root frame"); @@ -2292,7 +2290,7 @@ public override SeekStatus SeekCeil(BytesRef target) term.Bytes[targetUpto] = (byte)targetLabel; arc = nextArc; // Aggregate output as we go: - Debug.Assert(arc.Output != null); + Debugging.Assert(() => arc.Output != null); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -2362,7 +2360,7 @@ public override SeekStatus SeekCeil(BytesRef target) // while (true) // { // Frame f = GetFrame(ord); - // Debug.Assert(f != null); + // Debugging.Assert(f != null); // BytesRef prefix = new BytesRef(term.Bytes, 0, f.Prefix); // if (f.NextEnt == -1) // { @@ -2374,7 +2372,7 @@ public override SeekStatus SeekCeil(BytesRef target) // } // if (OuterInstance.Index != null) // { - // Debug.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame=" + isSeekFrame + " f.arc=" + f.Arc); + // Debugging.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame=" + isSeekFrame + " f.arc=" + f.Arc); // if (f.Prefix > 0 && isSeekFrame && f.Arc.Label != (term.Bytes[f.Prefix - 1] & 0xFF)) // { // @out.println(" broken seek state: arc.label=" + (char)f.Arc.Label + " vs term byte=" + (char)(term.Bytes[f.Prefix - 1] & 0xFF)); @@ -2425,7 +2423,7 @@ public override BytesRef Next() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - Debug.Assert(arc.IsFinal); + Debugging.Assert(() => arc.IsFinal); } else { @@ -2437,7 +2435,7 @@ public override BytesRef Next() targetBeforeCurrentLength = currentFrame.ord; - Debug.Assert(!eof); + Debugging.Assert(() => !eof); //if (DEBUG) { //System.out.println("\nBTTR.next seg=" + segment + " term=" + brToString(term) + " termExists?=" + termExists + " field=" + fieldInfo.name + " termBlockOrd=" + currentFrame.state.termBlockOrd + " validIndexPrefix=" + validIndexPrefix); //printSeekState(); @@ -2453,7 +2451,7 @@ public override BytesRef Next() // works properly: //if (DEBUG) System.out.println(" re-seek to pending term=" + term.utf8ToString() + " " + term); bool result = SeekExact(term); - Debug.Assert(result); + Debugging.Assert(() => result); } // Pop finished blocks @@ -2469,7 +2467,7 @@ public override BytesRef Next() if (currentFrame.ord == 0) { //if (DEBUG) System.out.println(" return null"); - Debug.Assert(SetEOF()); + Debugging.Assert(() => SetEOF()); term.Length = 0; validIndexPrefix = 0; currentFrame.Rewind(); @@ -2523,7 +2521,7 @@ public override BytesRef Term { get { - Debug.Assert(!eof); + Debugging.Assert(() => !eof); return term; } } @@ -2532,7 +2530,7 @@ public override int DocFreq { get { - Debug.Assert(!eof); + Debugging.Assert(() => !eof); //if (DEBUG) System.out.println("BTR.docFreq"); currentFrame.DecodeMetaData(); //if (DEBUG) System.out.println(" return " + currentFrame.state.docFreq); @@ -2544,7 +2542,7 @@ public override long TotalTermFreq { get { - Debug.Assert(!eof); + Debugging.Assert(() => !eof); currentFrame.DecodeMetaData(); return currentFrame.state.TotalTermFreq; } @@ -2552,7 +2550,7 @@ public override long TotalTermFreq public override DocsEnum Docs(IBits skipDocs, DocsEnum reuse, DocsFlags flags) { - Debug.Assert(!eof); + Debugging.Assert(() => !eof); //if (DEBUG) { //System.out.println("BTTR.docs seg=" + segment); //} @@ -2571,7 +2569,7 @@ public override DocsAndPositionsEnum 
DocsAndPositions(IBits skipDocs, DocsAndPos return null; } - Debug.Assert(!eof); + Debugging.Assert(() => !eof); currentFrame.DecodeMetaData(); return outerInstance.outerInstance.postingsReader.DocsAndPositions(outerInstance.fieldInfo, currentFrame.state, skipDocs, reuse, flags); } @@ -2581,15 +2579,15 @@ public override void SeekExact(BytesRef target, TermState otherState) // if (DEBUG) { // System.out.println("BTTR.seekExact termState seg=" + segment + " target=" + target.utf8ToString() + " " + target + " state=" + otherState); // } - Debug.Assert(ClearEOF()); + Debugging.Assert(() => ClearEOF()); if (target.CompareTo(term) != 0 || !termExists) { - Debug.Assert(otherState != null && otherState is BlockTermState); + Debugging.Assert(() => otherState != null && otherState is BlockTermState); currentFrame = staticFrame; currentFrame.state.CopyFrom(otherState); term.CopyBytes(target); currentFrame.metaDataUpto = currentFrame.TermBlockOrd; - Debug.Assert(currentFrame.metaDataUpto > 0); + Debugging.Assert(() => currentFrame.metaDataUpto > 0); validIndexPrefix = 0; } else @@ -2602,7 +2600,7 @@ public override void SeekExact(BytesRef target, TermState otherState) public override TermState GetTermState() { - Debug.Assert(!eof); + Debugging.Assert(() => !eof); currentFrame.DecodeMetaData(); TermState ts = (TermState)currentFrame.state.Clone(); //if (DEBUG) System.out.println("BTTR.termState seg=" + segment + " state=" + ts); @@ -2734,7 +2732,7 @@ internal void LoadNextFloorBlock() //if (DEBUG) { //System.out.println(" loadNextFloorBlock fp=" + fp + " fpEnd=" + fpEnd); //} - Debug.Assert(arc == null || isFloor, "arc=" + arc + " isFloor=" + isFloor); + Debugging.Assert(() => arc == null || isFloor, () => "arc=" + arc + " isFloor=" + isFloor); fp = fpEnd; nextEnt = -1; LoadBlock(); @@ -2768,9 +2766,9 @@ internal void LoadBlock() outerInstance.@in.Seek(fp); int code = outerInstance.@in.ReadVInt32(); entCount = (int)((uint)code >> 1); - Debug.Assert(entCount > 0); + Debugging.Assert(() => entCount > 0); isLastInFloor = (code & 1) != 0; - Debug.Assert(arc == null || (isLastInFloor || isFloor)); + Debugging.Assert(() => arc == null || (isLastInFloor || isFloor)); // TODO: if suffixes were stored in random-access // array structure, then we could do binary search @@ -2891,7 +2889,7 @@ public bool Next() public bool NextLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - Debug.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; suffix = suffixesReader.ReadVInt32(); startBytePos = suffixesReader.Position; @@ -2909,7 +2907,7 @@ public bool NextLeaf() public bool NextNonLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - Debug.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; int code = suffixesReader.ReadVInt32(); suffix = (int)((uint)code >> 1); @@ -2968,7 +2966,7 @@ public void ScanToFloorFrame(BytesRef target) return; } - Debug.Assert(numFollowFloorBlocks != 0); + Debugging.Assert(() => numFollowFloorBlocks != 0); long newFP = fpOrig; while (true) @@ -3028,7 +3026,7 @@ public void 
DecodeMetaData() // lazily catch up on metadata decode: int limit = TermBlockOrd; bool absolute = metaDataUpto == 0; - Debug.Assert(limit > 0); + Debugging.Assert(() => limit > 0); // TODO: better API would be "jump straight to term=N"??? while (metaDataUpto < limit) @@ -3084,7 +3082,7 @@ private bool PrefixMatches(BytesRef target) /// public void ScanToSubBlock(long subFP) { - Debug.Assert(!isLeafBlock); + Debugging.Assert(() => !isLeafBlock); //if (DEBUG) System.out.println(" scanToSubBlock fp=" + fp + " subFP=" + subFP + " entCount=" + entCount + " lastSubFP=" + lastSubFP); //assert nextEnt == 0; if (lastSubFP == subFP) @@ -3092,12 +3090,12 @@ public void ScanToSubBlock(long subFP) //if (DEBUG) System.out.println(" already positioned"); return; } - Debug.Assert(subFP < fp, "fp=" + fp + " subFP=" + subFP); + Debugging.Assert(() => subFP < fp, () => "fp=" + fp + " subFP=" + subFP); long targetSubCode = fp - subFP; //if (DEBUG) System.out.println(" targetSubCode=" + targetSubCode); while (true) { - Debug.Assert(nextEnt < entCount); + Debugging.Assert(() => nextEnt < entCount); nextEnt++; int code = suffixesReader.ReadVInt32(); suffixesReader.SkipBytes(isLeafBlock ? code : (int)((uint)code >> 1)); @@ -3136,7 +3134,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) { // if (DEBUG) System.out.println(" scanToTermLeaf: block fp=" + fp + " prefix=" + prefix + " nextEnt=" + nextEnt + " (of " + entCount + ") target=" + brToString(target) + " term=" + brToString(term)); - Debug.Assert(nextEnt != -1); + Debugging.Assert(() => nextEnt != -1); outerInstance.termExists = true; subCode = 0; @@ -3150,7 +3148,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) return SeekStatus.END; } - Debug.Assert(PrefixMatches(target)); + Debugging.Assert(() => PrefixMatches(target)); // Loop over each entry (term or sub-block) in this block: //nextTerm: while(nextEnt < entCount) { @@ -3189,7 +3187,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) } else { - Debug.Assert(targetPos == targetLimit); + Debugging.Assert(() => targetPos == targetLimit); cmp = termLen - target.Length; stop = true; } @@ -3245,7 +3243,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) // would have followed the index to this // sub-block from the start: - Debug.Assert(outerInstance.termExists); + Debugging.Assert(() => outerInstance.termExists); FillTerm(); //if (DEBUG) System.out.println(" found!"); return SeekStatus.FOUND; @@ -3282,7 +3280,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) { //if (DEBUG) System.out.println(" scanToTermNonLeaf: block fp=" + fp + " prefix=" + prefix + " nextEnt=" + nextEnt + " (of " + entCount + ") target=" + brToString(target) + " term=" + brToString(term)); - Debug.Assert(nextEnt != -1); + Debugging.Assert(() => nextEnt != -1); if (nextEnt == entCount) { @@ -3294,7 +3292,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) return SeekStatus.END; } - Debug.Assert(PrefixMatches(target)); + Debugging.Assert(() => PrefixMatches(target)); // Loop over each entry (term or sub-block) in this block: //nextTerm: while(nextEnt < entCount) { @@ -3344,7 +3342,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) } else { - Debug.Assert(targetPos == targetLimit); + Debugging.Assert(() => targetPos == targetLimit); cmp = termLen - target.Length; stop = true; } @@ -3401,7 +3399,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) // would have followed the 
index to this // sub-block from the start: - Debug.Assert(outerInstance.termExists); + Debugging.Assert(() => outerInstance.termExists); FillTerm(); //if (DEBUG) System.out.println(" found!"); return SeekStatus.FOUND; diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs index 594937cde9..424bd4ebdd 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs @@ -1,4 +1,5 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util.Fst; using System; @@ -258,9 +259,9 @@ private class FieldMetaData public FieldMetaData(FieldInfo fieldInfo, BytesRef rootCode, long numTerms, long indexStartFP, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize) { - Debug.Assert(numTerms > 0); + Debugging.Assert(() => numTerms > 0); this.FieldInfo = fieldInfo; - Debug.Assert(rootCode != null, "field=" + fieldInfo.Name + " numTerms=" + numTerms); + Debugging.Assert(() => rootCode != null, () => "field=" + fieldInfo.Name + " numTerms=" + numTerms); this.RootCode = rootCode; this.IndexStartFP = indexStartFP; this.NumTerms = numTerms; @@ -367,14 +368,14 @@ public override TermsConsumer AddField(FieldInfo field) { //DEBUG = field.name.Equals("id", StringComparison.Ordinal); //if (DEBUG) System.out.println("\nBTTW.addField seg=" + segment + " field=" + field.name); - Debug.Assert(currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); + Debugging.Assert(() => currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); currentField = field; return new TermsWriter(this, field); } internal static long EncodeOutput(long fp, bool hasTerms, bool isFloor) { - Debug.Assert(fp < (1L << 62)); + Debugging.Assert(() => fp < (1L << 62)); return (fp << 2) | (uint)(hasTerms ? OUTPUT_FLAG_HAS_TERMS : 0) | (uint)(isFloor ? OUTPUT_FLAG_IS_FLOOR : 0); } @@ -480,11 +481,11 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc // LUCENENET specific - we use a custom wrapper function to display floorBlocks, since // it might contain garbage that cannot be converted into text. This is compiled out // of the release, though. - Debug.Assert( - (IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null), - "isFloor=" + IsFloor + " floorBlocks=" + ToString(floorBlocks)); + Debugging.Assert( + () => (IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null), + () => "isFloor=" + IsFloor + " floorBlocks=" + ToString(floorBlocks)); - Debug.Assert(scratchBytes.GetFilePointer() == 0); + Debugging.Assert(() => scratchBytes.GetFilePointer() == 0); // TODO: try writing the leading vLong in MSB order // (opposite of what Lucene does today), for better @@ -495,12 +496,12 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc scratchBytes.WriteVInt32(floorBlocks.Count); foreach (PendingBlock sub in floorBlocks) { - Debug.Assert(sub.FloorLeadByte != -1); + Debugging.Assert(() => sub.FloorLeadByte != -1); //if (DEBUG) { // System.out.println(" write floorLeadByte=" + Integer.toHexString(sub.floorLeadByte&0xff)); //} scratchBytes.WriteByte((byte)(sbyte)sub.FloorLeadByte); - Debug.Assert(sub.Fp > Fp); + Debugging.Assert(() => sub.Fp > Fp); scratchBytes.WriteVInt64((sub.Fp - Fp) << 1 | (uint)(sub.HasTerms ?
1 : 0)); } } @@ -508,7 +509,7 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc ByteSequenceOutputs outputs = ByteSequenceOutputs.Singleton; Builder indexBuilder = new Builder(FST.INPUT_TYPE.BYTE1, 0, 0, true, false, int.MaxValue, outputs, null, false, PackedInt32s.COMPACT, true, 15); var bytes = new byte[(int)scratchBytes.GetFilePointer()]; - Debug.Assert(bytes.Length > 0); + Debugging.Assert(() => bytes.Length > 0); scratchBytes.WriteTo(bytes, 0); indexBuilder.Add(Util.ToInt32sRef(Prefix, scratchIntsRef), new BytesRef(bytes, 0, bytes.Length)); scratchBytes.Reset(); @@ -727,8 +728,8 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun // Suffix is 0, ie prefix 'foo' and term is // 'foo' so the term has empty string suffix // in this block - Debug.Assert(lastSuffixLeadLabel == -1); - Debug.Assert(numSubs == 0); + Debugging.Assert(() => lastSuffixLeadLabel == -1); + Debugging.Assert(() => numSubs == 0); suffixLeadLabel = -1; } else @@ -739,7 +740,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun else { PendingBlock block = (PendingBlock)ent; - Debug.Assert(block.Prefix.Length > prefixLength); + Debugging.Assert(() => block.Prefix.Length > prefixLength); suffixLeadLabel = block.Prefix.Bytes[block.Prefix.Offset + prefixLength] & 0xff; } @@ -860,7 +861,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun //System.out.println(" = " + pendingCount); pendingCount = 0; - Debug.Assert(outerInstance.minItemsInBlock == 1 || subCount > 1, "minItemsInBlock=" + outerInstance.minItemsInBlock + " subCount=" + subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength); + Debugging.Assert(() => outerInstance.minItemsInBlock == 1 || subCount > 1, () => "minItemsInBlock=" + outerInstance.minItemsInBlock + " subCount=" + subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength); subCount = 0; startLabel = subBytes[sub + 1]; @@ -875,8 +876,8 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun // block. 
NOTE that this may be too small (< // minItemsInBlock); need a true segmenter // here - Debug.Assert(startLabel != -1); - Debug.Assert(firstBlock != null); + Debugging.Assert(() => startLabel != -1); + Debugging.Assert(() => firstBlock != null); prevTerm.Int32s[prevTerm.Offset + prefixLength] = startLabel; //System.out.println(" final " + (numSubs-sub-1) + " subs"); /* @@ -896,7 +897,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun prevTerm.Int32s[prevTerm.Offset + prefixLength] = savLabel; - Debug.Assert(firstBlock != null); + Debugging.Assert(() => firstBlock != null); firstBlock.CompileIndex(floorBlocks, outerInstance.scratchBytes); pending.Add(firstBlock); @@ -925,11 +926,11 @@ private string ToString(BytesRef b) // block: private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexPrefixLength, int startBackwards, int length, int futureTermCount, bool isFloor, int floorLeadByte, bool isLastInFloor) { - Debug.Assert(length > 0); + Debugging.Assert(() => length > 0); int start = pending.Count - startBackwards; - Debug.Assert(start >= 0, "pending.Count=" + pending.Count + " startBackwards=" + startBackwards + " length=" + length); + Debugging.Assert(() => start >= 0, () => "pending.Count=" + pending.Count + " startBackwards=" + startBackwards + " length=" + length); IList slice = pending.SubList(start, start + length); @@ -988,7 +989,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP subIndices = null; foreach (PendingEntry ent in slice) { - Debug.Assert(ent.IsTerm); + Debugging.Assert(() => ent.IsTerm); PendingTerm term = (PendingTerm)ent; BlockTermState state = term.State; int suffix = term.Term.Length - prefixLength; @@ -1006,7 +1007,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP statsWriter.WriteVInt32(state.DocFreq); if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY) { - Debug.Assert(state.TotalTermFreq >= state.DocFreq, state.TotalTermFreq + " vs " + state.DocFreq); + Debugging.Assert(() => state.TotalTermFreq >= state.DocFreq, () => state.TotalTermFreq + " vs " + state.DocFreq); statsWriter.WriteVInt64(state.TotalTermFreq - state.DocFreq); } @@ -1014,7 +1015,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP outerInstance.postingsWriter.EncodeTerm(longs, bytesWriter, fieldInfo, state, absolute); for (int pos = 0; pos < longsSize; pos++) { - Debug.Assert(longs[pos] >= 0); + Debugging.Assert(() => longs[pos] >= 0); metaWriter.WriteVInt64(longs[pos]); } bytesWriter.WriteTo(metaWriter); @@ -1049,7 +1050,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP statsWriter.WriteVInt32(state.DocFreq); if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY) { - Debug.Assert(state.TotalTermFreq >= state.DocFreq); + Debugging.Assert(() => state.TotalTermFreq >= state.DocFreq); statsWriter.WriteVInt64(state.TotalTermFreq - state.DocFreq); } @@ -1065,7 +1066,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP outerInstance.postingsWriter.EncodeTerm(longs, bytesWriter, fieldInfo, state, absolute); for (int pos = 0; pos < longsSize; pos++) { - Debug.Assert(longs[pos] >= 0); + Debugging.Assert(() => longs[pos] >= 0); metaWriter.WriteVInt64(longs[pos]); } bytesWriter.WriteTo(metaWriter); @@ -1079,13 +1080,13 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP PendingBlock block = (PendingBlock)ent; int suffix = block.Prefix.Length - 
prefixLength; - Debug.Assert(suffix > 0); + Debugging.Assert(() => suffix > 0); // For non-leaf block we borrow 1 bit to record // if entry is term or sub-block suffixWriter.WriteVInt32((suffix << 1) | 1); suffixWriter.WriteBytes(block.Prefix.Bytes, prefixLength, suffix); - Debug.Assert(block.Fp < startFP); + Debugging.Assert(() => block.Fp < startFP); // if (DEBUG) { // BytesRef suffixBytes = new BytesRef(suffix); @@ -1099,7 +1100,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP } } - Debug.Assert(subIndices.Count != 0); + Debugging.Assert(() => subIndices.Count != 0); } // TODO: we could block-write the term suffix pointers; @@ -1178,7 +1179,7 @@ public override PostingsConsumer StartTerm(BytesRef text) public override void FinishTerm(BytesRef text, TermStats stats) { - Debug.Assert(stats.DocFreq > 0); + Debugging.Assert(() => stats.DocFreq > 0); //if (DEBUG) System.out.println("BTTW.finishTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment + " df=" + stats.docFreq); blockBuilder.Add(Util.ToInt32sRef(text, scratchIntsRef), noOutputs.NoOutput); @@ -1200,10 +1201,10 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount blockBuilder.Finish(); // We better have one final "root" block: - Debug.Assert(pending.Count == 1 && !pending[0].IsTerm, "pending.size()=" + pending.Count + " pending=" + pending); + Debugging.Assert(() => pending.Count == 1 && !pending[0].IsTerm, () => "pending.size()=" + pending.Count + " pending=" + pending); PendingBlock root = (PendingBlock)pending[0]; - Debug.Assert(root.Prefix.Length == 0); - Debug.Assert(root.Index.EmptyOutput != null); + Debugging.Assert(() => root.Prefix.Length == 0); + Debugging.Assert(() => root.Index.EmptyOutput != null); this.sumTotalTermFreq = sumTotalTermFreq; this.sumDocFreq = sumDocFreq; @@ -1226,9 +1227,9 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount } else { - Debug.Assert(sumTotalTermFreq == 0 || fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY && sumTotalTermFreq == -1); - Debug.Assert(sumDocFreq == 0); - Debug.Assert(docCount == 0); + Debugging.Assert(() => sumTotalTermFreq == 0 || fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY && sumTotalTermFreq == -1); + Debugging.Assert(() => sumDocFreq == 0); + Debugging.Assert(() => docCount == 0); } } diff --git a/src/Lucene.Net/Codecs/CodecUtil.cs b/src/Lucene.Net/Codecs/CodecUtil.cs index 8985fae2af..648b032464 100644 --- a/src/Lucene.Net/Codecs/CodecUtil.cs +++ b/src/Lucene.Net/Codecs/CodecUtil.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Util; using System; @@ -267,7 +268,7 @@ public static long ChecksumEntireFile(IndexInput input) IndexInput clone = (IndexInput)input.Clone(); clone.Seek(0); ChecksumIndexInput @in = new BufferedChecksumIndexInput(clone); - Debug.Assert(@in.GetFilePointer() == 0); + Debugging.Assert(() => @in.GetFilePointer() == 0); @in.Seek(@in.Length - FooterLength()); return CheckFooter(@in); } diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs index d45adb3d1c..05fd3e8fb2 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Diagnostics; @@ -105,7 +106,7 @@ private void Reset() private void WriteBlock() { - 
Debug.Assert(blockChunks > 0); + Debugging.Assert(() => blockChunks > 0); fieldsIndexOut.WriteVInt32(blockChunks); // The trick here is that we only store the difference from the average start @@ -143,7 +144,7 @@ private void WriteBlock() for (int i = 0; i < blockChunks; ++i) { long delta = docBase - avgChunkDocs * i; - Debug.Assert(PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue); + Debugging.Assert(() => PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue); writer.Add(MoveSignToLowOrderBit(delta)); docBase += docBaseDeltas[i]; } @@ -178,7 +179,7 @@ private void WriteBlock() { startPointer += startPointerDeltas[i]; long delta = startPointer - avgChunkSize * i; - Debug.Assert(PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue); + Debugging.Assert(() => PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue); writer.Add(MoveSignToLowOrderBit(delta)); } writer.Finish(); @@ -196,7 +197,7 @@ internal void WriteIndex(int numDocs, long startPointer) { firstStartPointer = maxStartPointer = startPointer; } - Debug.Assert(firstStartPointer > 0 && startPointer >= firstStartPointer); + Debugging.Assert(() => firstStartPointer > 0 && startPointer >= firstStartPointer); docBaseDeltas[blockChunks] = numDocs; startPointerDeltas[blockChunks] = startPointer - maxStartPointer; diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs index 130f2dc347..46eb4f4b28 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs @@ -1,4 +1,5 @@ using Lucene.Net.Codecs.Lucene40; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Diagnostics; @@ -100,7 +101,7 @@ public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segment indexStream = d.OpenChecksumInput(indexStreamFN, context); string codecNameIdx = formatName + CompressingStoredFieldsWriter.CODEC_SFX_IDX; version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT); - Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); indexReader = new CompressingStoredFieldsIndexReader(indexStream, si); long maxPointer = -1; @@ -139,7 +140,7 @@ public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segment { throw new CorruptIndexException("Version mismatch between stored fields index and data: " + version + " != " + fieldsVersion); } - Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS) { @@ -332,8 +333,8 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor) DataInput documentInput; if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS && totalLength >= 2 * chunkSize) { - Debug.Assert(chunkSize > 0); - Debug.Assert(offset < chunkSize); + Debugging.Assert(() => chunkSize > 0); + Debugging.Assert(() => offset < chunkSize); decompressor.Decompress(fieldsStream, chunkSize, offset, Math.Min(length, chunkSize - offset), bytes); documentInput = new DataInputAnonymousInnerClassHelper(this, 
offset, length); @@ -342,7 +343,7 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor) { BytesRef bytes = totalLength <= BUFFER_REUSE_THRESHOLD ? this.bytes : new BytesRef(); decompressor.Decompress(fieldsStream, totalLength, offset, length, bytes); - Debug.Assert(bytes.Length == length); + Debugging.Assert(() => bytes.Length == length); documentInput = new ByteArrayDataInput(bytes.Bytes, bytes.Offset, bytes.Length); } @@ -353,7 +354,7 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor) FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber); int bits = (int)(infoAndBits & CompressingStoredFieldsWriter.TYPE_MASK); - Debug.Assert(bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE, "bits=" + bits.ToString("x")); + Debugging.Assert(() => bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE, () => "bits=" + bits.ToString("x")); switch (visitor.NeedsField(fieldInfo)) { @@ -390,7 +391,7 @@ public DataInputAnonymousInnerClassHelper(CompressingStoredFieldsReader outerIns internal virtual void FillBuffer() { - Debug.Assert(decompressed <= length); + Debugging.Assert(() => decompressed <= length); if (decompressed == length) { throw new Exception(); @@ -488,7 +489,7 @@ internal int ChunkSize() /// internal void Next(int doc) { - Debug.Assert(doc >= this.docBase + this.chunkDocs, doc + " " + this.docBase + " " + this.chunkDocs); + Debugging.Assert(() => doc >= this.docBase + this.chunkDocs, () => doc + " " + this.docBase + " " + this.chunkDocs); fieldsStream.Seek(outerInstance.indexReader.GetStartPointer(doc)); int docBase = fieldsStream.ReadVInt32(); @@ -587,7 +588,7 @@ internal void Decompress() /// internal void CopyCompressedData(DataOutput @out) { - Debug.Assert(outerInstance.Version == CompressingStoredFieldsWriter.VERSION_CURRENT); + Debugging.Assert(() => outerInstance.Version == CompressingStoredFieldsWriter.VERSION_CURRENT); long chunkEnd = docBase + chunkDocs == outerInstance.numDocs ? outerInstance.maxPointer : outerInstance.indexReader.GetStartPointer(docBase + chunkDocs); @out.CopyBytes(fieldsStream, chunkEnd - fieldsStream.GetFilePointer()); } diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs index 587df3540a..b9a390067b 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs @@ -1,5 +1,6 @@ using J2N; using Lucene.Net.Codecs.Lucene40; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Store; @@ -90,7 +91,7 @@ public sealed class CompressingStoredFieldsWriter : StoredFieldsWriter /// Sole constructor. 
public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize) { - Debug.Assert(directory != null); + Debugging.Assert(() => directory != null); this.directory = directory; this.segment = si.Name; this.segmentSuffix = segmentSuffix; @@ -113,8 +114,8 @@ public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string string codecNameDat = formatName + CODEC_SFX_DAT; CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT); CodecUtil.WriteHeader(fieldsStream, codecNameDat, VERSION_CURRENT); - Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); - Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); indexWriter = new CompressingStoredFieldsIndexWriter(indexStream); indexStream = null; @@ -177,7 +178,7 @@ public override void FinishDocument() /// private static void SaveInt32s(int[] values, int length, DataOutput @out) { - Debug.Assert(length > 0); + Debugging.Assert(() => length > 0); if (length == 1) { @out.WriteVInt32(values[0]); @@ -245,7 +246,7 @@ private void Flush() for (int i = numBufferedDocs - 1; i > 0; --i) { lengths[i] = endOffsets[i] - endOffsets[i - 1]; - Debug.Assert(lengths[i] >= 0); + Debugging.Assert(() => lengths[i] >= 0); } WriteHeader(docBase, numBufferedDocs, numStoredFields, lengths); @@ -373,7 +374,7 @@ public override void Finish(FieldInfos fis, int numDocs) } else { - Debug.Assert(bufferedDocs.Length == 0); + Debugging.Assert(() => bufferedDocs.Length == 0); } if (docBase != numDocs) { @@ -381,7 +382,7 @@ public override void Finish(FieldInfos fis, int numDocs) } indexWriter.Finish(numDocs, fieldsStream.GetFilePointer()); CodecUtil.WriteFooter(fieldsStream); - Debug.Assert(bufferedDocs.Length == 0); + Debugging.Assert(() => bufferedDocs.Length == 0); } [MethodImpl(MethodImplOptions.NoInlining)] @@ -442,7 +443,7 @@ public override int Merge(MergeState mergeState) if (numBufferedDocs == 0 && startOffsets[it.chunkDocs - 1] < chunkSize && startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1] >= chunkSize && NextDeletedDoc(it.docBase, liveDocs, it.docBase + it.chunkDocs) == it.docBase + it.chunkDocs) // no deletion in the chunk - chunk is large enough - chunk is small enough - starting a new chunk { - Debug.Assert(docID == it.docBase); + Debugging.Assert(() => docID == it.docBase); // no need to decompress, just copy data indexWriter.WriteIndex(it.chunkDocs, fieldsStream.GetFilePointer()); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs index 1032ebe1a6..c8e4a25ce0 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; @@ -77,7 +78,7 @@ public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentS indexStream = d.OpenChecksumInput(indexStreamFN, context); string codecNameIdx = formatName + CompressingTermVectorsWriter.CODEC_SFX_IDX; version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingTermVectorsWriter.VERSION_START, 
CompressingTermVectorsWriter.VERSION_CURRENT); - Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); indexReader = new CompressingStoredFieldsIndexReader(indexStream, si); if (version >= CompressingTermVectorsWriter.VERSION_CHECKSUM) @@ -103,7 +104,7 @@ public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentS { throw new Exception("Version mismatch between stored fields index and data: " + version + " != " + version2); } - Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); packedIntsVersion = vectorsStream.ReadVInt32(); chunkSize = vectorsStream.ReadVInt32(); @@ -215,7 +216,7 @@ public override Fields Get(int doc) int[] fieldNums; { int token = vectorsStream.ReadByte() & 0xFF; - Debug.Assert(token != 0); // means no term vectors, cannot happen since we checked for numFields == 0 + Debugging.Assert(() => token != 0); // means no term vectors, cannot happen since we checked for numFields == 0 int bitsPerFieldNum = token & 0x1F; int totalDistinctFields = (int)((uint)token >> 5); if (totalDistinctFields == 0x07) @@ -245,7 +246,7 @@ public override Fields Get(int doc) for (int i = 0; i < totalFields; ++i) { int fieldNumOff = (int)allFieldNumOffs.Get(i); - Debug.Assert(fieldNumOff >= 0 && fieldNumOff < fieldNums.Length); + Debugging.Assert(() => fieldNumOff >= 0 && fieldNumOff < fieldNums.Length); int fgs = (int)fieldFlags.Get(fieldNumOff); f.Set(i, fgs); } @@ -382,7 +383,7 @@ public override Fields Get(int doc) totalPayloads += freq; } } - Debug.Assert(i != totalFields - 1 || termIndex == totalTerms, termIndex + " " + totalTerms); + Debugging.Assert(() => i != totalFields - 1 || termIndex == totalTerms, () => termIndex + " " + totalTerms); } int[][] positionIndex = PositionIndex(skip, numFields, numTerms, termFreqs); @@ -515,7 +516,7 @@ public override Fields Get(int doc) ++posIdx; } } - Debug.Assert(posIdx == totalFreq); + Debugging.Assert(() => posIdx == totalFreq); } termIndex += termCount; } @@ -537,7 +538,7 @@ public override Fields Get(int doc) } termIndex += termCount; } - Debug.Assert(termIndex == totalTerms, termIndex + " " + totalTerms); + Debugging.Assert(() => termIndex == totalTerms, () => termIndex + " " + totalTerms); } // decompress data @@ -576,7 +577,7 @@ public override Fields Get(int doc) } } - Debug.Assert(Sum(fieldLengths) == docLen, Sum(fieldLengths) + " != " + docLen); + Debugging.Assert(() => Sum(fieldLengths) == docLen, () => Sum(fieldLengths) + " != " + docLen); return new TVFields(this, fieldNums, FieldFlags, fieldNumOffs, fieldNumTerms, fieldLengths, prefixLengths, suffixLengths, fieldTermFreqs, positionIndex, positions, startOffsets, lengths, payloadBytes, payloadIndex, suffixBytes); } @@ -731,7 +732,7 @@ public override Terms GetTerms(string field) break; } } - Debug.Assert(fieldLen >= 0); + Debugging.Assert(() => fieldLen >= 0); return new TVTerms(outerInstance, numTerms[idx], fieldFlags[idx], prefixLengths[idx], suffixLengths[idx], termFreqs[idx], positionIndex[idx], positions[idx], startOffsets[idx], lengths[idx], payloadIndex[idx], payloadBytes, new BytesRef(suffixBytes.Bytes, suffixBytes.Offset + fieldOff, fieldLen)); } @@ -842,7 +843,7 @@ public override BytesRef Next() } else { - Debug.Assert(ord < numTerms); + Debugging.Assert(() => ord < numTerms); ++ord; } diff --git 
a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs index 9252c16d35..4c2d15a71f 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; @@ -244,7 +245,7 @@ internal virtual void AddPosition(int position, int startOffset, int length, int /// Sole constructor. public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize) { - Debug.Assert(directory != null); + Debugging.Assert(() => directory != null); this.directory = directory; this.segment = si.Name; this.segmentSuffix = segmentSuffix; @@ -268,8 +269,8 @@ public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string string codecNameDat = formatName + CODEC_SFX_DAT; CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT); CodecUtil.WriteHeader(vectorsStream, codecNameDat, VERSION_CURRENT); - Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); - Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); indexWriter = new CompressingStoredFieldsIndexWriter(indexStream); indexStream = null; @@ -350,7 +351,7 @@ public override void FinishField() public override void StartTerm(BytesRef term, int freq) { - Debug.Assert(freq >= 1); + Debugging.Assert(() => freq >= 1); int prefix = StringHelper.BytesDifference(lastTerm, term); curField.AddTerm(freq, prefix, term.Length - prefix); termSuffixes.WriteBytes(term.Bytes, term.Offset + prefix, term.Length - prefix); @@ -366,7 +367,7 @@ public override void StartTerm(BytesRef term, int freq) public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - Debug.Assert(curField.flags != 0); + Debugging.Assert(() => curField.flags != 0); curField.AddPosition(position, startOffset, endOffset - startOffset, payload == null ? 
0 : payload.Length); if (curField.hasPayloads && payload != null) { @@ -383,7 +384,7 @@ private bool TriggerFlush() private void Flush() { int chunkDocs = pendingDocs.Count; - Debug.Assert(chunkDocs > 0, chunkDocs.ToString()); + Debugging.Assert(() => chunkDocs > 0, chunkDocs.ToString); // write the index file indexWriter.WriteIndex(chunkDocs, vectorsStream.GetFilePointer()); @@ -463,7 +464,7 @@ private int[] FlushFieldNums() } int numDistinctFields = fieldNums.Count; - Debug.Assert(numDistinctFields > 0); + Debugging.Assert(() => numDistinctFields > 0); int bitsRequired = PackedInt32s.BitsRequired(fieldNums.Max); int token = (Math.Min(numDistinctFields - 1, 0x07) << 5) | bitsRequired; vectorsStream.WriteByte((byte)(sbyte)token); @@ -495,7 +496,7 @@ private void FlushFields(int totalFields, int[] fieldNums) foreach (FieldData fd in dd.fields) { int fieldNumIndex = Array.BinarySearch(fieldNums, fd.fieldNum); - Debug.Assert(fieldNumIndex >= 0); + Debugging.Assert(() => fieldNumIndex >= 0); writer.Add(fieldNumIndex); } } @@ -515,7 +516,7 @@ private void FlushFlags(int totalFields, int[] fieldNums) foreach (FieldData fd in dd.fields) { int fieldNumOff = Array.BinarySearch(fieldNums, fd.fieldNum); - Debug.Assert(fieldNumOff >= 0); + Debugging.Assert(() => fieldNumOff >= 0); if (fieldFlags[fieldNumOff] == -1) { fieldFlags[fieldNumOff] = fd.flags; @@ -537,10 +538,10 @@ private void FlushFlags(int totalFields, int[] fieldNums) PackedInt32s.Writer writer = PackedInt32s.GetWriterNoHeader(vectorsStream, PackedInt32s.Format.PACKED, fieldFlags.Length, FLAGS_BITS, 1); foreach (int flags in fieldFlags) { - Debug.Assert(flags >= 0); + Debugging.Assert(() => flags >= 0); writer.Add(flags); } - Debug.Assert(writer.Ord == fieldFlags.Length - 1); + Debugging.Assert(() => writer.Ord == fieldFlags.Length - 1); writer.Finish(); } else @@ -555,7 +556,7 @@ private void FlushFlags(int totalFields, int[] fieldNums) writer.Add(fd.flags); } } - Debug.Assert(writer.Ord == totalFields - 1); + Debugging.Assert(() => writer.Ord == totalFields - 1); writer.Finish(); } } @@ -580,7 +581,7 @@ private void FlushNumTerms(int totalFields) writer.Add(fd.numTerms); } } - Debug.Assert(writer.Ord == totalFields - 1); + Debugging.Assert(() => writer.Ord == totalFields - 1); writer.Finish(); } @@ -648,7 +649,7 @@ private void FlushPositions() previousPosition = position; } } - Debug.Assert(pos == fd.totalPositions); + Debugging.Assert(() => pos == fd.totalPositions); } } } @@ -684,7 +685,7 @@ private void FlushOffsets(int[] fieldNums) ++pos; } } - Debug.Assert(pos == fd.totalPositions); + Debugging.Assert(() => pos == fd.totalPositions); } } } @@ -752,7 +753,7 @@ private void FlushOffsets(int[] fieldNums) writer.Add(lengthsBuf[fd.offStart + pos++] - fd.prefixLengths[i] - fd.suffixLengths[i]); } } - Debug.Assert(pos == fd.totalPositions); + Debugging.Assert(() => pos == fd.totalPositions); } } } @@ -796,8 +797,8 @@ public override void Finish(FieldInfos fis, int numDocs) public override void AddProx(int numProx, DataInput positions, DataInput offsets) { - Debug.Assert((curField.hasPositions) == (positions != null)); - Debug.Assert((curField.hasOffsets) == (offsets != null)); + Debugging.Assert(() => (curField.hasPositions) == (positions != null)); + Debugging.Assert(() => (curField.hasOffsets) == (offsets != null)); if (curField.hasPositions) { @@ -919,7 +920,7 @@ public override int Merge(MergeState mergeState) { int docBase = vectorsStream.ReadVInt32(); int chunkDocs = vectorsStream.ReadVInt32(); - Debug.Assert(docBase + chunkDocs <= 
matchingSegmentReader.MaxDoc); + Debugging.Assert(() => docBase + chunkDocs <= matchingSegmentReader.MaxDoc); if (docBase + chunkDocs < matchingSegmentReader.MaxDoc && NextDeletedDoc(docBase, liveDocs, docBase + chunkDocs) == docBase + chunkDocs) { long chunkEnd = index.GetStartPointer(docBase + chunkDocs); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs index 976c72909a..5ca2019787 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System.Diagnostics; using System.IO; using System.IO.Compression; @@ -151,7 +152,7 @@ public DecompressorAnonymousInnerClassHelper() public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes) { - Debug.Assert(offset + length <= originalLength); + Debugging.Assert(() => offset + length <= originalLength); // add 7 padding bytes, this is not necessary but can help decompression run faster if (bytes.Bytes.Length < originalLength + 7) { @@ -211,7 +212,7 @@ internal DeflateDecompressor() public override void Decompress(DataInput input, int originalLength, int offset, int length, BytesRef bytes) { - Debug.Assert(offset + length <= originalLength); + Debugging.Assert(() => offset + length <= originalLength); if (length == 0) { bytes.Length = 0; @@ -272,7 +273,7 @@ public override void Compress(byte[] bytes, int off, int len, DataOutput output) if (resultArray.Length == 0) { - Debug.Assert(len == 0, len.ToString()); + Debugging.Assert(() => len == 0, len.ToString); output.WriteVInt32(0); return; } diff --git a/src/Lucene.Net/Codecs/Compressing/LZ4.cs b/src/Lucene.Net/Codecs/Compressing/LZ4.cs index 878ecdc5a8..cf1c493536 100644 --- a/src/Lucene.Net/Codecs/Compressing/LZ4.cs +++ b/src/Lucene.Net/Codecs/Compressing/LZ4.cs @@ -1,4 +1,5 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Diagnostics; @@ -75,7 +76,7 @@ private static bool ReadInt32Equals(byte[] buf, int i, int j) private static int CommonBytes(byte[] b, int o1, int o2, int limit) { - Debug.Assert(o1 < o2); + Debugging.Assert(() => o1 < o2); int count = 0; while (o2 < limit && b[o1++] == b[o2++]) { @@ -134,7 +135,7 @@ public static int Decompress(DataInput compressed, int decompressedLen, byte[] d var byte1 = compressed.ReadByte(); var byte2 = compressed.ReadByte(); int matchDec = (byte1 & 0xFF) | ((byte2 & 0xFF) << 8); - Debug.Assert(matchDec > 0); + Debugging.Assert(() => matchDec > 0); int matchLen = token & 0x0F; if (matchLen == 0x0F) @@ -202,14 +203,14 @@ private static void EncodeLastLiterals(byte[] bytes, int anchor, int literalLen, private static void EncodeSequence(byte[] bytes, int anchor, int matchRef, int matchOff, int matchLen, DataOutput @out) { int literalLen = matchOff - anchor; - Debug.Assert(matchLen >= 4); + Debugging.Assert(() => matchLen >= 4); // encode token int token = (Math.Min(literalLen, 0x0F) << 4) | Math.Min(matchLen - 4, 0x0F); EncodeLiterals(bytes, token, anchor, literalLen, @out); // encode match dec int matchDec = matchOff - matchRef; - Debug.Assert(matchDec > 0 && matchDec < 1 << 16); + Debugging.Assert(() => matchDec > 0 && matchDec < 1 << 16); @out.WriteByte((byte)(sbyte)matchDec); @out.WriteByte((byte)(sbyte)((int)((uint)matchDec >> 8))); @@ -274,7 +275,7 @@ public static void Compress(byte[] bytes, int off, int len, DataOutput @out, Has int v = ReadInt32(bytes, off); int h 
= Hash(v, hashLog); @ref = @base + (int)hashTable.Get(h); - Debug.Assert(PackedInt32s.BitsRequired(off - @base) <= hashTable.BitsPerValue); + Debugging.Assert(() => PackedInt32s.BitsRequired(off - @base) <= hashTable.BitsPerValue); hashTable.Set(h, off - @base); if (off - @ref < MAX_DISTANCE && ReadInt32(bytes, @ref) == v) { @@ -296,7 +297,7 @@ public static void Compress(byte[] bytes, int off, int len, DataOutput @out, Has // last literals int literalLen = end - anchor; - Debug.Assert(literalLen >= LAST_LITERALS || literalLen == len); + Debugging.Assert(() => literalLen >= LAST_LITERALS || literalLen == len); EncodeLastLiterals(bytes, anchor, end - anchor, @out); } @@ -364,7 +365,7 @@ private void AddHash(byte[] bytes, int off) int v = ReadInt32(bytes, off); int h = HashHC(v); int delta = off - hashTable[h]; - Debug.Assert(delta > 0, delta.ToString()); + Debugging.Assert(() => delta > 0, delta.ToString); if (delta >= MAX_DISTANCE) { delta = MAX_DISTANCE - 1; @@ -512,7 +513,7 @@ public static void CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou while (true) { - Debug.Assert(match1.start >= anchor); + Debugging.Assert(() => match1.start >= anchor); if (match1.End() >= mfLimit || !ht.InsertAndFindWiderMatch(src, match1.End() - 2, match1.start + 1, matchLimit, match1.len, match2)) { // no better match @@ -528,7 +529,7 @@ public static void CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou CopyTo(match0, match1); } } - Debug.Assert(match2.start > match1.start); + Debugging.Assert(() => match2.start > match1.start); if (match2.start - match1.start < 3) // First Match too small : removed { diff --git a/src/Lucene.Net/Codecs/DocValuesConsumer.cs b/src/Lucene.Net/Codecs/DocValuesConsumer.cs index 8282d03b1a..8cbd4a53e9 100644 --- a/src/Lucene.Net/Codecs/DocValuesConsumer.cs +++ b/src/Lucene.Net/Codecs/DocValuesConsumer.cs @@ -1,4 +1,5 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -485,7 +486,7 @@ private IEnumerable GetMergeSortedSetValuesEnumerable(OrdinalMap map, if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpto)) { - Debug.Assert(docIDUpto < currentReader.MaxDoc); + Debugging.Assert(() => docIDUpto < currentReader.MaxDoc); SortedSetDocValues dv = dvs[readerUpto]; dv.SetDocument(docIDUpto); ordUpto = ordLength = 0; @@ -515,7 +516,7 @@ internal class BitsFilteredTermsEnum : FilteredTermsEnum internal BitsFilteredTermsEnum(TermsEnum @in, Int64BitSet liveTerms) : base(@in, false) { - Debug.Assert(liveTerms != null); + Debugging.Assert(() => liveTerms != null); this.liveTerms = liveTerms; } diff --git a/src/Lucene.Net/Codecs/FieldsConsumer.cs b/src/Lucene.Net/Codecs/FieldsConsumer.cs index 1f906c792a..5c791189ab 100644 --- a/src/Lucene.Net/Codecs/FieldsConsumer.cs +++ b/src/Lucene.Net/Codecs/FieldsConsumer.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Diagnostics; using System.Runtime.CompilerServices; @@ -86,7 +87,7 @@ public virtual void Merge(MergeState mergeState, Fields fields) foreach (string field in fields) { FieldInfo info = mergeState.FieldInfos.FieldInfo(field); - Debug.Assert(info != null, "FieldInfo for field is null: " + field); + Debugging.Assert(() => info != null, () => "FieldInfo for field is null: " + field); Terms terms = fields.GetTerms(field); if (terms != null) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs index 27b637c275..94db36b0b8 
100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs @@ -1,4 +1,5 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using System; using System.Collections.Generic; @@ -163,7 +164,7 @@ public override int Count { get { - Debug.Assert(preTerms.Count == fields.Count); + Debugging.Assert(() => preTerms.Count == fields.Count); return fields.Count; } } @@ -248,7 +249,7 @@ public override bool HasOffsets get { // preflex doesn't support this - Debug.Assert(fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) < 0); + Debugging.Assert(() => fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) < 0); return false; } } @@ -303,11 +304,11 @@ private bool SeekToNonBMP(SegmentTermEnum te, BytesRef term, int pos) { int savLength = term.Length; - Debug.Assert(term.Offset == 0); + Debugging.Assert(() => term.Offset == 0); // The 3 bytes starting at downTo make up 1 // unicode character: - Debug.Assert(IsHighBMPChar(term.Bytes, pos)); + Debugging.Assert(() => IsHighBMPChar(term.Bytes, pos)); // NOTE: we cannot make this assert, because // AutomatonQuery legitimately sends us malformed UTF8 @@ -360,7 +361,7 @@ private bool SeekToNonBMP(SegmentTermEnum te, BytesRef term, int pos) // Now test if prefix is identical and we found // a non-BMP char at the same position: BytesRef b2 = t2.Bytes; - Debug.Assert(b2.Offset == 0); + Debugging.Assert(() => b2.Offset == 0); bool matches; if (b2.Length >= term.Length && IsNonBMPChar(b2.Bytes, pos)) @@ -464,8 +465,8 @@ private bool DoPop() Console.WriteLine(" try pop"); } - Debug.Assert(newSuffixStart <= prevTerm.Length); - Debug.Assert(newSuffixStart < scratchTerm.Length || newSuffixStart == 0); + Debugging.Assert(() => newSuffixStart <= prevTerm.Length); + Debugging.Assert(() => newSuffixStart < scratchTerm.Length || newSuffixStart == 0); if (prevTerm.Length > newSuffixStart && IsNonBMPChar(prevTerm.Bytes, newSuffixStart) && IsHighBMPChar(scratchTerm.Bytes, newSuffixStart)) { @@ -494,7 +495,7 @@ private bool DoPop() } BytesRef b2 = t2.Bytes; - Debug.Assert(b2.Offset == 0); + Debugging.Assert(() => b2.Offset == 0); // Set newSuffixStart -- we can't use // termEnum's since the above seek may have @@ -595,8 +596,8 @@ private void SurrogateDance() // this code assumes TermInfosReader/SegmentTermEnum // always use BytesRef.offset == 0 - Debug.Assert(prevTerm.Offset == 0); - Debug.Assert(scratchTerm.Offset == 0); + Debugging.Assert(() => prevTerm.Offset == 0); + Debugging.Assert(() => scratchTerm.Offset == 0); // Need to loop here because we may need to do multiple // pops, and possibly a continue in the end, ie: @@ -649,7 +650,7 @@ private void DoPushes() if (IsNonBMPChar(scratchTerm.Bytes, upTo) && (upTo > newSuffixStart || (upTo >= prevTerm.Length || (!IsNonBMPChar(prevTerm.Bytes, upTo) && !IsHighBMPChar(prevTerm.Bytes, upTo))))) { // A non-BMP char (4 bytes UTF8) starts here: - Debug.Assert(scratchTerm.Length >= upTo + 4); + Debugging.Assert(() => scratchTerm.Length >= upTo + 4); int savLength = scratchTerm.Length; scratch[0] = (sbyte)scratchTerm.Bytes[upTo]; @@ -697,7 +698,7 @@ private void DoPushes() if (t2 != null && t2.Field == internedFieldName) { BytesRef b2 = t2.Bytes; - Debug.Assert(b2.Offset == 0); + Debugging.Assert(() => b2.Offset == 0); if (b2.Length >= upTo + 3 && IsHighBMPChar(b2.Bytes, upTo)) { matches = true; @@ -822,7 +823,7 @@ public override SeekStatus SeekCeil(BytesRef term) TermInfosReader tis = 
outerInstance.TermsDict; Term t0 = new Term(fieldInfo.Name, term); - Debug.Assert(termEnum != null); + Debugging.Assert(() => termEnum != null); tis.SeekEnum(termEnum, t0, false); @@ -853,7 +854,7 @@ public override SeekStatus SeekCeil(BytesRef term) // find an E, try swapping in S, backwards: scratchTerm.CopyBytes(term); - Debug.Assert(scratchTerm.Offset == 0); + Debugging.Assert(() => scratchTerm.Offset == 0); for (int i = scratchTerm.Length - 1; i >= 0; i--) { @@ -902,7 +903,7 @@ public override SeekStatus SeekCeil(BytesRef term) } BytesRef br = t.Bytes; - Debug.Assert(br.Offset == 0); + Debugging.Assert(() => br.Offset == 0); SetNewSuffixStart(term, br); @@ -912,14 +913,14 @@ public override SeekStatus SeekCeil(BytesRef term) if (t2 == null || t2.Field != internedFieldName) { // PreFlex codec interns field names; verify: - Debug.Assert(t2 == null || !t2.Field.Equals(internedFieldName, StringComparison.Ordinal)); + Debugging.Assert(() => t2 == null || !t2.Field.Equals(internedFieldName, StringComparison.Ordinal)); current = null; return SeekStatus.END; } else { current = t2.Bytes; - Debug.Assert(!unicodeSortOrder || term.CompareTo(current) < 0, "term=" + UnicodeUtil.ToHexString(term.Utf8ToString()) + " vs current=" + UnicodeUtil.ToHexString(current.Utf8ToString())); + Debugging.Assert(() => !unicodeSortOrder || term.CompareTo(current) < 0, () => "term=" + UnicodeUtil.ToHexString(term.Utf8ToString()) + " vs current=" + UnicodeUtil.ToHexString(current.Utf8ToString())); return SeekStatus.NOT_FOUND; } } @@ -995,7 +996,7 @@ public override BytesRef Next() if (t == null || t.Field != internedFieldName) { // PreFlex codec interns field names; verify: - Debug.Assert(t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); + Debugging.Assert(() => t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); current = null; } else @@ -1020,7 +1021,7 @@ public override BytesRef Next() if (t == null || t.Field != internedFieldName) { // PreFlex codec interns field names; verify: - Debug.Assert(t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); + Debugging.Assert(() => t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); return null; } else @@ -1189,7 +1190,7 @@ public override int Advance(int target) public override int NextPosition() { - Debug.Assert(docID != NO_MORE_DOCS); + Debugging.Assert(() => docID != NO_MORE_DOCS); return pos.NextPosition(); } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs index c33416f84e..5e2c5d09de 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs @@ -1,5 +1,6 @@ using J2N.Threading.Atomic; using J2N.Runtime.CompilerServices; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -135,7 +136,7 @@ public Lucene3xNormsProducer(Directory dir, SegmentInfo info, FieldInfos fields, } } // TODO: change to a real check? see LUCENE-3619 - Debug.Assert(singleNormStream == null || nextNormSeek == singleNormStream.Length, singleNormStream != null ? "len: " + singleNormStream.Length + " expected: " + nextNormSeek : "null"); + Debugging.Assert(() => singleNormStream == null || nextNormSeek == singleNormStream.Length, () => singleNormStream != null ? 
"len: " + singleNormStream.Length + " expected: " + nextNormSeek : "null"); success = true; } finally @@ -187,7 +188,7 @@ private static bool HasSeparateNorms(SegmentInfo info, int number) } else { - Debug.Assert(Convert.ToInt64(v, CultureInfo.InvariantCulture) != SegmentInfo.NO); + Debugging.Assert(() => Convert.ToInt64(v, CultureInfo.InvariantCulture) != SegmentInfo.NO); return true; } } @@ -257,7 +258,7 @@ public override long Get(int docID) public override NumericDocValues GetNumeric(FieldInfo field) { var dv = norms[field.Name]; - Debug.Assert(dv != null); + Debugging.Assert(() => dv != null); return dv.Instance; } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs index c5bf9acc6f..9b8ee1528a 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs @@ -1,4 +1,5 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -192,7 +193,7 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index //System.out.println("version=" + version + " name=" + name + " docCount=" + docCount + " delGen=" + delGen + " dso=" + docStoreOffset + " dss=" + docStoreSegment + " dssCFs=" + docStoreIsCompoundFile + " b=" + b + " format=" + format); - Debug.Assert(1 == b, "expected 1 but was: " + b + " format: " + format); + Debugging.Assert(() => 1 == b, () => "expected 1 but was: " + b + " format: " + format); int numNormGen = input.ReadInt32(); IDictionary normGen; if (numNormGen == SegmentInfo.NO) @@ -210,7 +211,7 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index bool isCompoundFile = input.ReadByte() == SegmentInfo.YES; int delCount = input.ReadInt32(); - Debug.Assert(delCount <= docCount); + Debugging.Assert(() => delCount <= docCount); bool hasProx = input.ReadByte() == 1; @@ -282,7 +283,7 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index else { // We should have already hit indexformat too old exception - Debug.Assert(false); + Debugging.Assert(() => false); } } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs index cde76433d2..a9eb3e5fbe 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Diagnostics; using System.IO; @@ -192,7 +193,7 @@ public Lucene3xStoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IO // Verify the file is long enough to hold all of our // docs - Debug.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset); + Debugging.Assert(() => ((int)(indexSize / 8)) >= size + this.docStoreOffset, () => "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset); } else { @@ -272,7 +273,7 @@ public override sealed void VisitDocument(int n, StoredFieldVisitor visitor) FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber); int bits = fieldsStream.ReadByte() & 0xFF; - Debug.Assert(bits <= (FIELD_IS_NUMERIC_MASK | FIELD_IS_BINARY), "bits=" + bits.ToString("x")); + Debugging.Assert(() => bits <= (FIELD_IS_NUMERIC_MASK | FIELD_IS_BINARY), () => "bits=" + bits.ToString("x")); switch 
(visitor.NeedsField(fieldInfo)) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs index 19c9529876..7f2f5a65aa 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using System; using System.Collections; @@ -139,8 +140,8 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn tvf = d.OpenInput(fn, context); int tvfFormat = CheckValidFormat(tvf); - Debug.Assert(format == tvdFormat); - Debug.Assert(format == tvfFormat); + Debugging.Assert(() => format == tvdFormat); + Debugging.Assert(() => format == tvfFormat); numTotalDocs = (int)(tvx.Length >> 4); @@ -148,7 +149,7 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn { this.docStoreOffset = 0; this.size = numTotalDocs; - Debug.Assert(size == 0 || numTotalDocs == size); + Debugging.Assert(() => size == 0 || numTotalDocs == size); } else { @@ -156,7 +157,7 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn this.size = size; // Verify the file is long enough to hold all of our // docs - Debug.Assert(numTotalDocs >= size + docStoreOffset, "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset); + Debugging.Assert(() => numTotalDocs >= size + docStoreOffset, () => "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset); } this.fieldInfos = fieldInfos; @@ -232,7 +233,7 @@ public TVFields(Lucene3xTermVectorsReader outerInstance, int docID) outerInstance.tvd.Seek(outerInstance.tvx.ReadInt64()); int fieldCount = outerInstance.tvd.ReadVInt32(); - Debug.Assert(fieldCount >= 0); + Debugging.Assert(() => fieldCount >= 0); if (fieldCount != 0) { fieldNumbers = new int[fieldCount]; @@ -685,7 +686,7 @@ public override int Freq } else { - Debug.Assert(startOffsets != null); + Debugging.Assert(() => startOffsets != null); return startOffsets.Length; } } @@ -736,7 +737,7 @@ public override BytesRef GetPayload() public override int NextPosition() { - //Debug.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); + //Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); // LUCENENET: The above assertion was for control flow when testing. 
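The note above captures a real porting hazard, continued below: once asserts become a runtime switch, any caller that depends on the assertion throwing sees the check vanish whenever the switch is off. A minimal sketch of the failure mode, with illustrative names rather than the shipped API:

    using System;

    internal static class SwitchableAsserts
    {
        public static bool Enabled { get; set; } // off by default

        public static int NextPosition(int nextPos, int freq)
        {
            // A test that relies on this throw to detect "NextPosition() called
            // more than Freq times" breaks silently when Enabled is false: the
            // guard simply vanishes. Hence the invariant is documented in a
            // comment instead of being asserted.
            if (Enabled && nextPos >= freq)
                throw new InvalidOperationException("nextPos=" + nextPos);
            return nextPos + 1;
        }
    }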
In Java, it would throw an AssertionError, which is // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs index ba215e5133..691e1d9145 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Diagnostics; using IBits = Lucene.Net.Util.IBits; @@ -170,7 +171,7 @@ public virtual bool Next() else { freq = m_freqStream.ReadVInt32(); // else read freq - Debug.Assert(freq != 1); + Debugging.Assert(() => freq != 1); } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs index 1527672d2f..af78aa04f3 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Diagnostics; using FieldInfos = Lucene.Net.Index.FieldInfos; @@ -104,8 +105,8 @@ public SegmentTermEnum(IndexInput i, FieldInfos fis, bool isi) indexInterval = input.ReadInt32(); skipInterval = input.ReadInt32(); maxSkipLevels = input.ReadInt32(); - Debug.Assert(indexInterval > 0, "indexInterval=" + indexInterval + " is negative; must be > 0"); - Debug.Assert(skipInterval > 0, "skipInterval=" + skipInterval + " is negative; must be > 0"); + Debugging.Assert(() => indexInterval > 0, () => "indexInterval=" + indexInterval + " is negative; must be > 0"); + Debugging.Assert(() => skipInterval > 0, () => "skipInterval=" + skipInterval + " is negative; must be > 0"); } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs index 550a13fe06..87d74684f5 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using System; using System.Diagnostics; @@ -168,7 +169,7 @@ protected internal override void SkipProx(long proxPointer, int payloadLength) private void SkipPositions(int n) { - Debug.Assert(m_indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + Debugging.Assert(() => m_indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); for (int f = n; f > 0; f--) // skip unread positions { ReadDeltaPosition(); diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs b/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs index 8a6314a2e8..045c149608 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs @@ -1,4 +1,5 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; @@ -69,7 +70,7 @@ public void Read(IndexInput input, FieldInfos fieldInfos) newSuffixStart = input.ReadVInt32(); int length = input.ReadVInt32(); int totalLength = newSuffixStart + length; - Debug.Assert(totalLength <= ByteBlockPool.BYTE_BLOCK_SIZE - 2, "termLength=" + totalLength + ",resource=" + input); + Debugging.Assert(() => totalLength <= ByteBlockPool.BYTE_BLOCK_SIZE - 2, () => "termLength=" + totalLength + ",resource=" + input); if (bytes.Bytes.Length < totalLength) { bytes.Grow(totalLength); @@ -87,15 +88,15 @@ public void Read(IndexInput input, FieldInfos fieldInfos) } else { - Debug.Assert(fieldInfos.FieldInfo(currentFieldNumber) != null, 
currentFieldNumber.ToString()); + Debugging.Assert(() => fieldInfos.FieldInfo(currentFieldNumber) != null, currentFieldNumber.ToString); field = fieldInfos.FieldInfo(currentFieldNumber).Name.Intern(); } } else { - Debug.Assert(field.Equals(fieldInfos.FieldInfo(fieldNumber).Name, StringComparison.Ordinal), - "currentFieldNumber=" + currentFieldNumber + + Debugging.Assert(() => field.Equals(fieldInfos.FieldInfo(fieldNumber).Name, StringComparison.Ordinal), + () => "currentFieldNumber=" + currentFieldNumber + " field=" + field + " vs " + fieldInfos.FieldInfo(fieldNumber) == null ? "null" : fieldInfos.FieldInfo(fieldNumber).Name); } diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs index 8b7422b05a..cd77fbc8de 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs @@ -1,4 +1,5 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; @@ -64,7 +65,7 @@ public sealed class TermInfoAndOrd : TermInfo public TermInfoAndOrd(TermInfo ti, long termOrd) : base(ti) { - Debug.Assert(termOrd >= 0); + Debugging.Assert(() => termOrd >= 0); this.termOrd = termOrd; } } @@ -292,8 +293,8 @@ internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, TermInfoAndOrd } else { - Debug.Assert(SameTermInfo(ti, tiOrd, enumerator)); - Debug.Assert(enumerator.position == tiOrd.termOrd); + Debugging.Assert(() => SameTermInfo(ti, tiOrd, enumerator)); + Debugging.Assert(() => enumerator.position == tiOrd.termOrd); } } } @@ -334,8 +335,8 @@ internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, TermInfoAndOrd } else { - Debug.Assert(SameTermInfo(ti_, tiOrd, enumerator)); - Debug.Assert(enumerator.position == tiOrd.termOrd); + Debugging.Assert(() => SameTermInfo(ti_, tiOrd, enumerator)); + Debugging.Assert(() => enumerator.position == tiOrd.termOrd); } } else diff --git a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs index 5d7dfbd288..e8265ce932 100644 --- a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs +++ b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Diagnostics; @@ -125,7 +126,7 @@ public bool GetAndSet(int bit) if (count != -1) { count++; - Debug.Assert(count <= size); + Debugging.Assert(() => count <= size); } return false; } @@ -162,7 +163,7 @@ public bool GetAndClear(int bit) if (count != -1) { count--; - Debug.Assert(count >= 0); + Debugging.Assert(() => count >= 0); } return true; } @@ -174,7 +175,7 @@ public bool GetAndClear(int bit) /// public bool Get(int bit) { - Debug.Assert(bit >= 0 && bit < size, "bit " + bit + " is out of bounds 0.." + (size - 1)); + Debugging.Assert(() => bit >= 0 && bit < size, () => "bit " + bit + " is out of bounds 0.." + (size - 1)); return (bits[bit >> 3] & (1 << (bit & 7))) != 0; } @@ -201,7 +202,7 @@ public bool Get(int bit) /// computed and cached, so that, if the vector is not changed, no /// recomputation is done for repeated calls. 
/// - public int Count() + public int Count() // LUCENENET TODO: API - make into a property { // if the vector has been modified if (count == -1) @@ -214,7 +215,7 @@ public int Count() } count = c; } - Debug.Assert(count <= size, "count=" + count + " size=" + size); + Debugging.Assert(() => count <= size, () => "count=" + count + " size=" + size); return count; } @@ -258,7 +259,7 @@ public int GetRecomputedCount() /// public void Write(Directory d, string name, IOContext context) { - Debug.Assert(!(d is CompoundFileDirectory)); + Debugging.Assert(() => !(d is CompoundFileDirectory)); IndexOutput output = d.CreateOutput(name, context); try { @@ -275,7 +276,7 @@ public void Write(Directory d, string name, IOContext context) } CodecUtil.WriteFooter(output); bool verified = VerifyCount(); - Debug.Assert(verified); + Debugging.Assert(() => verified); } finally { @@ -351,7 +352,7 @@ private void WriteClearedDgaps(IndexOutput output) output.WriteByte(bits[i]); last = i; numCleared -= (8 - BitUtil.BitCount(bits[i])); - Debug.Assert(numCleared >= 0 || (i == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); + Debugging.Assert(() => numCleared >= 0 || (i == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); } } } @@ -460,8 +461,7 @@ public BitVector(Directory d, string name, IOContext context) CodecUtil.CheckEOF(input); #pragma warning restore 612, 618 } - bool verified = VerifyCount(); - Debug.Assert(verified); + Debugging.Assert(VerifyCount); } finally { @@ -472,11 +472,11 @@ public BitVector(Directory d, string name, IOContext context) // asserts only private bool VerifyCount() { - Debug.Assert(count != -1); + Debugging.Assert(() => count != -1); int countSav = count; count = -1; bool checkCount = countSav == Count(); - Debug.Assert(checkCount, "saved count was " + countSav + " but recomputed count is " + count); + Debugging.Assert(() => checkCount, () => "saved count was " + countSav + " but recomputed count is " + count); return true; } @@ -503,7 +503,7 @@ private void ReadSetDgaps(IndexInput input) last += input.ReadVInt32(); bits[last] = input.ReadByte(); n -= BitUtil.BitCount(bits[last]); - Debug.Assert(n >= 0); + Debugging.Assert(() => n >= 0); } } @@ -526,7 +526,7 @@ private void ReadClearedDgaps(IndexInput input) last += input.ReadVInt32(); bits[last] = input.ReadByte(); numCleared -= 8 - BitUtil.BitCount(bits[last]); - Debug.Assert(numCleared >= 0 || (last == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); + Debugging.Assert(() => numCleared >= 0 || (last == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); } } } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs index fd0efe58ee..98dc1a6c2a 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System.Collections.Generic; using System.Diagnostics; using System.Runtime.CompilerServices; @@ -91,9 +92,8 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont { string filename = IndexFileNames.FileNameFromGeneration(info.Info.Name, DELETES_EXTENSION, info.DelGen); BitVector liveDocs = new BitVector(dir, filename, context); - int liveDocsCount = liveDocs.Count(); - Debug.Assert(liveDocsCount == info.Info.DocCount - info.DelCount, "liveDocs.count()=" + liveDocsCount + " info.docCount=" + info.Info.DocCount + " info.getDelCount()=" + info.DelCount); - Debug.Assert(liveDocs.Length == 
info.Info.DocCount); + Debugging.Assert(() => liveDocs.Count() == info.Info.DocCount - info.DelCount, () => "liveDocs.count()=" + liveDocs.Count() + " info.docCount=" + info.Info.DocCount + " info.getDelCount()=" + info.DelCount); + Debugging.Assert(() => liveDocs.Length == info.Info.DocCount); return liveDocs; } @@ -102,9 +102,8 @@ public override void WriteLiveDocs(IMutableBits bits, Directory dir, SegmentComm { string filename = IndexFileNames.FileNameFromGeneration(info.Info.Name, DELETES_EXTENSION, info.NextDelGen); BitVector liveDocs = (BitVector)bits; - int liveDocsCount = liveDocs.Count(); - Debug.Assert(liveDocsCount == info.Info.DocCount - info.DelCount - newDelCount); - Debug.Assert(liveDocs.Length == info.Info.DocCount); + Debugging.Assert(() => liveDocs.Count() == info.Info.DocCount - info.DelCount - newDelCount); + Debugging.Assert(() => liveDocs.Length == info.Info.DocCount); liveDocs.Write(dir, filename, context); } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs index 84f4b70065..b19545dc3a 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Diagnostics; @@ -234,7 +235,7 @@ private Lucene40PostingsFormat(int minBlockSize, int maxBlockSize) : base() { this.m_minBlockSize = minBlockSize; - Debug.Assert(minBlockSize > 1); + Debugging.Assert(() => minBlockSize > 1); this.m_maxBlockSize = maxBlockSize; } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs index df2549da53..c2a5f96a00 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using System; @@ -192,13 +193,13 @@ public override void DecodeTerm(long[] longs, DataInput @in, FieldInfo fieldInfo System.out.println(" freqFP=" + termState2.freqOffset); } */ - Debug.Assert(termState2.freqOffset < freqIn.Length); + Debugging.Assert(() => termState2.freqOffset < freqIn.Length); if (termState2.DocFreq >= skipMinimum) { termState2.skipOffset = @in.ReadVInt64(); // if (DEBUG) System.out.println(" skipOffset=" + termState2.skipOffset + " vs freqIn.length=" + freqIn.length()); - Debug.Assert(termState2.freqOffset + termState2.skipOffset < freqIn.Length); + Debugging.Assert(() => termState2.freqOffset + termState2.skipOffset < freqIn.Length); } else { @@ -355,7 +356,7 @@ internal virtual DocsEnum Reset(FieldInfo fieldInfo, StandardTermState termState // cases freqIn.Seek(termState.freqOffset); m_limit = termState.DocFreq; - Debug.Assert(m_limit > 0); + Debugging.Assert(() => m_limit > 0); m_ord = 0; m_doc = -1; m_accum = 0; @@ -545,7 +546,7 @@ internal AllDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInput : base(outerInstance, startFreqIn, null) { this.outerInstance = outerInstance; - Debug.Assert(m_liveDocs == null); + Debugging.Assert(() => m_liveDocs == null); } public override int NextDoc() @@ -638,7 +639,7 @@ internal LiveDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInpu : base(outerInstance, startFreqIn, liveDocs) { this.outerInstance = outerInstance; - Debug.Assert(liveDocs != null); + Debugging.Assert(() => liveDocs != null); } public override int NextDoc() @@ -782,8 +783,8 @@ public 
SegmentDocsAndPositionsEnum(Lucene40PostingsReader outerInstance, IndexIn public SegmentDocsAndPositionsEnum Reset(FieldInfo fieldInfo, StandardTermState termState, IBits liveDocs) { - Debug.Assert(fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); - Debug.Assert(!fieldInfo.HasPayloads); + Debugging.Assert(() => fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + Debugging.Assert(() => !fieldInfo.HasPayloads); this.liveDocs = liveDocs; @@ -794,7 +795,7 @@ public SegmentDocsAndPositionsEnum Reset(FieldInfo fieldInfo, StandardTermState lazyProxPointer = termState.proxOffset; limit = termState.DocFreq; - Debug.Assert(limit > 0); + Debugging.Assert(() => limit > 0); ord = 0; doc = -1; @@ -929,7 +930,7 @@ public override int NextPosition() posPendingCount--; - Debug.Assert(posPendingCount >= 0, "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount); + Debugging.Assert(() => posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount); return position; } @@ -1002,8 +1003,8 @@ public virtual SegmentFullPositionsEnum Reset(FieldInfo fieldInfo, StandardTermS { storeOffsets = fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; storePayloads = fieldInfo.HasPayloads; - Debug.Assert(fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0); - Debug.Assert(storePayloads || storeOffsets); + Debugging.Assert(() => fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0); + Debugging.Assert(() => storePayloads || storeOffsets); if (payload == null) { payload = new BytesRef(); @@ -1159,9 +1160,9 @@ public override int NextPosition() { // new payload length payloadLength = proxIn.ReadVInt32(); - Debug.Assert(payloadLength >= 0); + Debugging.Assert(() => payloadLength >= 0); } - Debug.Assert(payloadLength != -1); + Debugging.Assert(() => payloadLength != -1); } if (storeOffsets) @@ -1199,9 +1200,9 @@ public override int NextPosition() { // new payload length payloadLength = proxIn.ReadVInt32(); - Debug.Assert(payloadLength >= 0); + Debugging.Assert(() => payloadLength >= 0); } - Debug.Assert(payloadLength != -1); + Debugging.Assert(() => payloadLength != -1); payloadPending = true; code_ = (int)((uint)code_ >> 1); @@ -1221,7 +1222,7 @@ public override int NextPosition() posPendingCount--; - Debug.Assert(posPendingCount >= 0, "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount); + Debugging.Assert(() => posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount); //System.out.println("StandardR.D&PE nextPos return pos=" + position); return position; @@ -1243,8 +1244,8 @@ public override BytesRef GetPayload() { return null; } - Debug.Assert(lazyProxPointer == -1); - Debug.Assert(posPendingCount < freq); + Debugging.Assert(() => lazyProxPointer == -1); + Debugging.Assert(() => posPendingCount < freq); if (payloadPending) { diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs index 33ea31894e..7057d681d7 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Diagnostics; using System.IO; @@ -91,8 +92,8 @@ 
public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IO CodecUtil.CheckHeader(indexStream, Lucene40StoredFieldsWriter.CODEC_NAME_IDX, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT); CodecUtil.CheckHeader(fieldsStream, Lucene40StoredFieldsWriter.CODEC_NAME_DAT, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT); - Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == fieldsStream.GetFilePointer()); - Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == indexStream.GetFilePointer()); + Debugging.Assert(() => Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == fieldsStream.GetFilePointer()); + Debugging.Assert(() => Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == indexStream.GetFilePointer()); long indexSize = indexStream.Length - Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX; this.size = (int)(indexSize >> 3); // Verify two sources of "maxDoc" agree: @@ -173,7 +174,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor) FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber); int bits = fieldsStream.ReadByte() & 0xFF; - Debug.Assert(bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), "bits=" + bits.ToString("x")); + Debugging.Assert(() => bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), () => "bits=" + bits.ToString("x")); switch (visitor.NeedsField(fieldInfo)) { @@ -280,7 +281,7 @@ public IndexInput RawDocs(int[] lengths, int startDocID, int numDocs) { long offset; int docID = startDocID + count + 1; - Debug.Assert(docID <= numTotalDocs); + Debugging.Assert(() => docID <= numTotalDocs); if (docID < numTotalDocs) { offset = indexStream.ReadInt64(); diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs index ca8ba56046..4e39ff108b 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs @@ -1,4 +1,5 @@ using J2N; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using System; using System.Diagnostics; @@ -90,7 +91,7 @@ public sealed class Lucene40StoredFieldsWriter : StoredFieldsWriter /// Sole constructor. 
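Both the reader and writer hunks here guard the same invariant: immediately after CodecUtil writes or checks a header, the stream position must equal the computed header length, so every offset recorded later is measured from a known base. A compact sketch of that handshake against plain BCL streams (the header layout below only approximates the codec header, and the codec name is illustrative):

    using System;
    using System.IO;
    using System.Text;

    internal static class HeaderLengthDemo
    {
        // Approximation of CodecUtil.WriteHeader: magic, length-prefixed name, version.
        private static void WriteHeader(BinaryWriter @out, string codecName, int version)
        {
            @out.Write(0x3FD76C17);                 // codec magic
            byte[] name = Encoding.UTF8.GetBytes(codecName);
            @out.Write((byte)name.Length);          // assumes short ASCII names
            @out.Write(name);
            @out.Write(version);
        }

        // Approximation of CodecUtil.HeaderLength(codecName).
        private static long HeaderLength(string codecName)
            => 4 + 1 + Encoding.UTF8.GetByteCount(codecName) + 4;

        public static void Main()
        {
            using MemoryStream ms = new MemoryStream();
            using BinaryWriter @out = new BinaryWriter(ms);
            WriteHeader(@out, "Lucene40StoredFieldsData", 1);
            @out.Flush();

            // The converted asserts check exactly this after WriteHeader (writer)
            // and after CheckHeader (reader); a mismatch would mean every later
            // file offset is computed from the wrong base.
            if (ms.Position != HeaderLength("Lucene40StoredFieldsData"))
                throw new InvalidOperationException("header length mismatch");
        }
    }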
public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext context) { - Debug.Assert(directory != null); + Debugging.Assert(() => directory != null); this.directory = directory; this.segment = segment; @@ -102,8 +103,8 @@ public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext CodecUtil.WriteHeader(fieldsStream, CODEC_NAME_DAT, VERSION_CURRENT); CodecUtil.WriteHeader(indexStream, CODEC_NAME_IDX, VERSION_CURRENT); - Debug.Assert(HEADER_LENGTH_DAT == fieldsStream.GetFilePointer()); - Debug.Assert(HEADER_LENGTH_IDX == indexStream.GetFilePointer()); + Debugging.Assert(() => HEADER_LENGTH_DAT == fieldsStream.GetFilePointer()); + Debugging.Assert(() => HEADER_LENGTH_IDX == indexStream.GetFilePointer()); success = true; } finally @@ -262,7 +263,7 @@ public void AddRawDocuments(IndexInput stream, int[] lengths, int numDocs) position += lengths[i]; } fieldsStream.CopyBytes(stream, position - start); - Debug.Assert(fieldsStream.GetFilePointer() == position); + Debugging.Assert(() => fieldsStream.GetFilePointer() == position); } public override void Finish(FieldInfos fis, int numDocs) @@ -324,7 +325,7 @@ private int CopyFieldsWithDeletions(MergeState mergeState, AtomicReader reader, int docCount = 0; int maxDoc = reader.MaxDoc; IBits liveDocs = reader.LiveDocs; - Debug.Assert(liveDocs != null); + Debugging.Assert(() => liveDocs != null); if (matchingFieldsReader != null) { // We can bulk-copy because the fieldInfos are "congruent" diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs index fcef1e68bd..ceca2e2029 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using System; @@ -119,16 +120,16 @@ public Lucene40TermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn fn = IndexFileNames.SegmentFileName(segment, "", VECTORS_FIELDS_EXTENSION); tvf = d.OpenInput(fn, context); int tvfVersion = CodecUtil.CheckHeader(tvf, CODEC_NAME_FIELDS, VERSION_START, VERSION_CURRENT); - Debug.Assert(HEADER_LENGTH_INDEX == tvx.GetFilePointer()); - Debug.Assert(HEADER_LENGTH_DOCS == tvd.GetFilePointer()); - Debug.Assert(HEADER_LENGTH_FIELDS == tvf.GetFilePointer()); - Debug.Assert(tvxVersion == tvdVersion); - Debug.Assert(tvxVersion == tvfVersion); + Debugging.Assert(() => HEADER_LENGTH_INDEX == tvx.GetFilePointer()); + Debugging.Assert(() => HEADER_LENGTH_DOCS == tvd.GetFilePointer()); + Debugging.Assert(() => HEADER_LENGTH_FIELDS == tvf.GetFilePointer()); + Debugging.Assert(() => tvxVersion == tvdVersion); + Debugging.Assert(() => tvxVersion == tvfVersion); numTotalDocs = (int)(tvx.Length - HEADER_LENGTH_INDEX >> 4); this.size = numTotalDocs; - Debug.Assert(size == 0 || numTotalDocs == size); + Debugging.Assert(() => size == 0 || numTotalDocs == size); this.fieldInfos = fieldInfos; success = true; @@ -199,7 +200,7 @@ internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int startDocID, int nu while (count < numDocs) { int docID = startDocID + count + 1; - Debug.Assert(docID <= numTotalDocs); + Debugging.Assert(() => docID <= numTotalDocs); if (docID < numTotalDocs) { tvdPosition = tvx.ReadInt64(); @@ -209,7 +210,7 @@ internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int startDocID, int nu { tvdPosition = tvd.Length; tvfPosition = tvf.Length; - Debug.Assert(count == numDocs - 1); + 
Debugging.Assert(() => count == numDocs - 1);
             }
             tvdLengths[count] = (int)(tvdPosition - lastTvdPosition);
             tvfLengths[count] = (int)(tvfPosition - lastTvfPosition);
@@ -247,7 +248,7 @@ public TVFields(Lucene40TermVectorsReader outerInstance, int docID)
                 outerInstance.tvd.Seek(outerInstance.tvx.ReadInt64());
                 int fieldCount = outerInstance.tvd.ReadVInt32();
-                Debug.Assert(fieldCount >= 0);
+                Debugging.Assert(() => fieldCount >= 0);
                 if (fieldCount != 0)
                 {
                     fieldNumbers = new int[fieldCount];
@@ -517,7 +518,7 @@ public override BytesRef Next()
                     }
                     payloadOffsets[posUpto] = totalPayloadLength;
                     totalPayloadLength += lastPayloadLength;
-                    Debug.Assert(totalPayloadLength >= 0);
+                    Debugging.Assert(() => totalPayloadLength >= 0);
                 }
                 payloadData = new byte[totalPayloadLength];
                 tvf.ReadBytes(payloadData, 0, payloadData.Length);
@@ -667,7 +668,7 @@ public override int Freq
                 }
                 else
                 {
-                    Debug.Assert(startOffsets != null);
+                    Debugging.Assert(() => startOffsets != null);
                     return startOffsets.Length;
                 }
             }
@@ -729,7 +730,7 @@ public override BytesRef GetPayload()

             public override int NextPosition()
             {
-                //Debug.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
+                //Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
                 // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is
                 // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
index 0098850ae8..0a6e736377 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
@@ -1,4 +1,5 @@
 using J2N.Text;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -77,9 +78,9 @@ public Lucene40TermVectorsWriter(Directory directory, string segment, IOContext
                 CodecUtil.WriteHeader(tvd, Lucene40TermVectorsReader.CODEC_NAME_DOCS, Lucene40TermVectorsReader.VERSION_CURRENT);
                 tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
                 CodecUtil.WriteHeader(tvf, Lucene40TermVectorsReader.CODEC_NAME_FIELDS, Lucene40TermVectorsReader.VERSION_CURRENT);
-                Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.GetFilePointer());
-                Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.GetFilePointer());
-                Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
+                Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.GetFilePointer());
+                Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.GetFilePointer());
+                Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
                 success = true;
             }
             finally
@@ -109,7 +110,7 @@ public override void StartDocument(int numVectorFields)

         public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
         {
-            Debug.Assert(lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, "fieldName=" + info.Name + " lastFieldName=" + lastFieldName);
+            Debugging.Assert(() => lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName);
             lastFieldName = info.Name;
             this.positions = positions;
             this.offsets = offsets;
@@ -138,7 +139,7 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo
         [MethodImpl(MethodImplOptions.NoInlining)]
         public override void FinishDocument()
         {
-            Debug.Assert(fieldCount == numVectorFields);
+            Debugging.Assert(() => fieldCount == numVectorFields);
             for (int i = 1; i < fieldCount; i++)
             {
                 tvd.WriteVInt64(fps[i] - fps[i - 1]);
@@ -267,8 +268,8 @@ public override void FinishTerm()
             if (bufferedIndex > 0)
             {
                 // dump buffer
-                Debug.Assert(positions && (offsets || payloads));
-                Debug.Assert(bufferedIndex == bufferedFreq);
+                Debugging.Assert(() => positions && (offsets || payloads));
+                Debugging.Assert(() => bufferedIndex == bufferedFreq);
                 if (payloads)
                 {
                     tvf.WriteBytes(payloadData.Bytes, payloadData.Offset, payloadData.Length);
@@ -356,8 +357,8 @@ private void AddRawDocuments(Lucene40TermVectorsReader reader, int[] tvdLengths,
             }
             tvd.CopyBytes(reader.TvdStream, tvdPosition - tvdStart);
             tvf.CopyBytes(reader.TvfStream, tvfPosition - tvfStart);
-            Debug.Assert(tvd.GetFilePointer() == tvdPosition);
-            Debug.Assert(tvf.GetFilePointer() == tvfPosition);
+            Debugging.Assert(() => tvd.GetFilePointer() == tvdPosition);
+            Debugging.Assert(() => tvf.GetFilePointer() == tvfPosition);
         }

         [MethodImpl(MethodImplOptions.NoInlining)]
diff --git a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
index 212178d1a2..cb39d5e34b 100644
--- a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
 using Lucene.Net.Util.Packed;
@@ -87,7 +88,7 @@ private static int ComputeIterations(PackedInt32s.IDecoder decoder)
         private static int EncodedSize(PackedInt32s.Format format, int packedIntsVersion, int bitsPerValue)
         {
             long byteCount = format.ByteCount(packedIntsVersion, Lucene41PostingsFormat.BLOCK_SIZE, bitsPerValue);
-            Debug.Assert(byteCount >= 0 && byteCount <= int.MaxValue, byteCount.ToString());
+            Debugging.Assert(() => byteCount >= 0 && byteCount <= int.MaxValue, byteCount.ToString);
             return (int)byteCount;
         }
@@ -110,8 +111,8 @@ internal ForUtil(float acceptableOverheadRatio, DataOutput @out)
             for (int bpv = 1; bpv <= 32; ++bpv)
             {
                 PackedInt32s.FormatAndBits formatAndBits = PackedInt32s.FastestFormatAndBits(Lucene41PostingsFormat.BLOCK_SIZE, bpv, acceptableOverheadRatio);
-                Debug.Assert(formatAndBits.Format.IsSupported(formatAndBits.BitsPerValue));
-                Debug.Assert(formatAndBits.BitsPerValue <= 32);
+                Debugging.Assert(() => formatAndBits.Format.IsSupported(formatAndBits.BitsPerValue));
+                Debugging.Assert(() => formatAndBits.BitsPerValue <= 32);
                 encodedSizes[bpv] = EncodedSize(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
                 encoders[bpv] = PackedInt32s.GetEncoder(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
                 decoders[bpv] = PackedInt32s.GetDecoder(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
@@ -140,7 +141,7 @@ internal ForUtil(DataInput @in)
             var bitsPerValue = (code & 31) + 1;

             PackedInt32s.Format format = PackedInt32s.Format.ById(formatId);
-            Debug.Assert(format.IsSupported(bitsPerValue));
+            Debugging.Assert(() => format.IsSupported(bitsPerValue));
             encodedSizes[bpv] = EncodedSize(format, packedIntsVersion, bitsPerValue);
             encoders[bpv] = PackedInt32s.GetEncoder(format, packedIntsVersion, bitsPerValue);
             decoders[bpv] = PackedInt32s.GetDecoder(format, packedIntsVersion, bitsPerValue);
@@ -165,12 +166,12 @@ internal void WriteBlock(int[] data, byte[] encoded, IndexOutput @out)
             }

             int numBits = BitsRequired(data);
-            Debug.Assert(numBits > 0 && numBits <= 32, numBits.ToString());
+            Debugging.Assert(() => numBits > 0 && numBits <= 32, numBits.ToString);
             PackedInt32s.IEncoder encoder = encoders[numBits];
             int iters = iterations[numBits];
-            Debug.Assert(iters * encoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
+            Debugging.Assert(() => iters * encoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
             int encodedSize = encodedSizes[numBits];
-            Debug.Assert(iters * encoder.ByteBlockCount >= encodedSize);
+            Debugging.Assert(() => iters * encoder.ByteBlockCount >= encodedSize);

             @out.WriteByte((byte)numBits);
@@ -188,7 +189,7 @@ internal void WriteBlock(int[] data, byte[] encoded, IndexOutput @out)
         internal void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)
         {
             int numBits = @in.ReadByte();
-            Debug.Assert(numBits <= 32, numBits.ToString());
+            Debugging.Assert(() => numBits <= 32, numBits.ToString);

             if (numBits == ALL_VALUES_EQUAL)
             {
@@ -202,7 +203,7 @@ internal void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)
             PackedInt32s.IDecoder decoder = decoders[numBits];
             int iters = iterations[numBits];
-            Debug.Assert(iters * decoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
+            Debugging.Assert(() => iters * decoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);

             decoder.Decode(encoded, 0, decoded, 0, iters);
         }
@@ -220,7 +221,7 @@ internal void SkipBlock(IndexInput @in)
                 @in.ReadVInt32();
                 return;
             }
-            Debug.Assert(numBits > 0 && numBits <= 32, numBits.ToString());
+            Debugging.Assert(() => numBits > 0 && numBits <= 32, numBits.ToString);
             int encodedSize = encodedSizes[numBits];
             @in.Seek(@in.GetFilePointer() + encodedSize);
         }
@@ -247,7 +248,7 @@ private static int BitsRequired(int[] data)
             long or = 0;
             for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE; ++i)
             {
-                Debug.Assert(data[i] >= 0);
+                Debugging.Assert(() => data[i] >= 0);
                 or |= (uint)data[i];
             }
             return PackedInt32s.BitsRequired(or);
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
index f1c9f13383..02115c90b5 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
@@ -1,4 +1,4 @@
-using System.Diagnostics;
+using Lucene.Net.Diagnostics;

 namespace Lucene.Net.Codecs.Lucene41
 {
@@ -392,9 +392,9 @@ public Lucene41PostingsFormat(int minTermBlockSize, int maxTermBlockSize)
             : base()
         {
             this.minTermBlockSize = minTermBlockSize;
-            Debug.Assert(minTermBlockSize > 1);
+            Debugging.Assert(() => minTermBlockSize > 1);
             this.maxTermBlockSize = maxTermBlockSize;
-            Debug.Assert(minTermBlockSize <= maxTermBlockSize);
+            Debugging.Assert(() => minTermBlockSize <= maxTermBlockSize);
         }

         public override string ToString()
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
index fbf1e8dfed..0ef9de60ab 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -401,7 +402,7 @@ public DocsEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32BlockTermState
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                Debug.Assert(left > 0);
+                Debugging.Assert(() => left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -510,7 +511,7 @@ public override int Advance(int target)
                     if (!skipped)
                     {
-                        Debug.Assert(skipOffset != -1);
+                        Debugging.Assert(() => skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         skipper.Init(docTermStartFP + skipOffset, docTermStartFP, 0, 0, docFreq);
@@ -527,7 +528,7 @@
                         // if (DEBUG) {
                         //   System.out.println("skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer());
                         // }
-                        Debug.Assert(newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, "got " + newDocUpto);
+                        Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                         docUpto = newDocUpto;

                         // Force to read next block
@@ -730,7 +731,7 @@ public DocsAndPositionsEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32Bl
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                Debug.Assert(left > 0);
+                Debugging.Assert(() => left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -874,7 +875,7 @@ public override int Advance(int target)
                     if (!skipped)
                     {
-                        Debug.Assert(skipOffset != -1);
+                        Debugging.Assert(() => skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         // if (DEBUG) {
@@ -893,7 +894,7 @@
                         //   System.out.println("  skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer() + " pos.fp=" + skipper.getPosPointer() + " pos.bufferUpto=" + skipper.getPosBufferUpto());
                         // }
-                        Debug.Assert(newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, "got " + newDocUpto);
+                        Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                         docUpto = newDocUpto;

                         // Force to read next block
@@ -982,7 +983,7 @@ private void SkipPositions()
                     // if (DEBUG) {
                     //   System.out.println("        skip whole block @ fp=" + posIn.getFilePointer());
                     // }
-                    Debug.Assert(posIn.GetFilePointer() != lastPosBlockFP);
+                    Debugging.Assert(() => posIn.GetFilePointer() != lastPosBlockFP);
                     outerInstance.forUtil.SkipBlock(posIn);
                     toSkip -= Lucene41PostingsFormat.BLOCK_SIZE;
                 }
@@ -1235,7 +1236,7 @@ public EverythingEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32BlockTer
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                Debug.Assert(left > 0);
+                Debugging.Assert(() => left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -1449,7 +1450,7 @@ public override int Advance(int target)
                     if (!skipped)
                     {
-                        Debug.Assert(skipOffset != -1);
+                        Debugging.Assert(() => skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         // if (DEBUG) {
@@ -1467,7 +1468,7 @@
                         // if (DEBUG) {
                         //   System.out.println("  skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer() + " pos.fp=" + skipper.getPosPointer() + " pos.bufferUpto=" + skipper.getPosBufferUpto() + " pay.fp=" + skipper.getPayPointer() + " lastStartOffset=" + lastStartOffset);
                         // }
-                        Debug.Assert(newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, "got " + newDocUpto);
+                        Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                         docUpto = newDocUpto;

                         // Force to read next block
@@ -1567,7 +1568,7 @@ private void SkipPositions()
                     // if (DEBUG) {
                     //   System.out.println("        skip whole block @ fp=" + posIn.getFilePointer());
                     // }
-                    Debug.Assert(posIn.GetFilePointer() != lastPosBlockFP);
+                    Debugging.Assert(() => posIn.GetFilePointer() != lastPosBlockFP);
                     outerInstance.forUtil.SkipBlock(posIn);

                     if (indexHasPayloads)
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
index e2c44e6172..a60d951774 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Diagnostics;
@@ -376,8 +377,8 @@ public override void AddPosition(int position, BytesRef payload, int startOffset
             if (fieldHasOffsets)
             {
-                Debug.Assert(startOffset >= lastStartOffset);
-                Debug.Assert(endOffset >= startOffset);
+                Debugging.Assert(() => startOffset >= lastStartOffset);
+                Debugging.Assert(() => endOffset >= startOffset);
                 offsetStartDeltaBuffer[posBufferUpto] = startOffset - lastStartOffset;
                 offsetLengthBuffer[posBufferUpto] = endOffset - startOffset;
                 lastStartOffset = startOffset;
@@ -438,11 +439,11 @@ public override void FinishDoc()
         public override void FinishTerm(BlockTermState state)
         {
             Int32BlockTermState state2 = (Int32BlockTermState)state;
-            Debug.Assert(state2.DocFreq > 0);
+            Debugging.Assert(() => state2.DocFreq > 0);

             // TODO: wasteful we are counting this (counting # docs
             // for this term) in two places?
-            Debug.Assert(state2.DocFreq == docCount, state2.DocFreq + " vs " + docCount);
+            Debugging.Assert(() => state2.DocFreq == docCount, () => state2.DocFreq + " vs " + docCount);

             // if (DEBUG) {
             //   System.out.println("FPW.finishTerm docFreq=" + state2.docFreq);
@@ -497,7 +498,7 @@ public override void FinishTerm(BlockTermState state)
             // totalTermFreq is just total number of positions(or payloads, or offsets)
             // associated with current term.
-            Debug.Assert(state2.TotalTermFreq != -1);
+            Debugging.Assert(() => state2.TotalTermFreq != -1);
             if (state2.TotalTermFreq > Lucene41PostingsFormat.BLOCK_SIZE)
             {
                 // record file offset for last pos in last block
@@ -575,7 +576,7 @@ public override void FinishTerm(BlockTermState state)
             if (fieldHasPayloads)
             {
-                Debug.Assert(payloadBytesReadUpto == payloadByteUpto);
+                Debugging.Assert(() => payloadBytesReadUpto == payloadByteUpto);
                 payloadByteUpto = 0;
             }
         }
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
index 5b411dd224..6b459ce7df 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System.Diagnostics;
@@ -128,7 +129,7 @@ public void Init(long skipPointer, long docBasePointer, long posBasePointer, lon
             }
             else
             {
-                Debug.Assert(posBasePointer == 0);
+                Debugging.Assert(() => posBasePointer == 0);
             }
         }
diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
index 541bddf6b2..c32c67633b 100644
--- a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
+++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -92,7 +93,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
             long count = 0;
             foreach (long? nv in values)
             {
-                Debug.Assert(nv != null);
+                Debugging.Assert(() => nv != null);
                 long v = nv.Value;

                 if (gcd != 1)
@@ -126,7 +127,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
                     ++count;
                 }
-                Debug.Assert(count == maxDoc);
+                Debugging.Assert(() => count == maxDoc);
             }

             if (uniqueValues != null)
diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
index c0ac98b386..49deb1b413 100644
--- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
+++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections;
 using System.Collections.Generic;
@@ -494,13 +495,13 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> va
                 }
                 else
                 {
-                    Debug.Assert(current == 1);
+                    Debugging.Assert(() => current == 1);
                     ordsIter.MoveNext();
                     yield return ordsIter.Current;
                 }
             }

-            Debug.Assert(!ordsIter.MoveNext());
+            Debugging.Assert(() => !ordsIter.MoveNext());
         }

         protected override void Dispose(bool disposing)
diff --git a/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs b/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
index 16002415db..52be418aba 100644
--- a/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Diagnostics;
@@ -72,7 +73,7 @@ public override void Write(Directory directory, string segmentName, string segme
                 if (fi.IsIndexed)
                 {
                     bits |= Lucene46FieldInfosFormat.IS_INDEXED;
-                    Debug.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
+                    Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                     if (indexOptions == IndexOptions.DOCS_ONLY)
                     {
                         bits |= Lucene46FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
@@ -93,7 +94,7 @@ public override void Write(Directory directory, string segmentName, string segme
                 // pack the DV types in one byte
                 var dv = DocValuesByte(fi.DocValuesType);
                 var nrm = DocValuesByte(fi.NormType);
-                Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
+                Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
                 var val = (byte)(0xff & ((nrm << 4) | (byte)dv));
                 output.WriteByte(val);
                 output.WriteInt64(fi.DocValuesGen);
diff --git a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
index 3930a40ade..ecdb5bdc23 100644
--- a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
+++ b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
 using System.Diagnostics;
@@ -105,7 +106,7 @@ public override int NextDoc()
                     current = subs[upto].DocsEnum;
                     currentBase = mergeState.DocBase[reader];
                     currentMap = mergeState.DocMaps[reader];
-                    Debug.Assert(currentMap.MaxDoc == subs[upto].Slice.Length, "readerIndex=" + reader + " subs.len=" + subs.Length + " len1=" + currentMap.MaxDoc + " vs " + subs[upto].Slice.Length);
+                    Debugging.Assert(() => currentMap.MaxDoc == subs[upto].Slice.Length, () => "readerIndex=" + reader + " subs.len=" + subs.Length + " len1=" + currentMap.MaxDoc + " vs " + subs[upto].Slice.Length);
                 }
             }
diff --git a/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs b/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
index 1fecf80cb1..c6520340d4 100644
--- a/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
+++ b/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Diagnostics;
@@ -252,7 +253,7 @@ public virtual void Init(long skipPointer, int df)
         {
             this.skipPointer[0] = skipPointer;
             this.docCount = df;
-            Debug.Assert(skipPointer >= 0 && skipPointer <= skipStream[0].Length, "invalid skip pointer: " + skipPointer + ", length=" + skipStream[0].Length);
+            Debugging.Assert(() => skipPointer >= 0 && skipPointer <= skipStream[0].Length, () => "invalid skip pointer: " + skipPointer + ", length=" + skipStream[0].Length);
             Array.Clear(m_skipDoc, 0, m_skipDoc.Length);
             Array.Clear(numSkipped, 0, numSkipped.Length);
             Array.Clear(childPointer, 0, childPointer.Length);
diff --git a/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs b/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
index ac82fe5be3..beb24e422a 100644
--- a/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
+++ b/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System.Diagnostics;
 using System.IO;
@@ -147,7 +148,7 @@ public virtual void ResetSkip()
         /// If an I/O error occurs.
         public virtual void BufferSkip(int df)
         {
-            Debug.Assert(df % skipInterval == 0);
+            Debugging.Assert(() => df % skipInterval == 0);
             int numLevels = 1;
             df /= skipInterval;
diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
index a0f40123c2..d9904c6ef3 100644
--- a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
+++ b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
@@ -1,4 +1,5 @@
 using J2N.Runtime.CompilerServices;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -154,7 +155,7 @@ internal virtual DocValuesConsumer GetInstance(FieldInfo field)
                 string formatName_ = format.Name;

                 string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName_);
-                Debug.Assert(field.DocValuesGen != -1 || previousValue == null, "formatName=" + formatName_ + " prevValue=" + previousValue);
+                Debugging.Assert(() => field.DocValuesGen != -1 || previousValue == null, () => "formatName=" + formatName_ + " prevValue=" + previousValue);

                 int? suffix = null;
@@ -198,12 +199,12 @@ internal virtual DocValuesConsumer GetInstance(FieldInfo field)
                 else
                 {
                     // we've already seen this format, so just grab its suffix
-                    Debug.Assert(suffixes.ContainsKey(formatName_));
+                    Debugging.Assert(() => suffixes.ContainsKey(formatName_));
                     suffix = consumer.Suffix;
                 }

                 previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
-                Debug.Assert(field.DocValuesGen != -1 || previousValue == null, "suffix=" + Convert.ToString(suffix, CultureInfo.InvariantCulture) + " prevValue=" + previousValue);
+                Debugging.Assert(() => field.DocValuesGen != -1 || previousValue == null, () => "suffix=" + Convert.ToString(suffix, CultureInfo.InvariantCulture) + " prevValue=" + previousValue);

                 // TODO: we should only provide the "slice" of FIS
                 // that this DVF actually sees ...
@@ -264,7 +265,7 @@ public FieldsReader(PerFieldDocValuesFormat outerInstance, SegmentReadState read
                         {
                             // null formatName means the field is in fieldInfos, but has no docvalues!
                             string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
-                            Debug.Assert(suffix != null);
+                            Debugging.Assert(() => suffix != null);
                             DocValuesFormat format = DocValuesFormat.ForName(formatName);
                             string segmentSuffix = GetFullSegmentSuffix(readState.SegmentSuffix, GetSuffix(formatName, suffix));
                             // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
@@ -305,7 +306,7 @@ internal FieldsReader(PerFieldDocValuesFormat outerInstance, FieldsReader other)
                 {
                     DocValuesProducer producer;
                     oldToNew.TryGetValue(ent.Value, out producer);
-                    Debug.Assert(producer != null);
+                    Debugging.Assert(() => producer != null);
                     fields[ent.Key] = producer;
                 }
             }
diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
index 8c3d72d737..e5dbc96318 100644
--- a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
+++ b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -115,7 +116,7 @@ public override TermsConsumer AddField(FieldInfo field)
                 string formatName = format.Name;

                 string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName);
-                Debug.Assert(previousValue == null);
+                Debugging.Assert(() => previousValue == null);

                 int? suffix;
@@ -146,12 +147,12 @@ public override TermsConsumer AddField(FieldInfo field)
                 else
                 {
                     // we've already seen this format, so just grab its suffix
-                    Debug.Assert(suffixes.ContainsKey(formatName));
+                    Debugging.Assert(() => suffixes.ContainsKey(formatName));
                     suffix = consumer.Suffix;
                 }

                 previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
-                Debug.Assert(previousValue == null);
+                Debugging.Assert(() => previousValue == null);

                 // TODO: we should only provide the "slice" of FIS
                 // that this PF actually sees ... then stuff like
@@ -218,7 +219,7 @@ public FieldsReader(PerFieldPostingsFormat outerInstance, SegmentReadState readS
                         {
                             // null formatName means the field is in fieldInfos, but has no postings!
                             string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
-                            Debug.Assert(suffix != null);
+                            Debugging.Assert(() => suffix != null);
                             PostingsFormat format = PostingsFormat.ForName(formatName);
                             string segmentSuffix = GetSuffix(formatName, suffix);
                             // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
diff --git a/src/Lucene.Net/Codecs/PostingsConsumer.cs b/src/Lucene.Net/Codecs/PostingsConsumer.cs
index 93588a952f..aff9f0e810 100644
--- a/src/Lucene.Net/Codecs/PostingsConsumer.cs
+++ b/src/Lucene.Net/Codecs/PostingsConsumer.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using System.Diagnostics;
 using System.Runtime.CompilerServices;
@@ -150,7 +151,7 @@ public virtual TermStats Merge(MergeState mergeState, IndexOptions indexOptions,
             }
             else
             {
-                Debug.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+                Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
                 var postingsEnum = (DocsAndPositionsEnum)postings;
                 while (true)
                 {
diff --git a/src/Lucene.Net/Codecs/TermVectorsWriter.cs b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
index 5a636ddca8..028e99ddbe 100644
--- a/src/Lucene.Net/Codecs/TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
@@ -1,4 +1,5 @@
 using J2N.Text;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -285,7 +286,7 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                 fieldCount++;
                 FieldInfo fieldInfo = mergeState.FieldInfos.FieldInfo(fieldName);
-                Debug.Assert(lastFieldName == null || fieldName.CompareToOrdinal(lastFieldName) > 0, "lastFieldName=" + lastFieldName + " fieldName=" + fieldName);
+                Debugging.Assert(() => lastFieldName == null || fieldName.CompareToOrdinal(lastFieldName) > 0, () => "lastFieldName=" + lastFieldName + " fieldName=" + fieldName);
                 lastFieldName = fieldName;

                 Terms terms = vectors.GetTerms(fieldName);
@@ -298,7 +299,7 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                 bool hasPositions = terms.HasPositions;
                 bool hasOffsets = terms.HasOffsets;
                 bool hasPayloads = terms.HasPayloads;
-                Debug.Assert(!hasPayloads || hasPositions);
+                Debugging.Assert(() => !hasPayloads || hasPositions);

                 int numTerms = (int)terms.Count;
                 if (numTerms == -1)
@@ -327,11 +328,11 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                     if (hasPositions || hasOffsets)
                     {
                         docsAndPositionsEnum = termsEnum.DocsAndPositions(null, docsAndPositionsEnum);
-                        Debug.Assert(docsAndPositionsEnum != null);
+                        Debugging.Assert(() => docsAndPositionsEnum != null);

                         int docID = docsAndPositionsEnum.NextDoc();
-                        Debug.Assert(docID != DocIdSetIterator.NO_MORE_DOCS);
-                        Debug.Assert(docsAndPositionsEnum.Freq == freq);
+                        Debugging.Assert(() => docID != DocIdSetIterator.NO_MORE_DOCS);
+                        Debugging.Assert(() => docsAndPositionsEnum.Freq == freq);

                         for (int posUpto = 0; posUpto < freq; posUpto++)
                         {
@@ -341,16 +342,16 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)

                             BytesRef payload = docsAndPositionsEnum.GetPayload();
-                            Debug.Assert(!hasPositions || pos >= 0);
+                            Debugging.Assert(() => !hasPositions || pos >= 0);
                             AddPosition(pos, startOffset, endOffset, payload);
                         }
                     }
                     FinishTerm();
                 }
-                Debug.Assert(termCount == numTerms);
+                Debugging.Assert(() => termCount == numTerms);
                 FinishField();
             }
-            Debug.Assert(fieldCount == numFields);
+            Debugging.Assert(() => fieldCount == numFields);
             FinishDocument();
         }
diff --git a/src/Lucene.Net/Codecs/TermsConsumer.cs b/src/Lucene.Net/Codecs/TermsConsumer.cs
index b731f01277..08f4c4a780 100644
--- a/src/Lucene.Net/Codecs/TermsConsumer.cs
+++ b/src/Lucene.Net/Codecs/TermsConsumer.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -100,7 +101,7 @@ protected internal TermsConsumer()
         public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, TermsEnum termsEnum)
         {
             BytesRef term;
-            Debug.Assert(termsEnum != null);
+            Debugging.Assert(() => termsEnum != null);
             long sumTotalTermFreq = 0;
             long sumDocFreq = 0;
             long sumDFsinceLastAbortCheck = 0;
@@ -156,7 +157,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         docsAndFreqsEnumIn = (MultiDocsEnum)termsEnum.Docs(null, docsAndFreqsEnumIn);
-                        Debug.Assert(docsAndFreqsEnumIn != null);
+                        Debugging.Assert(() => docsAndFreqsEnumIn != null);
                         docsAndFreqsEnum.Reset(docsAndFreqsEnumIn);
                         PostingsConsumer postingsConsumer = StartTerm(term);
                         TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, docsAndFreqsEnum, visitedDocs);
@@ -187,7 +188,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn, DocsAndPositionsFlags.PAYLOADS);
-                        Debug.Assert(postingsEnumIn != null);
+                        Debugging.Assert(() => postingsEnumIn != null);
                         postingsEnum.Reset(postingsEnumIn);
                         PostingsConsumer postingsConsumer = StartTerm(term);
@@ -208,7 +209,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                 }
                 else
                 {
-                    Debug.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+                    Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
                     if (postingsEnum == null)
                     {
                         postingsEnum = new MappingMultiDocsAndPositionsEnum();
@@ -220,7 +221,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn);
-                        Debug.Assert(postingsEnumIn != null);
+                        Debugging.Assert(() => postingsEnumIn != null);
                         postingsEnum.Reset(postingsEnumIn);
                         PostingsConsumer postingsConsumer = StartTerm(term);
diff --git a/src/Lucene.Net/Index/AtomicReader.cs b/src/Lucene.Net/Index/AtomicReader.cs
index 3e4f1eb2bf..8639d42a53 100644
--- a/src/Lucene.Net/Index/AtomicReader.cs
+++ b/src/Lucene.Net/Index/AtomicReader.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System.Diagnostics;
 using System;
@@ -210,8 +211,8 @@ public Terms GetTerms(string field) // LUCENENET specific: Renamed from Terms()
         ///
         public DocsEnum GetTermDocsEnum(Term term) // LUCENENET specific: Renamed from TermDocsEnum()
         {
-            Debug.Assert(term.Field != null);
-            Debug.Assert(term.Bytes != null);
+            Debugging.Assert(() => term.Field != null);
+            Debugging.Assert(() => term.Bytes != null);
             Fields fields = Fields;
             if (fields != null)
             {
@@ -235,8 +236,8 @@ public DocsEnum GetTermDocsEnum(Term term) // LUCENENET specific: Renamed from T
         ///
         public DocsAndPositionsEnum GetTermPositionsEnum(Term term) // LUCENENET specific: Renamed from TermPositionsEnum()
         {
-            Debug.Assert(term.Field != null);
-            Debug.Assert(term.Bytes != null);
+            Debugging.Assert(() => term.Field != null);
+            Debugging.Assert(() => term.Bytes != null);
             Fields fields = Fields;
             if (fields != null)
             {
diff --git a/src/Lucene.Net/Index/AtomicReaderContext.cs b/src/Lucene.Net/Index/AtomicReaderContext.cs
index 0cffc46f75..ab0a60c479 100644
--- a/src/Lucene.Net/Index/AtomicReaderContext.cs
+++ b/src/Lucene.Net/Index/AtomicReaderContext.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -62,7 +63,7 @@ public override IList<AtomicReaderContext> Leaves
                 {
                     throw new NotSupportedException("this is not a top-level context.");
                 }
-                Debug.Assert(leaves != null);
+                Debugging.Assert(() => leaves != null);
                 return leaves;
             }
         }
diff --git a/src/Lucene.Net/Index/AutomatonTermsEnum.cs b/src/Lucene.Net/Index/AutomatonTermsEnum.cs
index b505e86e4f..553e0be2dd 100644
--- a/src/Lucene.Net/Index/AutomatonTermsEnum.cs
+++ b/src/Lucene.Net/Index/AutomatonTermsEnum.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -90,7 +91,7 @@ public AutomatonTermsEnum(TermsEnum tenum, CompiledAutomaton compiled)
         {
             this.finite = compiled.Finite;
             this.runAutomaton = compiled.RunAutomaton;
-            Debug.Assert(this.runAutomaton != null);
+            Debugging.Assert(() => this.runAutomaton != null);
             this.commonSuffixRef = compiled.CommonSuffixRef;
             this.allTransitions = compiled.SortedTransitions;
@@ -128,7 +129,7 @@ protected override BytesRef NextSeekTerm(BytesRef term)
             //System.out.println("ATE.nextSeekTerm term=" + term);
             if (term == null)
             {
-                Debug.Assert(seekBytesRef.Length == 0);
+                Debugging.Assert(() => seekBytesRef.Length == 0);
                 // return the empty term, as its valid
                 if (runAutomaton.IsAccept(runAutomaton.InitialState))
                 {
@@ -158,14 +159,14 @@ protected override BytesRef NextSeekTerm(BytesRef term)
         ///
         private void SetLinear(int position)
         {
-            Debug.Assert(linear == false);
+            Debugging.Assert(() => linear == false);
             int state = runAutomaton.InitialState;
             int maxInterval = 0xff;
             for (int i = 0; i < position; i++)
             {
                 state = runAutomaton.Step(state, seekBytesRef.Bytes[i] & 0xff);
-                Debug.Assert(state >= 0, "state=" + state);
+                Debugging.Assert(() => state >= 0, () => "state=" + state);
             }
             for (int i = 0; i < allTransitions[state].Length; i++)
             {
diff --git a/src/Lucene.Net/Index/BitsSlice.cs b/src/Lucene.Net/Index/BitsSlice.cs
index 06bbfea7d4..965b23432e 100644
--- a/src/Lucene.Net/Index/BitsSlice.cs
+++ b/src/Lucene.Net/Index/BitsSlice.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Diagnostics;
@@ -39,7 +40,7 @@ public BitsSlice(IBits parent, ReaderSlice slice)
             this.parent = parent;
             this.start = slice.Start;
             this.length = slice.Length;
-            Debug.Assert(length >= 0, "length=" + length);
+            Debugging.Assert(() => length >= 0, () => "length=" + length);
         }

         public bool Get(int doc)
@@ -48,7 +49,7 @@ public bool Get(int doc)
             {
                 throw new Exception("doc " + doc + " is out of bounds 0 .. " + (length - 1));
             }
-            Debug.Assert(doc < length, "doc=" + doc + " length=" + length);
+            Debugging.Assert(() => doc < length, () => "doc=" + doc + " length=" + length);
             return parent.Get(doc + start);
         }
diff --git a/src/Lucene.Net/Index/BufferedUpdatesStream.cs b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
index 4c634ec689..f3eedccbb1 100644
--- a/src/Lucene.Net/Index/BufferedUpdatesStream.cs
+++ b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
@@ -1,5 +1,6 @@
 using J2N.Text;
 using J2N.Threading.Atomic;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
@@ -87,10 +88,10 @@ public virtual long Push(FrozenBufferedUpdates packet)
                  * since deletes are applied to the wrong segments.
                  */
                 packet.DelGen = nextGen++;
-                Debug.Assert(packet.Any());
-                Debug.Assert(CheckDeleteStats());
-                Debug.Assert(packet.DelGen < nextGen);
-                Debug.Assert(updates.Count == 0 || updates[updates.Count - 1].DelGen < packet.DelGen, "Delete packets must be in order");
+                Debugging.Assert(packet.Any);
+                Debugging.Assert(CheckDeleteStats);
+                Debugging.Assert(() => packet.DelGen < nextGen);
+                Debugging.Assert(() => updates.Count == 0 || updates[updates.Count - 1].DelGen < packet.DelGen, () => "Delete packets must be in order");
                 updates.Add(packet);
                 numTerms.AddAndGet(packet.numTermDeletes);
                 bytesUsed.AddAndGet(packet.bytesUsed);
@@ -98,7 +99,7 @@ public virtual long Push(FrozenBufferedUpdates packet)
                 {
                     infoStream.Message("BD", "push deletes " + packet + " delGen=" + packet.DelGen + " packetCount=" + updates.Count + " totBytesUsed=" + bytesUsed);
                 }
-                Debug.Assert(CheckDeleteStats());
+                Debugging.Assert(CheckDeleteStats);
                 return packet.DelGen;
             }
         }
@@ -174,7 +175,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                     return new ApplyDeletesResult(false, nextGen++, null);
                 }

-                Debug.Assert(CheckDeleteStats());
+                Debugging.Assert(CheckDeleteStats);

                 if (!Any())
                 {
@@ -235,11 +236,11 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                     }
                     else if (packet != null && segGen == packet.DelGen)
                     {
-                        Debug.Assert(packet.isSegmentPrivate, "Packet and Segments deletegen can only match on a segment private del packet gen=" + segGen);
+                        Debugging.Assert(() => packet.isSegmentPrivate, () => "Packet and Segments deletegen can only match on a segment private del packet gen=" + segGen);
                         //System.out.println("  eq");

                         // Lock order: IW -> BD -> RP
-                        Debug.Assert(readerPool.InfoIsLive(info));
+                        Debugging.Assert(() => readerPool.InfoIsLive(info));
                         ReadersAndUpdates rld = readerPool.Get(info, true);
                         SegmentReader reader = rld.GetReader(IOContext.READ);
                         int delCount = 0;
@@ -266,7 +267,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                                 rld.WriteFieldUpdates(info.Info.Dir, dvUpdates);
                             }
                             int fullDelCount = rld.Info.DelCount + rld.PendingDeleteCount;
-                            Debug.Assert(fullDelCount <= rld.Info.Info.DocCount);
+                            Debugging.Assert(() => fullDelCount <= rld.Info.Info.DocCount);
                             segAllDeletes = fullDelCount == rld.Info.Info.DocCount;
                         }
                         finally
@@ -311,7 +312,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                         if (coalescedUpdates != null)
                         {
                             // Lock order: IW -> BD -> RP
-                            Debug.Assert(readerPool.InfoIsLive(info));
+                            Debugging.Assert(() => readerPool.InfoIsLive(info));
                             ReadersAndUpdates rld = readerPool.Get(info, true);
                             SegmentReader reader = rld.GetReader(IOContext.READ);
                             int delCount = 0;
@@ -328,7 +329,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                                 rld.WriteFieldUpdates(info.Info.Dir, dvUpdates);
                             }
                             int fullDelCount = rld.Info.DelCount + rld.PendingDeleteCount;
-                            Debug.Assert(fullDelCount <= rld.Info.Info.DocCount);
+                            Debugging.Assert(() => fullDelCount <= rld.Info.Info.DocCount);
                             segAllDeletes = fullDelCount == rld.Info.Info.DocCount;
                         }
                         finally
@@ -358,7 +359,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                     }
                 }

-                Debug.Assert(CheckDeleteStats());
+                Debugging.Assert(CheckDeleteStats);
                 if (infoStream.IsEnabled("BD"))
                 {
                     infoStream.Message("BD", "applyDeletes took " + (Environment.TickCount - t0) + " msec");
@@ -388,7 +389,7 @@ public virtual void Prune(SegmentInfos segmentInfos)
         {
             lock (this)
             {
-                Debug.Assert(CheckDeleteStats());
+                Debugging.Assert(CheckDeleteStats);
                 long minGen = long.MaxValue;
                 foreach (SegmentCommitInfo info in segmentInfos.Segments)
                 {
@@ -405,15 +406,15 @@ public virtual void Prune(SegmentInfos segmentInfos)
                     if (updates[delIDX].DelGen >= minGen)
                     {
                         Prune(delIDX);
-                        Debug.Assert(CheckDeleteStats());
+                        Debugging.Assert(CheckDeleteStats);
                         return;
                     }
                 }

                 // All deletes pruned
                 Prune(limit);
-                Debug.Assert(!Any());
-                Debug.Assert(CheckDeleteStats());
+                Debugging.Assert(() => !Any());
+                Debugging.Assert(CheckDeleteStats);
             }
         }
@@ -431,9 +432,9 @@ private void Prune(int count)
             {
                 FrozenBufferedUpdates packet = updates[delIDX];
                 numTerms.AddAndGet(-packet.numTermDeletes);
-                Debug.Assert(numTerms >= 0);
+                Debugging.Assert(() => numTerms >= 0);
                 bytesUsed.AddAndGet(-packet.bytesUsed);
-                Debug.Assert(bytesUsed >= 0);
+                Debugging.Assert(() => bytesUsed >= 0);
             }
             updates.SubList(0, count).Clear();
         }
@@ -458,7 +459,7 @@ private long ApplyTermDeletes(IEnumerable<Term> termsIter, ReadersAndUpdates rld
             string currentField = null;
             DocsEnum docs = null;

-            Debug.Assert(CheckDeleteTerm(null));
+            Debugging.Assert(() => CheckDeleteTerm(null));

             bool any = false;
@@ -470,7 +471,7 @@ private long ApplyTermDeletes(IEnumerable<Term> termsIter, ReadersAndUpdates rld
                 // forwards
                 if (!string.Equals(term.Field, currentField, StringComparison.Ordinal))
                 {
-                    Debug.Assert(currentField == null || currentField.CompareToOrdinal(term.Field) < 0);
+                    Debugging.Assert(() => currentField == null || currentField.CompareToOrdinal(term.Field) < 0);
                     currentField = term.Field;
                     Terms terms = fields.GetTerms(currentField);
                     if (terms != null)
@@ -487,7 +488,7 @@ private long ApplyTermDeletes(IEnumerable<Term> termsIter, ReadersAndUpdates rld
                 {
                     continue;
                 }
-                Debug.Assert(CheckDeleteTerm(term));
+                Debugging.Assert(() => CheckDeleteTerm(term));

                 // System.out.println("  term=" + term);
@@ -681,7 +682,7 @@ private bool CheckDeleteTerm(Term term)
         {
             if (term != null)
             {
-                Debug.Assert(lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0, "lastTerm=" + lastDeleteTerm + " vs term=" + term);
+                Debugging.Assert(() => lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0, () => "lastTerm=" + lastDeleteTerm + " vs term=" + term);
             }
             // TODO: we re-use term now in our merged iterable, but we shouldn't clone, instead copy for this assert
             lastDeleteTerm = term == null ? null : new Term(term.Field, BytesRef.DeepCopyOf(term.Bytes));
@@ -698,8 +699,8 @@ private bool CheckDeleteStats()
                 numTerms2 += packet.numTermDeletes;
                 bytesUsed2 += packet.bytesUsed;
             }
-            Debug.Assert(numTerms2 == numTerms, "numTerms2=" + numTerms2 + " vs " + numTerms);
-            Debug.Assert(bytesUsed2 == bytesUsed, "bytesUsed2=" + bytesUsed2 + " vs " + bytesUsed);
+            Debugging.Assert(() => numTerms2 == numTerms, () => "numTerms2=" + numTerms2 + " vs " + numTerms);
+            Debugging.Assert(() => bytesUsed2 == bytesUsed, () => "bytesUsed2=" + bytesUsed2 + " vs " + bytesUsed);
             return true;
         }
     }
diff --git a/src/Lucene.Net/Index/ByteSliceReader.cs b/src/Lucene.Net/Index/ByteSliceReader.cs
index 47061ec91b..5380c5d7f3 100644
--- a/src/Lucene.Net/Index/ByteSliceReader.cs
+++ b/src/Lucene.Net/Index/ByteSliceReader.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Diagnostics;
@@ -47,9 +48,9 @@ internal ByteSliceReader() { } // LUCENENET specific - made constructor internal
         public void Init(ByteBlockPool pool, int startIndex, int endIndex)
         {
-            Debug.Assert(endIndex - startIndex >= 0);
-            Debug.Assert(startIndex >= 0);
-            Debug.Assert(endIndex >= 0);
+            Debugging.Assert(() => endIndex - startIndex >= 0);
+            Debugging.Assert(() => startIndex >= 0);
+            Debugging.Assert(() => endIndex >= 0);

             this.pool = pool;
             this.EndIndex = endIndex;
@@ -75,14 +76,14 @@ public void Init(ByteBlockPool pool, int startIndex, int endIndex)
         public bool Eof()
         {
-            Debug.Assert(upto + BufferOffset <= EndIndex);
+            Debugging.Assert(() => upto + BufferOffset <= EndIndex);
             return upto + BufferOffset == EndIndex;
         }

         public override byte ReadByte()
         {
-            Debug.Assert(!Eof());
-            Debug.Assert(upto <= limit);
+            Debugging.Assert(() => !Eof());
+            Debugging.Assert(() => upto <= limit);
             if (upto == limit)
             {
                 NextSlice();
@@ -97,7 +98,7 @@ public long WriteTo(DataOutput @out)
             {
                 if (limit + BufferOffset == EndIndex)
                 {
-                    Debug.Assert(EndIndex - BufferOffset >= upto);
+                    Debugging.Assert(() => EndIndex - BufferOffset >= upto);
                     @out.WriteBytes(buffer, upto, limit - upto);
                     size += limit - upto;
                     break;
@@ -130,7 +131,7 @@ public void NextSlice()
             if (nextIndex + newSize >= EndIndex)
             {
                 // We are advancing to the final slice
                Debug.Assert(EndIndex - nextIndex > 0);
-                Debug.Assert(EndIndex - nextIndex > 0);
+                Debugging.Assert(() => EndIndex - nextIndex > 0);
                 limit = EndIndex - BufferOffset;
             }
             else
diff --git a/src/Lucene.Net/Index/ByteSliceWriter.cs b/src/Lucene.Net/Index/ByteSliceWriter.cs
index b4dca87bb8..e51789e669 100644
--- a/src/Lucene.Net/Index/ByteSliceWriter.cs
+++ b/src/Lucene.Net/Index/ByteSliceWriter.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System.Diagnostics;

 namespace Lucene.Net.Index
@@ -46,26 +47,26 @@ public ByteSliceWriter(ByteBlockPool pool)
         public void Init(int address)
         {
             slice = pool.Buffers[address >> ByteBlockPool.BYTE_BLOCK_SHIFT];
-            Debug.Assert(slice != null);
+            Debugging.Assert(() => slice != null);
             upto = address & ByteBlockPool.BYTE_BLOCK_MASK;
             offset0 = address;
-            Debug.Assert(upto < slice.Length);
+            Debugging.Assert(() => upto < slice.Length);
         }

         ///
         /// Write byte into byte slice stream
         public override void WriteByte(byte b)
         {
-            Debug.Assert(slice != null);
+            Debugging.Assert(() => slice != null);
             if (slice[upto] != 0)
             {
                 upto = pool.AllocSlice(slice, upto);
                 slice = pool.Buffer;
                 offset0 = pool.ByteOffset;
-                Debug.Assert(slice != null);
+                Debugging.Assert(() => slice != null);
             }
             slice[upto++] = (byte)b;
-            Debug.Assert(upto != slice.Length);
+            Debugging.Assert(() => upto != slice.Length);
         }

         public override void WriteBytes(byte[] b, int offset, int len)
@@ -82,7 +83,7 @@ public override void WriteBytes(byte[] b, int offset, int len)
                 }

                 slice[upto++] = (byte)b[offset++];
-                Debug.Assert(upto != slice.Length);
+                Debugging.Assert(() => upto != slice.Length);
             }
         }
diff --git a/src/Lucene.Net/Index/CheckIndex.cs b/src/Lucene.Net/Index/CheckIndex.cs
index 11581c5f7e..2d5858162c 100644
--- a/src/Lucene.Net/Index/CheckIndex.cs
+++ b/src/Lucene.Net/Index/CheckIndex.cs
@@ -1,4 +1,5 @@
 using J2N.Text;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
 using Lucene.Net.Support.IO;
@@ -948,7 +949,7 @@ public static Status.FieldNormStatus TestFieldNorms(AtomicReader reader, TextWri
                 if (info.HasNorms)
                 {
 #pragma warning disable 612, 618
-                    Debug.Assert(reader.HasNorms(info.Name)); // deprecated path
+                    Debugging.Assert(() => reader.HasNorms(info.Name)); // deprecated path
 #pragma warning restore 612, 618
                     CheckNorms(info, reader, infoStream);
                     ++status.TotFields;
@@ -956,7 +957,7 @@ public static Status.FieldNormStatus TestFieldNorms(AtomicReader reader, TextWri
                 else
                 {
 #pragma warning disable 612, 618
-                    Debug.Assert(!reader.HasNorms(info.Name)); // deprecated path
+                    Debugging.Assert(() => !reader.HasNorms(info.Name)); // deprecated path
 #pragma warning restore 612, 618
                     if (reader.GetNormValues(info.Name) != null)
                     {
@@ -1101,7 +1102,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs,
                     break;
                 }

-                Debug.Assert(term.IsValid());
+                Debugging.Assert(term.IsValid);

                 // make sure terms arrive in order according to
                 // the comp
@@ -1236,7 +1237,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs,
                         BytesRef payload = postings.GetPayload();
                         if (payload != null)
                         {
-                            Debug.Assert(payload.IsValid());
+                            Debugging.Assert(payload.IsValid);
                         }
                         if (payload != null && payload.Length < 1)
                         {
@@ -1454,7 +1455,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs,
                 if (fieldTerms is BlockTreeTermsReader.FieldReader)
                 {
                     BlockTreeTermsReader.Stats stats = ((BlockTreeTermsReader.FieldReader)fieldTerms).ComputeStats();
-                    Debug.Assert(stats != null);
+                    Debugging.Assert(() => stats != null);
                     if (status.BlockTreeStats == null)
                     {
                         status.BlockTreeStats = new Dictionary<string, BlockTreeTermsReader.Stats>();
@@ -1809,7 +1810,7 @@ private static void CheckBinaryDocValues(string fieldName, AtomicReader reader,
             for (int i = 0; i < reader.MaxDoc; i++)
             {
                 dv.Get(i, scratch);
-                Debug.Assert(scratch.IsValid());
+                Debugging.Assert(scratch.IsValid);
                 if (docsWithField.Get(i) == false && scratch.Length > 0)
                 {
                     throw new Exception("dv for field: " + fieldName + " is missing but has value=" + scratch + " for doc: " + i);
@@ -1860,7 +1861,7 @@ private static void CheckSortedDocValues(string fieldName, AtomicReader reader,
             for (int i = 0; i <= maxOrd; i++)
             {
                 dv.LookupOrd(i, scratch);
-                Debug.Assert(scratch.IsValid());
+                Debugging.Assert(scratch.IsValid);
                 if (lastValue != null)
                 {
                     if (scratch.CompareTo(lastValue) <= 0)
@@ -1952,7 +1953,7 @@ private static void CheckSortedSetDocValues(string fieldName, AtomicReader reade
             for (long i = 0; i <= maxOrd; i++)
             {
                 dv.LookupOrd(i, scratch);
-                Debug.Assert(scratch.IsValid());
+                Debugging.Assert(scratch.IsValid);
                 if (lastValue != null)
                 {
                     if (scratch.CompareTo(lastValue) <= 0)
@@ -2155,25 +2156,25 @@ public static Status.TermVectorStatus TestTermVectors(AtomicReader reader, TextW
                             if (hasProx)
                             {
                                 postings = termsEnum.DocsAndPositions(null, postings);
-                                Debug.Assert(postings != null);
+                                Debugging.Assert(() => postings != null);
                                 docs = null;
                             }
                             else
                             {
                                 docs = termsEnum.Docs(null, docs);
-                                Debug.Assert(docs != null);
+                                Debugging.Assert(() => docs != null);
                                 postings = null;
                             }

                             DocsEnum docs2;
                             if (hasProx)
                             {
-                                Debug.Assert(postings != null);
+                                Debugging.Assert(() => postings != null);
                                 docs2 = postings;
                             }
                             else
                             {
-                                Debug.Assert(docs != null);
+                                Debugging.Assert(() => docs != null);
                                 docs2 = docs;
                             }
@@ -2271,12 +2272,12 @@ public static Status.TermVectorStatus TestTermVectors(AtomicReader reader, TextW
                                         if (payload != null)
                                         {
-                                            Debug.Assert(vectorsHasPayload);
+                                            Debugging.Assert(() => vectorsHasPayload);
                                         }

                                         if (postingsHasPayload && vectorsHasPayload)
                                         {
-                                            Debug.Assert(postingsPostings != null);
+                                            Debugging.Assert(() => postingsPostings != null);

                                             if (payload == null)
                                             {
@@ -2365,7 +2366,7 @@ private static bool TestAsserts()

         private static bool AssertsOn()
         {
-            Debug.Assert(TestAsserts());
+            Debugging.Assert(TestAsserts);
             return assertsOn;
         }
diff --git a/src/Lucene.Net/Index/CompositeReader.cs b/src/Lucene.Net/Index/CompositeReader.cs
index 313e981156..430efbab54 100644
--- a/src/Lucene.Net/Index/CompositeReader.cs
+++ b/src/Lucene.Net/Index/CompositeReader.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -82,7 +83,7 @@ public override string ToString()
             }
             buffer.Append('(');
             var subReaders = GetSequentialSubReaders();
-            Debug.Assert(subReaders != null);
+            Debugging.Assert(() => subReaders != null);
             if (subReaders.Count > 0)
             {
                 buffer.Append(subReaders[0]);
@@ -114,7 +115,7 @@ public override sealed IndexReaderContext Context
                 // lazy init without thread safety for perf reasons: Building the readerContext twice does not hurt!
                 if (readerContext == null)
                 {
-                    Debug.Assert(GetSequentialSubReaders() != null);
+                    Debugging.Assert(() => GetSequentialSubReaders() != null);
                     readerContext = CompositeReaderContext.Create(this);
                 }
                 return readerContext;
diff --git a/src/Lucene.Net/Index/CompositeReaderContext.cs b/src/Lucene.Net/Index/CompositeReaderContext.cs
index 1e19eada6f..e4188dcd95 100644
--- a/src/Lucene.Net/Index/CompositeReaderContext.cs
+++ b/src/Lucene.Net/Index/CompositeReaderContext.cs
@@ -1,4 +1,5 @@
 using J2N.Collections.Generic.Extensions;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -69,7 +70,7 @@ public override IList<AtomicReaderContext> Leaves
                 {
                     throw new NotSupportedException("this is not a top-level context.");
                 }
-                Debug.Assert(leaves != null);
+                Debugging.Assert(() => leaves != null);
                 return leaves;
             }
         }
@@ -125,7 +126,7 @@ internal IndexReaderContext Build(CompositeReaderContext parent, IndexReader rea
                     children[i] = Build(newParent, r, i, newDocBase);
                     newDocBase += r.MaxDoc;
                 }
-                Debug.Assert(newDocBase == cr.MaxDoc);
+                Debugging.Assert(() => newDocBase == cr.MaxDoc);
                 return newParent;
             }
         }
diff --git a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
index 6be45effa6..f394b4f5ae 100644
--- a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
+++ b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
@@ -394,7 +394,7 @@ public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMer
         {
             lock (this)
             {
-                //Debug.Assert(!Thread.holdsLock(writer));
+                //Debugging.Assert(!Thread.holdsLock(writer));

                 this.m_writer = writer;
diff --git a/src/Lucene.Net/Index/DirectoryReader.cs b/src/Lucene.Net/Index/DirectoryReader.cs
index dd7f211890..f34c2e5d97 100644
--- a/src/Lucene.Net/Index/DirectoryReader.cs
+++ b/src/Lucene.Net/Index/DirectoryReader.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -171,7 +172,7 @@ public abstract class DirectoryReader : BaseCompositeReader<AtomicReader>
         public static DirectoryReader OpenIfChanged(DirectoryReader oldReader)
         {
             DirectoryReader newReader = oldReader.DoOpenIfChanged();
-            Debug.Assert(newReader != oldReader);
+            Debugging.Assert(() => newReader != oldReader);
             return newReader;
         }
@@ -184,7 +185,7 @@ public static DirectoryReader OpenIfChanged(DirectoryReader oldReader)
         public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexCommit commit)
         {
             DirectoryReader newReader = oldReader.DoOpenIfChanged(commit);
-            Debug.Assert(newReader != oldReader);
+            Debugging.Assert(() => newReader != oldReader);
             return newReader;
         }
@@ -251,7 +252,7 @@ public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexComm
         public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexWriter writer, bool applyAllDeletes)
         {
             DirectoryReader newReader = oldReader.DoOpenIfChanged(writer, applyAllDeletes);
-            Debug.Assert(newReader != oldReader);
+            Debugging.Assert(() => newReader != oldReader);
             return newReader;
         }
diff --git a/src/Lucene.Net/Index/DocFieldProcessor.cs b/src/Lucene.Net/Index/DocFieldProcessor.cs
index 898776d692..2cdce5ed63 100644
--- a/src/Lucene.Net/Index/DocFieldProcessor.cs
+++ b/src/Lucene.Net/Index/DocFieldProcessor.cs
@@ -1,4 +1,5 @@
 using J2N.Text;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -79,7 +80,7 @@ public override void Flush(SegmentWriteState state)
                 childFields[f.FieldInfo.Name] = f;
             }

-            Debug.Assert(fields.Count == totalFieldCount);
+            Debugging.Assert(() => fields.Count == totalFieldCount);

             storedConsumer.Flush(state);
             consumer.Flush(childFields, state);
@@ -166,14 +167,14 @@ public ICollection Fields()
                     field = field.next;
                 }
             }
-            Debug.Assert(fields.Count == totalFieldCount);
+            Debugging.Assert(() => fields.Count == totalFieldCount);
             return fields;
         }

         private void Rehash()
         {
             int newHashSize = (fieldHash.Length * 2);
-            Debug.Assert(newHashSize > fieldHash.Length);
+            Debugging.Assert(() => newHashSize > fieldHash.Length);

             DocFieldProcessorPerField[] newHashArray = new DocFieldProcessorPerField[newHashSize];
@@ -246,7 +247,7 @@ public override void ProcessDocument(FieldInfos.Builder fieldInfos)
                     // need to addOrUpdate so that FieldInfos can update globalFieldNumbers
                     // with the correct DocValue type (LUCENE-5192)
                     FieldInfo fi = fieldInfos.AddOrUpdate(fieldName, field.IndexableFieldType);
-                    Debug.Assert(fi == fp.fieldInfo, "should only have updated an existing FieldInfo instance");
+                    Debugging.Assert(() => fi == fp.fieldInfo, () => "should only have updated an existing FieldInfo instance");
                 }

                 if (thisFieldGen != fp.lastGen)
diff --git a/src/Lucene.Net/Index/DocTermOrds.cs b/src/Lucene.Net/Index/DocTermOrds.cs
index 638a68ce06..eaa41ca4f5 100644
--- a/src/Lucene.Net/Index/DocTermOrds.cs
+++ b/src/Lucene.Net/Index/DocTermOrds.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -752,7 +753,7 @@ public OrdWrappedTermsEnum(DocTermOrds outerInstance, AtomicReader reader)
                 this.outerInstance = outerInstance;

                 InitializeInstanceFields();
-                Debug.Assert(outerInstance.m_indexedTermsArray != null);
+                Debugging.Assert(() => outerInstance.m_indexedTermsArray != null);
                 termsEnum = reader.Fields.GetTerms(outerInstance.m_field).GetIterator(null);
             }
@@ -804,10 +805,10 @@ public override SeekStatus SeekCeil(BytesRef target)
                 {
                     // we hit the term exactly... lucky us!
                     TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(target);
-                    Debug.Assert(seekStatus == TermsEnum.SeekStatus.FOUND);
+                    Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND);
                     ord = startIdx << outerInstance.indexIntervalBits;
                     SetTerm();
-                    Debug.Assert(term != null);
+                    Debugging.Assert(() => term != null);
                     return SeekStatus.FOUND;
                 }
@@ -818,10 +819,10 @@ public override SeekStatus SeekCeil(BytesRef target)
                 {
                     // our target occurs *before* the first term
                     TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(target);
-                    Debug.Assert(seekStatus == TermsEnum.SeekStatus.NOT_FOUND);
+                    Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.NOT_FOUND);
                     ord = 0;
                     SetTerm();
-                    Debug.Assert(term != null);
+                    Debugging.Assert(() => term != null);
                     return SeekStatus.NOT_FOUND;
                 }
@@ -837,10 +838,10 @@ public override SeekStatus SeekCeil(BytesRef target)
                 {
                     // seek to the right block
                     TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(outerInstance.m_indexedTermsArray[startIdx]);
-                    Debug.Assert(seekStatus == TermsEnum.SeekStatus.FOUND);
+                    Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND);
                     ord = startIdx << outerInstance.indexIntervalBits;
                     SetTerm();
-                    Debug.Assert(term != null); // should be non-null since it's in the index
+                    Debugging.Assert(() => term != null); // should be non-null since it's in the index
                 }

                 while (term != null && term.CompareTo(target) < 0)
@@ -874,7 +875,7 @@ public override void SeekExact(long targetOrd)
                     ord = idx << outerInstance.indexIntervalBits;
                     delta = (int)(targetOrd - ord);
                     TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(@base);
-                    Debug.Assert(seekStatus == TermsEnum.SeekStatus.FOUND);
+                    Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND);
                 }
                 else
                 {
@@ -886,14 +887,14 @@ public override void SeekExact(long targetOrd)
                         BytesRef br = termsEnum.Next();
                         if (br == null)
                         {
-                            Debug.Assert(false);
+                            Debugging.Assert(() => false);
                             return;
                         }
                         ord++;
                     }

                 SetTerm();
-                Debug.Assert(term != null);
+                Debugging.Assert(() => term != null);
             }

             private BytesRef SetTerm()
diff --git a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs
index 9134fe2dbd..b2eaf0a5d7 100644
--- a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs
+++ b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -113,14 +114,14 @@ internal virtual DocValuesFieldUpdates NewUpdates(string field, DocValuesFieldUp
             {
                 case DocValuesFieldUpdatesType.NUMERIC:
                     NumericDocValuesFieldUpdates numericUpdates;
-                    Debug.Assert(!numericDVUpdates.TryGetValue(field, out numericUpdates));
+                    Debugging.Assert(() => !numericDVUpdates.TryGetValue(field, out numericUpdates));
                     numericUpdates = new NumericDocValuesFieldUpdates(field, maxDoc);
                     numericDVUpdates[field] = numericUpdates;
                     return numericUpdates;

                 case DocValuesFieldUpdatesType.BINARY:
                     BinaryDocValuesFieldUpdates binaryUpdates;
-                    Debug.Assert(!binaryDVUpdates.TryGetValue(field, out binaryUpdates));
+                    Debugging.Assert(() => !binaryDVUpdates.TryGetValue(field, out binaryUpdates));
                     binaryUpdates = new BinaryDocValuesFieldUpdates(field, maxDoc);
                     binaryDVUpdates[field] = binaryUpdates;
                     return binaryUpdates;
diff --git a/src/Lucene.Net/Index/DocValuesProcessor.cs b/src/Lucene.Net/Index/DocValuesProcessor.cs
index 03a8c282e5..56c6eabbd7 100644
--- a/src/Lucene.Net/Index/DocValuesProcessor.cs
+++ b/src/Lucene.Net/Index/DocValuesProcessor.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Diagnostics;
using Lucene.Net.Documents; using Lucene.Net.Documents.Extensions; using System; @@ -81,7 +82,7 @@ public override void AddField(int docID, IIndexableField field, FieldInfo fieldI } else { - Debug.Assert(false, "unrecognized DocValues.Type: " + dvType); + Debugging.Assert(() => false, () => "unrecognized DocValues.Type: " + dvType); } } } @@ -218,7 +219,7 @@ private string GetTypeDesc(DocValuesWriter obj) } else { - Debug.Assert(obj is SortedDocValuesWriter); + Debugging.Assert(() => obj is SortedDocValuesWriter); return "sorted"; } } diff --git a/src/Lucene.Net/Index/DocumentsWriter.cs b/src/Lucene.Net/Index/DocumentsWriter.cs index 9975f4cf77..72649029da 100644 --- a/src/Lucene.Net/Index/DocumentsWriter.cs +++ b/src/Lucene.Net/Index/DocumentsWriter.cs @@ -1,4 +1,5 @@ using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using System; using System.Collections.Concurrent; using System.Collections.Generic; @@ -245,7 +246,7 @@ internal void Abort(IndexWriter writer) { lock (this) { - //Debug.Assert(!Thread.HoldsLock(writer), "IndexWriter lock should never be hold when aborting"); + //Debugging.Assert(!Thread.HoldsLock(writer), "IndexWriter lock should never be hold when aborting"); bool success = false; JCG.HashSet newFilesSet = new JCG.HashSet(); try @@ -288,7 +289,7 @@ internal void LockAndAbortAll(IndexWriter indexWriter) { lock (this) { - //Debug.Assert(indexWriter.HoldsFullFlushLock()); + //Debugging.Assert(indexWriter.HoldsFullFlushLock()); if (infoStream.IsEnabled("DW")) { infoStream.Message("DW", "lockAndAbortAll"); @@ -328,7 +329,7 @@ internal void LockAndAbortAll(IndexWriter indexWriter) private void AbortThreadState(ThreadState perThread, ISet newFiles) { - //Debug.Assert(perThread.HeldByCurrentThread); + //Debugging.Assert(perThread.HeldByCurrentThread); if (perThread.IsActive) // we might be closed { if (perThread.IsInitialized) @@ -351,7 +352,7 @@ private void AbortThreadState(ThreadState perThread, ISet newFiles) } else { - Debug.Assert(closed); + Debugging.Assert(() => closed); } } @@ -359,7 +360,7 @@ internal void UnlockAllAfterAbortAll(IndexWriter indexWriter) { lock (this) { - //Debug.Assert(indexWriter.HoldsFullFlushLock()); + //Debugging.Assert(indexWriter.HoldsFullFlushLock()); if (infoStream.IsEnabled("DW")) { infoStream.Message("DW", "unlockAll"); @@ -499,10 +500,10 @@ internal bool UpdateDocuments(IEnumerable> docs, An if (!perThread.IsActive) { EnsureOpen(); - Debug.Assert(false, "perThread is not active but we are still open"); + Debugging.Assert(() => false, () => "perThread is not active but we are still open"); } EnsureInitialized(perThread); - Debug.Assert(perThread.IsInitialized); + Debugging.Assert(() => perThread.IsInitialized); DocumentsWriterPerThread dwpt = perThread.dwpt; int dwptNumDocs = dwpt.NumDocsInRAM; try @@ -545,10 +546,10 @@ internal bool UpdateDocument(IEnumerable doc, Analyzer analyzer if (!perThread.IsActive) { EnsureOpen(); - Debug.Assert(false, "perThread is not active but we are still open"); + Debugging.Assert(() => false, () => "perThread is not active but we are still open"); } EnsureInitialized(perThread); - Debug.Assert(perThread.IsInitialized); + Debugging.Assert(() => perThread.IsInitialized); DocumentsWriterPerThread dwpt = perThread.dwpt; int dwptNumDocs = dwpt.NumDocsInRAM; try @@ -589,7 +590,7 @@ private bool DoFlush(DocumentsWriterPerThread flushingDWPT) SegmentFlushTicket ticket = null; try { - Debug.Assert(currentFullFlushDelQueue == null || flushingDWPT.deleteQueue == currentFullFlushDelQueue, "expected: " + 
currentFullFlushDelQueue + "but was: " + flushingDWPT.deleteQueue + " " + flushControl.IsFullFlush); + Debugging.Assert(() => currentFullFlushDelQueue == null || flushingDWPT.deleteQueue == currentFullFlushDelQueue, () => "expected: " + currentFullFlushDelQueue + "but was: " + flushingDWPT.deleteQueue + " " + flushControl.IsFullFlush); /* * Since with DWPT the flush process is concurrent and several DWPT * could flush at the same time we must maintain the order of the @@ -736,10 +737,10 @@ internal bool FlushAllThreads(IndexWriter indexWriter) * otherwise a new DWPT could sneak into the loop with an already flushing * delete queue */ flushControl.MarkForFullFlush(); // swaps the delQueue synced on FlushControl - Debug.Assert(SetFlushingDeleteQueue(flushingDeleteQueue)); + Debugging.Assert(() => SetFlushingDeleteQueue(flushingDeleteQueue)); } - Debug.Assert(currentFullFlushDelQueue != null); - Debug.Assert(currentFullFlushDelQueue != deleteQueue); + Debugging.Assert(() => currentFullFlushDelQueue != null); + Debugging.Assert(() => currentFullFlushDelQueue != deleteQueue); bool anythingFlushed = false; try @@ -761,11 +762,11 @@ internal bool FlushAllThreads(IndexWriter indexWriter) ticketQueue.AddDeletes(flushingDeleteQueue); } ticketQueue.ForcePurge(indexWriter); - Debug.Assert(!flushingDeleteQueue.AnyChanges() && !ticketQueue.HasTickets); + Debugging.Assert(() => !flushingDeleteQueue.AnyChanges() && !ticketQueue.HasTickets); } finally { - Debug.Assert(flushingDeleteQueue == currentFullFlushDelQueue); + Debugging.Assert(() => flushingDeleteQueue == currentFullFlushDelQueue); } return anythingFlushed; } @@ -778,7 +779,7 @@ internal void FinishFullFlush(bool success) { infoStream.Message("DW", Thread.CurrentThread.Name + " finishFullFlush success=" + success); } - Debug.Assert(SetFlushingDeleteQueue(null)); + Debugging.Assert(() => SetFlushingDeleteQueue(null)); if (success) { // Release the flush lock @@ -811,7 +812,7 @@ internal sealed class ApplyDeletesEvent : IEvent internal ApplyDeletesEvent() { - Debug.Assert(instCount == 0); + Debugging.Assert(() => instCount == 0); instCount++; } @@ -828,7 +829,7 @@ internal sealed class MergePendingEvent : IEvent internal MergePendingEvent() { - Debug.Assert(instCount == 0); + Debugging.Assert(() => instCount == 0); instCount++; } @@ -845,7 +846,7 @@ internal sealed class ForcedPurgeEvent : IEvent internal ForcedPurgeEvent() { - Debug.Assert(instCount == 0); + Debugging.Assert(() => instCount == 0); instCount++; } diff --git a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs index 02de3dfc31..0eee1e087e 100644 --- a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs +++ b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Support.Threading; using System; @@ -144,7 +145,7 @@ internal void Add(Term term, DeleteSlice slice) * competing updates wins! */ slice.sliceTail = termNode; - Debug.Assert(slice.sliceHead != slice.sliceTail, "slice head and tail must differ after add"); + Debugging.Assert(() => slice.sliceHead != slice.sliceTail, () => "slice head and tail must differ after add"); TryApplyGlobalSlice(); // TODO doing this each time is not necessary maybe // we can do it just every n times or so? 
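The FlushAllThreads and FinishFullFlush hunks above show a second shape: the condition delegate of Debugging.Assert(() => SetFlushingDeleteQueue(...)) carries a deliberate side effect, so the bookkeeping runs only when asserts are enabled. A reduced sketch of that idiom, using a stand-in field and method rather than the real flush accounting:

    using Lucene.Net.Diagnostics;

    internal sealed class FlushQueueSwapSketch
    {
        private volatile object currentFullFlushDelQueue;

        internal object CurrentQueue => currentFullFlushDelQueue;

        private bool SetFlushingDeleteQueue(object queue)
        {
            currentFullFlushDelQueue = queue;
            return true; // always true: the assert gates the side effect, it never fails
        }

        internal void FinishFullFlush()
        {
            // The swap below happens only when asserts are enabled at runtime.
            Debugging.Assert(() => SetFlushingDeleteQueue(null));
        }
    }
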
} @@ -293,7 +294,7 @@ internal class DeleteSlice internal DeleteSlice(Node currentTail) { - Debug.Assert(currentTail != null); + Debugging.Assert(() => currentTail != null); /* * Initially this is a 0 length slice pointing to the 'current' tail of * the queue. Once we update the slice we only need to assign the tail and @@ -319,7 +320,7 @@ internal virtual void Apply(BufferedUpdates del, int docIDUpto) do { current = current.next; - Debug.Assert(current != null, "slice property violated between the head on the tail must not be a null node"); + Debugging.Assert(() => current != null, () => "slice property violated between the head on the tail must not be a null node"); current.Apply(del, docIDUpto); // System.out.println(Thread.currentThread().getName() + ": pull " + current + " docIDUpto=" + docIDUpto); } while (current != sliceTail); diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs index d4b992472b..527413ed76 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs @@ -1,5 +1,6 @@ using J2N.Runtime.CompilerServices; using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -152,7 +153,7 @@ private bool AssertMemory() * fail. To prevent this we only assert if the the largest document seen * is smaller than the 1/2 of the maxRamBufferMB */ - Debug.Assert(ram <= expected, "actual mem: " + ram + " byte, expected mem: " + expected + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes + ", pending DWPT: " + numPending + ", flushing DWPT: " + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes + ", peakDelta mem: " + peakDelta + " byte"); + Debugging.Assert(() => ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes + ", pending DWPT: " + numPending + ", flushing DWPT: " + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes + ", peakDelta mem: " + peakDelta + " byte"); } } return true; @@ -175,7 +176,7 @@ private void CommitPerThreadBytes(ThreadState perThread) { activeBytes += delta; } - Debug.Assert(UpdatePeaks(delta)); + Debugging.Assert(() => UpdatePeaks(delta)); } // only for asserts @@ -235,7 +236,7 @@ internal DocumentsWriterPerThread DoAfterDocument(ThreadState perThread, bool is finally { bool stalled = UpdateStallState(); - Debug.Assert(AssertNumDocsSinceStalled(stalled) && AssertMemory()); + Debugging.Assert(() => AssertNumDocsSinceStalled(stalled) && AssertMemory()); } } } @@ -264,14 +265,14 @@ internal void DoAfterFlush(DocumentsWriterPerThread dwpt) { lock (this) { - Debug.Assert(flushingWriters.ContainsKey(dwpt)); + Debugging.Assert(() => flushingWriters.ContainsKey(dwpt)); try { long? 
bytes = flushingWriters[dwpt]; flushingWriters.Remove(dwpt); flushBytes -= (long)bytes; perThreadPool.Recycle(dwpt); - Debug.Assert(AssertMemory()); + Debugging.Assert(AssertMemory); } finally { @@ -289,7 +290,7 @@ internal void DoAfterFlush(DocumentsWriterPerThread dwpt) private bool UpdateStallState() { - //Debug.Assert(Thread.holdsLock(this)); + //Debugging.Assert(Thread.holdsLock(this)); long limit = StallLimitBytes; /* * we block indexing threads if net byte grows due to slow flushes @@ -334,7 +335,7 @@ public void SetFlushPending(ThreadState perThread) { lock (this) { - Debug.Assert(!perThread.flushPending); + Debugging.Assert(() => !perThread.flushPending); if (perThread.dwpt.NumDocsInRAM > 0) { perThread.flushPending = true; // write access synced @@ -342,7 +343,7 @@ public void SetFlushPending(ThreadState perThread) flushBytes += bytes; activeBytes -= bytes; numPending++; // write access synced - Debug.Assert(AssertMemory()); + Debugging.Assert(AssertMemory); } // don't assert on numDocs since we could hit an abort excp. while selecting that dwpt for flushing } } @@ -361,7 +362,7 @@ internal void DoOnAbort(ThreadState state) { activeBytes -= state.bytesUsed; } - Debug.Assert(AssertMemory()); + Debugging.Assert(AssertMemory); // Take it out of the loop this DWPT is stale perThreadPool.Reset(state, closed); } @@ -376,7 +377,7 @@ internal DocumentsWriterPerThread TryCheckoutForFlush(ThreadState perThread) { lock (this) { - Debug.Assert(perThread.IsHeldByCurrentThread); // LUCENENET specific: Since .NET Core doesn't use unfair locking, we need to ensure the current thread has a lock before calling InternalTryCheckoutForFlush. + Debugging.Assert(() => perThread.IsHeldByCurrentThread); // LUCENENET specific: Since .NET Core doesn't use unfair locking, we need to ensure the current thread has a lock before calling InternalTryCheckoutForFlush. return perThread.flushPending ? InternalTryCheckOutForFlush(perThread) : null; } } @@ -386,8 +387,8 @@ private void CheckoutAndBlock(ThreadState perThread) perThread.@Lock(); try { - Debug.Assert(perThread.flushPending, "can not block non-pending threadstate"); - Debug.Assert(fullFlush, "can not block if fullFlush == false"); + Debugging.Assert(() => perThread.flushPending, () => "can not block non-pending threadstate"); + Debugging.Assert(() => fullFlush, () => "can not block if fullFlush == false"); DocumentsWriterPerThread dwpt; long bytes = perThread.bytesUsed; dwpt = perThreadPool.Reset(perThread, closed); @@ -403,9 +404,9 @@ private void CheckoutAndBlock(ThreadState perThread) private DocumentsWriterPerThread InternalTryCheckOutForFlush(ThreadState perThread) { // LUCENENET specific - Since we need to mimic the unfair behavior of ReentrantLock, we need to ensure that all threads that enter here hold the lock. - Debug.Assert(perThread.IsHeldByCurrentThread); - Debug.Assert(Monitor.IsEntered(this)); - Debug.Assert(perThread.flushPending); + Debugging.Assert(() => perThread.IsHeldByCurrentThread); + Debugging.Assert(() => Monitor.IsEntered(this)); + Debugging.Assert(() => perThread.flushPending); try { // LUCENENET specific - We removed the call to perThread.TryLock() and the try-finally below as they are no longer needed. 
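When the check is already a bool-returning method, the hunks pass the method group directly, as with Debugging.Assert(AssertMemory) above; FieldInfo.CheckConsistency and IndexWriter's NoDups later in this commit use the same shorthand. A small illustrative sketch:

    using Lucene.Net.Diagnostics;

    internal sealed class MemoryAccountingSketch
    {
        private long activeBytes;

        private bool AssertMemory()
        {
            // The invariant lives in a named method so call sites can hand it
            // to the assert as a Func<bool> without restating the expression.
            return activeBytes >= 0;
        }

        internal void CommitBytes(long delta)
        {
            activeBytes += delta;
            Debugging.Assert(AssertMemory); // method group converts to Func<bool>
        }
    }
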
@@ -413,12 +414,12 @@ private DocumentsWriterPerThread InternalTryCheckOutForFlush(ThreadState perThre // We are pending so all memory is already moved to flushBytes if (perThread.IsInitialized) { - Debug.Assert(perThread.IsHeldByCurrentThread); + Debugging.Assert(() => perThread.IsHeldByCurrentThread); DocumentsWriterPerThread dwpt; long bytes = perThread.bytesUsed; // do that before // replace! dwpt = perThreadPool.Reset(perThread, closed); - Debug.Assert(!flushingWriters.ContainsKey(dwpt), "DWPT is already flushing"); + Debugging.Assert(() => !flushingWriters.ContainsKey(dwpt), () => "DWPT is already flushing"); // Record the flushing DWPT to reduce flushBytes in doAfterFlush flushingWriters[dwpt] = bytes; numPending--; // write access synced @@ -613,8 +614,8 @@ internal void MarkForFullFlush() DocumentsWriterDeleteQueue flushingQueue; lock (this) { - Debug.Assert(!fullFlush, "called DWFC#markForFullFlush() while full flush is still running"); - Debug.Assert(fullFlushBuffer.Count == 0, "full flush buffer should be empty: " + fullFlushBuffer); + Debugging.Assert(() => !fullFlush, () => "called DWFC#markForFullFlush() while full flush is still running"); + Debugging.Assert(() => fullFlushBuffer.Count == 0, () => "full flush buffer should be empty: " + fullFlushBuffer); fullFlush = true; flushingQueue = documentsWriter.deleteQueue; // Set a new delete queue - all subsequent DWPT will use this queue until @@ -637,7 +638,7 @@ internal void MarkForFullFlush() } continue; } - Debug.Assert(next.dwpt.deleteQueue == flushingQueue || next.dwpt.deleteQueue == documentsWriter.deleteQueue, " flushingQueue: " + flushingQueue + " currentqueue: " + documentsWriter.deleteQueue + " perThread queue: " + next.dwpt.deleteQueue + " numDocsInRam: " + next.dwpt.NumDocsInRAM); + Debugging.Assert(() => next.dwpt.deleteQueue == flushingQueue || next.dwpt.deleteQueue == documentsWriter.deleteQueue, () => " flushingQueue: " + flushingQueue + " currentqueue: " + documentsWriter.deleteQueue + " perThread queue: " + next.dwpt.deleteQueue + " numDocsInRam: " + next.dwpt.NumDocsInRAM); if (next.dwpt.deleteQueue != flushingQueue) { // this one is already a new DWPT @@ -657,7 +658,7 @@ internal void MarkForFullFlush() * a chance that this happens since we marking DWPT for full flush without * blocking indexing.*/ PruneBlockedQueue(flushingQueue); - Debug.Assert(AssertBlockedFlushes(documentsWriter.deleteQueue)); + Debugging.Assert(() => AssertBlockedFlushes(documentsWriter.deleteQueue)); //FlushQueue.AddAll(FullFlushBuffer); foreach (var dwpt in fullFlushBuffer) { @@ -666,7 +667,7 @@ internal void MarkForFullFlush() fullFlushBuffer.Clear(); UpdateStallState(); } - Debug.Assert(AssertActiveDeleteQueue(documentsWriter.deleteQueue)); + Debugging.Assert(() => AssertActiveDeleteQueue(documentsWriter.deleteQueue)); } private bool AssertActiveDeleteQueue(DocumentsWriterDeleteQueue queue) @@ -678,7 +679,7 @@ private bool AssertActiveDeleteQueue(DocumentsWriterDeleteQueue queue) next.@Lock(); try { - Debug.Assert(!next.IsInitialized || next.dwpt.deleteQueue == queue, "isInitialized: " + next.IsInitialized + " numDocs: " + (next.IsInitialized ? next.dwpt.NumDocsInRAM : 0)); + Debugging.Assert(() => !next.IsInitialized || next.dwpt.deleteQueue == queue, () => "isInitialized: " + next.IsInitialized + " numDocs: " + (next.IsInitialized ? 
next.dwpt.NumDocsInRAM : 0)); } finally { @@ -697,10 +698,10 @@ internal void AddFlushableState(ThreadState perThread) infoStream.Message("DWFC", "addFlushableState " + perThread.dwpt); } DocumentsWriterPerThread dwpt = perThread.dwpt; - Debug.Assert(perThread.IsHeldByCurrentThread); - Debug.Assert(perThread.IsInitialized); - Debug.Assert(fullFlush); - Debug.Assert(dwpt.deleteQueue != documentsWriter.deleteQueue); + Debugging.Assert(() => perThread.IsHeldByCurrentThread); + Debugging.Assert(() => perThread.IsInitialized); + Debugging.Assert(() => fullFlush); + Debugging.Assert(() => dwpt.deleteQueue != documentsWriter.deleteQueue); if (dwpt.NumDocsInRAM > 0) { lock (this) @@ -710,8 +711,8 @@ internal void AddFlushableState(ThreadState perThread) SetFlushPending(perThread); } DocumentsWriterPerThread flushingDWPT = InternalTryCheckOutForFlush(perThread); - Debug.Assert(flushingDWPT != null, "DWPT must never be null here since we hold the lock and it holds documents"); - Debug.Assert(dwpt == flushingDWPT, "flushControl returned different DWPT"); + Debugging.Assert(() => flushingDWPT != null, () => "DWPT must never be null here since we hold the lock and it holds documents"); + Debugging.Assert(() => dwpt == flushingDWPT, () => "flushControl returned different DWPT"); fullFlushBuffer.Add(flushingDWPT); } } @@ -734,7 +735,7 @@ private void PruneBlockedQueue(DocumentsWriterDeleteQueue flushingQueue) if (blockedFlush.Dwpt.deleteQueue == flushingQueue) { blockedFlushes.Remove(node); - Debug.Assert(!flushingWriters.ContainsKey(blockedFlush.Dwpt), "DWPT is already flushing"); + Debugging.Assert(() => !flushingWriters.ContainsKey(blockedFlush.Dwpt), () => "DWPT is already flushing"); // Record the flushing DWPT to reduce flushBytes in doAfterFlush flushingWriters[blockedFlush.Dwpt] = blockedFlush.Bytes; // don't decr pending here - its already done when DWPT is blocked @@ -748,16 +749,16 @@ internal void FinishFullFlush() { lock (this) { - Debug.Assert(fullFlush); - Debug.Assert(flushQueue.Count == 0); - Debug.Assert(flushingWriters.Count == 0); + Debugging.Assert(() => fullFlush); + Debugging.Assert(() => flushQueue.Count == 0); + Debugging.Assert(() => flushingWriters.Count == 0); try { if (blockedFlushes.Count > 0) { - Debug.Assert(AssertBlockedFlushes(documentsWriter.deleteQueue)); + Debugging.Assert(() => AssertBlockedFlushes(documentsWriter.deleteQueue)); PruneBlockedQueue(documentsWriter.deleteQueue); - Debug.Assert(blockedFlushes.Count == 0); + Debugging.Assert(() => blockedFlushes.Count == 0); } } finally @@ -772,7 +773,7 @@ internal bool AssertBlockedFlushes(DocumentsWriterDeleteQueue flushingQueue) { foreach (BlockedFlush blockedFlush in blockedFlushes) { - Debug.Assert(blockedFlush.Dwpt.deleteQueue == flushingQueue); + Debugging.Assert(() => blockedFlush.Dwpt.deleteQueue == flushingQueue); } return true; } diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs index 277eca4d75..67350ce826 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs @@ -1,4 +1,5 @@ using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Support.Threading; using System.Collections.Generic; using System.Diagnostics; @@ -62,13 +63,13 @@ internal virtual void AddDeletes(DocumentsWriterDeleteQueue deleteQueue) private void IncTickets() { int numTickets = ticketCount.IncrementAndGet(); - Debug.Assert(numTickets > 0); + Debugging.Assert(() => numTickets > 0); } private void 
DecTickets() { int numTickets = ticketCount.DecrementAndGet(); - Debug.Assert(numTickets >= 0); + Debugging.Assert(() => numTickets >= 0); } internal virtual SegmentFlushTicket AddFlushTicket(DocumentsWriterPerThread dwpt) @@ -120,14 +121,14 @@ internal virtual bool HasTickets { get { - Debug.Assert(ticketCount >= 0, "ticketCount should be >= 0 but was: " + ticketCount); + Debugging.Assert(() => ticketCount >= 0, () => "ticketCount should be >= 0 but was: " + ticketCount); return ticketCount != 0; } } private int InnerPurge(IndexWriter writer) { - //Debug.Assert(PurgeLock.HeldByCurrentThread); + //Debugging.Assert(PurgeLock.HeldByCurrentThread); int numPurged = 0; while (true) { @@ -158,7 +159,7 @@ private int InnerPurge(IndexWriter writer) // finally remove the published ticket from the queue FlushTicket poll = queue.Dequeue(); ticketCount.DecrementAndGet(); - Debug.Assert(poll == head); + Debugging.Assert(() => poll == head); } } } @@ -172,8 +173,8 @@ private int InnerPurge(IndexWriter writer) internal virtual int ForcePurge(IndexWriter writer) { - //Debug.Assert(!Thread.HoldsLock(this)); - //Debug.Assert(!Thread.holdsLock(writer)); + //Debugging.Assert(!Thread.HoldsLock(this)); + //Debugging.Assert(!Thread.holdsLock(writer)); purgeLock.@Lock(); try { @@ -187,8 +188,8 @@ internal virtual int ForcePurge(IndexWriter writer) internal virtual int TryPurge(IndexWriter writer) { - //Debug.Assert(!Thread.holdsLock(this)); - //Debug.Assert(!Thread.holdsLock(writer)); + //Debugging.Assert(!Thread.holdsLock(this)); + //Debugging.Assert(!Thread.holdsLock(writer)); if (purgeLock.TryLock()) { try @@ -221,7 +222,7 @@ internal abstract class FlushTicket protected FlushTicket(FrozenBufferedUpdates frozenUpdates) { - Debug.Assert(frozenUpdates != null); + Debugging.Assert(() => frozenUpdates != null); this.m_frozenUpdates = frozenUpdates; } @@ -237,8 +238,8 @@ protected FlushTicket(FrozenBufferedUpdates frozenUpdates) /// protected void PublishFlushedSegment(IndexWriter indexWriter, FlushedSegment newSegment, FrozenBufferedUpdates globalPacket) { - Debug.Assert(newSegment != null); - Debug.Assert(newSegment.segmentInfo != null); + Debugging.Assert(() => newSegment != null); + Debugging.Assert(() => newSegment.segmentInfo != null); FrozenBufferedUpdates segmentUpdates = newSegment.segmentUpdates; //System.out.println("FLUSH: " + newSegment.segmentInfo.info.name); if (indexWriter.infoStream.IsEnabled("DW")) @@ -259,7 +260,7 @@ protected void FinishFlush(IndexWriter indexWriter, FlushedSegment newSegment, F // Finish the flushed segment and publish it to IndexWriter if (newSegment == null) { - Debug.Assert(bufferedUpdates != null); + Debugging.Assert(() => bufferedUpdates != null); if (bufferedUpdates != null && bufferedUpdates.Any()) { indexWriter.PublishFrozenUpdates(bufferedUpdates); @@ -285,7 +286,7 @@ internal GlobalDeletesTicket(FrozenBufferedUpdates frozenUpdates) // LUCENENET N protected internal override void Publish(IndexWriter writer) { - Debug.Assert(!m_published, "ticket was already publised - can not publish twice"); + Debugging.Assert(() => !m_published, () => "ticket was already publised - can not publish twice"); m_published = true; // its a global ticket - no segment to publish FinishFlush(writer, null, m_frozenUpdates); @@ -306,20 +307,20 @@ internal SegmentFlushTicket(FrozenBufferedUpdates frozenDeletes) // LUCENENET NO protected internal override void Publish(IndexWriter writer) { - Debug.Assert(!m_published, "ticket was already publised - can not publish twice"); + Debugging.Assert(() 
=> !m_published, () => "ticket was already publised - can not publish twice"); m_published = true; FinishFlush(writer, segment, m_frozenUpdates); } internal void SetSegment(FlushedSegment segment) // LUCENENET NOTE: Made internal rather than protected because class is sealed { - Debug.Assert(!failed); + Debugging.Assert(() => !failed); this.segment = segment; } internal void SetFailed() // LUCENENET NOTE: Made internal rather than protected because class is sealed { - Debug.Assert(segment == null); + Debugging.Assert(() => segment == null); failed = true; } diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs index 9a520ffbfd..349848eaac 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; @@ -235,12 +236,12 @@ public DocumentsWriterPerThread(string segmentName, Directory directory, LiveInd pendingUpdates = new BufferedUpdates(); intBlockAllocator = new Int32BlockAllocator(bytesUsed); this.deleteQueue = deleteQueue; - Debug.Assert(numDocsInRAM == 0, "num docs " + numDocsInRAM); + Debugging.Assert(() => numDocsInRAM == 0, () => "num docs " + numDocsInRAM); pendingUpdates.Clear(); deleteSlice = deleteQueue.NewSlice(); segmentInfo = new SegmentInfo(directoryOrig, Constants.LUCENE_MAIN_VERSION, segmentName, -1, false, codec, null); - Debug.Assert(numDocsInRAM == 0); + Debugging.Assert(() => numDocsInRAM == 0); if (INFO_VERBOSE && infoStream.IsEnabled("DWPT")) { infoStream.Message("DWPT", Thread.CurrentThread.Name + " init seg=" + segmentName + " delQueue=" + deleteQueue); @@ -273,9 +274,8 @@ internal bool TestPoint(string message) public virtual void UpdateDocument(IEnumerable doc, Analyzer analyzer, Term delTerm) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("DocumentsWriterPerThread addDocument start"); - Debug.Assert(deleteQueue != null); + Debugging.Assert(() => TestPoint("DocumentsWriterPerThread addDocument start")); + Debugging.Assert(() => deleteQueue != null); docState.doc = doc; docState.analyzer = analyzer; docState.docID = numDocsInRAM; @@ -330,9 +330,8 @@ public virtual void UpdateDocument(IEnumerable doc, Analyzer an public virtual int UpdateDocuments(IEnumerable> docs, Analyzer analyzer, Term delTerm) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("DocumentsWriterPerThread addDocuments start"); - Debug.Assert(deleteQueue != null); + Debugging.Assert(() => TestPoint("DocumentsWriterPerThread addDocuments start")); + Debugging.Assert(() => deleteQueue != null); docState.analyzer = analyzer; if (INFO_VERBOSE && infoStream.IsEnabled("DWPT")) { @@ -395,7 +394,7 @@ public virtual int UpdateDocuments(IEnumerable> doc if (delTerm != null) { deleteQueue.Add(delTerm, deleteSlice); - Debug.Assert(deleteSlice.IsTailItem(delTerm), "expected the delete term as the tail item"); + Debugging.Assert(() => deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); deleteSlice.Apply(pendingUpdates, numDocsInRAM - docCount); } } @@ -434,7 +433,7 @@ private void FinishDocument(Term delTerm) if (delTerm != null) { deleteQueue.Add(delTerm, deleteSlice); - Debug.Assert(deleteSlice.IsTailItem(delTerm), "expected the delete term as the tail item"); + Debugging.Assert(() => 
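The DocumentsWriterPerThread hunks above also fold the earlier workaround, if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint(...), into the assert itself: TestPoint returns true, so the assertion never throws and the hook simply runs whenever asserts are enabled. A reduced sketch, with a stand-in hook body:

    using System;
    using Lucene.Net.Diagnostics;

    internal sealed class TestPointSketch
    {
        internal bool TestPoint(string message)
        {
            // Stand-in for the real instrumentation hook; returning true
            // keeps the enclosing assert from ever throwing.
            Console.WriteLine("TP: " + message);
            return true;
        }

        internal void UpdateDocument()
        {
            Debugging.Assert(() => TestPoint("addDocument start"));
            // ... indexing work would continue here ...
        }
    }
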
deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); } else { @@ -485,7 +484,7 @@ internal virtual void DeleteDocID(int docIDUpto) /// internal virtual FrozenBufferedUpdates PrepareFlush() { - Debug.Assert(numDocsInRAM > 0); + Debugging.Assert(() => numDocsInRAM > 0); FrozenBufferedUpdates globalUpdates = deleteQueue.FreezeGlobalBuffer(deleteSlice); /* deleteSlice can possibly be null if we have hit non-aborting exceptions during indexing and never succeeded adding a document. */ @@ -493,7 +492,7 @@ adding a document. */ { // apply all deletes before we flush and release the delete slice deleteSlice.Apply(pendingUpdates, numDocsInRAM); - Debug.Assert(deleteSlice.IsEmpty); + Debugging.Assert(() => deleteSlice.IsEmpty); deleteSlice.Reset(); } return globalUpdates; @@ -504,8 +503,8 @@ adding a document. */ [MethodImpl(MethodImplOptions.NoInlining)] internal virtual FlushedSegment Flush() { - Debug.Assert(numDocsInRAM > 0); - Debug.Assert(deleteSlice.IsEmpty, "all deletes must be applied in prepareFlush"); + Debugging.Assert(() => numDocsInRAM > 0); + Debugging.Assert(() => deleteSlice.IsEmpty, () => "all deletes must be applied in prepareFlush"); segmentInfo.DocCount = numDocsInRAM; SegmentWriteState flushState = new SegmentWriteState(infoStream, directory, segmentInfo, fieldInfos.Finish(), indexWriterConfig.TermIndexInterval, pendingUpdates, new IOContext(new FlushInfo(numDocsInRAM, BytesUsed))); double startMBUsed = BytesUsed / 1024.0 / 1024.0; @@ -573,7 +572,7 @@ internal virtual FlushedSegment Flush() infoStream.Message("DWPT", "flushed: segment=" + segmentInfo.Name + " ramUsed=" + startMBUsed.ToString(nf) + " MB" + " newFlushedSize(includes docstores)=" + newSegmentSize.ToString(nf) + " MB" + " docs/MB=" + (flushState.SegmentInfo.DocCount / newSegmentSize).ToString(nf)); } - Debug.Assert(segmentInfo != null); + Debugging.Assert(() => segmentInfo != null); FlushedSegment fs = new FlushedSegment(segmentInfoPerCommit, flushState.FieldInfos, segmentDeletes, flushState.LiveDocs, flushState.DelCountOnFlush); SealFlushedSegment(fs); @@ -601,7 +600,7 @@ internal virtual FlushedSegment Flush() [MethodImpl(MethodImplOptions.NoInlining)] internal virtual void SealFlushedSegment(FlushedSegment flushedSegment) { - Debug.Assert(flushedSegment != null); + Debugging.Assert(() => flushedSegment != null); SegmentCommitInfo newSegment = flushedSegment.segmentInfo; @@ -633,7 +632,7 @@ internal virtual void SealFlushedSegment(FlushedSegment flushedSegment) if (flushedSegment.liveDocs != null) { int delCount = flushedSegment.delCount; - Debug.Assert(delCount > 0); + Debugging.Assert(() => delCount > 0); if (infoStream.IsEnabled("DWPT")) { infoStream.Message("DWPT", "flush: write " + delCount + " deletes gen=" + flushedSegment.segmentInfo.DelGen); diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs index a27b0af7b1..b96f06354d 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support.Threading; using System; using System.Diagnostics; @@ -79,14 +80,14 @@ internal ThreadState(DocumentsWriterPerThread dpwt) /// internal void Deactivate() // LUCENENET NOTE: Made internal because it is called outside of this context { - //Debug.Assert(this.HeldByCurrentThread); + //Debugging.Assert(this.HeldByCurrentThread); isActive = false; Reset(); } internal void Reset() // LUCENENET NOTE: 
Made internal because it is called outside of this context { - //Debug.Assert(this.HeldByCurrentThread); + //Debugging.Assert(this.HeldByCurrentThread); this.dwpt = null; this.bytesUsed = 0; this.flushPending = false; @@ -98,11 +99,11 @@ internal void Reset() // LUCENENET NOTE: Made internal because it is called outs /// is already checked out for flush. /// internal bool IsActive => - //Debug.Assert(this.HeldByCurrentThread); + //Debugging.Assert(this.HeldByCurrentThread); isActive; internal bool IsInitialized => - //Debug.Assert(this.HeldByCurrentThread); + //Debugging.Assert(this.HeldByCurrentThread); IsActive && dwpt != null; /// @@ -110,7 +111,7 @@ internal void Reset() // LUCENENET NOTE: Made internal because it is called outs /// /// public long BytesUsedPerThread => - //Debug.Assert(this.HeldByCurrentThread); + //Debugging.Assert(this.HeldByCurrentThread); // public for FlushPolicy bytesUsed; @@ -118,7 +119,7 @@ internal void Reset() // LUCENENET NOTE: Made internal because it is called outs /// Returns this s /// public DocumentsWriterPerThread DocumentsWriterPerThread => - //Debug.Assert(this.HeldByCurrentThread); + //Debugging.Assert(this.HeldByCurrentThread); // public for FlushPolicy dwpt; @@ -203,12 +204,12 @@ public virtual ThreadState NewThreadState() { // unreleased thread states are deactivated during DW#close() numThreadStatesActive++; // increment will publish the ThreadState - Debug.Assert(threadState.dwpt == null); + Debugging.Assert(() => threadState.dwpt == null); unlock = false; return threadState; } // unlock since the threadstate is not active anymore - we are closed! - Debug.Assert(AssertUnreleasedThreadStatesInactive()); + Debugging.Assert(AssertUnreleasedThreadStatesInactive); return null; } finally @@ -230,10 +231,10 @@ private bool AssertUnreleasedThreadStatesInactive() { for (int i = numThreadStatesActive; i < threadStates.Length; i++) { - Debug.Assert(threadStates[i].TryLock(), "unreleased threadstate should not be locked"); + Debugging.Assert(() => threadStates[i].TryLock(), () => "unreleased threadstate should not be locked"); try { - Debug.Assert(!threadStates[i].IsInitialized, "expected unreleased thread state to be inactive"); + Debugging.Assert(() => !threadStates[i].IsInitialized, () => "expected unreleased thread state to be inactive"); } finally { @@ -269,7 +270,7 @@ internal virtual void DeactivateUnreleasedStates() internal virtual DocumentsWriterPerThread Reset(ThreadState threadState, bool closed) { - //Debug.Assert(threadState.HeldByCurrentThread); + //Debugging.Assert(threadState.HeldByCurrentThread); DocumentsWriterPerThread dwpt = threadState.dwpt; if (!closed) { @@ -359,7 +360,7 @@ internal virtual int NumDeactivatedThreadStates() /// the state to deactivate internal virtual void DeactivateThreadState(ThreadState threadState) { - Debug.Assert(threadState.IsActive); + Debugging.Assert(() => threadState.IsActive); threadState.Deactivate(); } } diff --git a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs index 54b5d07b77..11c1f1b115 100644 --- a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs @@ -1,5 +1,6 @@ using J2N.Runtime.CompilerServices; using J2N.Threading; +using Lucene.Net.Diagnostics; using System.Collections.Generic; using System.Diagnostics; using System.Threading; @@ -87,10 +88,10 @@ internal void WaitIfStalled() // make sure not to run IncWaiters / DecrWaiters in Debug.Assert as that gets // removed at compile 
time if built in Release mode var result = IncWaiters(); - Debug.Assert(result); + Debugging.Assert(() => result); Monitor.Wait(this); result = DecrWaiters(); - Debug.Assert(result); + Debugging.Assert(() => result); //#if !NETSTANDARD1_6 // LUCENENET NOTE: Senseless to catch and rethrow the same exception type // } // catch (ThreadInterruptedException e) @@ -112,7 +113,7 @@ private bool IncWaiters() { numWaiting++; bool existed = waiting.ContainsKey(ThreadJob.CurrentThread); - Debug.Assert(!existed); + Debugging.Assert(() => !existed); waiting[ThreadJob.CurrentThread] = true; return numWaiting > 0; @@ -122,7 +123,7 @@ private bool DecrWaiters() { numWaiting--; bool removed = waiting.Remove(ThreadJob.CurrentThread); - Debug.Assert(removed); + Debugging.Assert(() => removed); return numWaiting >= 0; } diff --git a/src/Lucene.Net/Index/FieldInfo.cs b/src/Lucene.Net/Index/FieldInfo.cs index cba9c64178..841f800bc4 100644 --- a/src/Lucene.Net/Index/FieldInfo.cs +++ b/src/Lucene.Net/Index/FieldInfo.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -86,28 +87,28 @@ public FieldInfo(string name, bool indexed, int number, bool storeTermVector, bo this.normType = DocValuesType.NONE; } this.attributes = attributes; - Debug.Assert(CheckConsistency()); + Debugging.Assert(CheckConsistency); } private bool CheckConsistency() { if (!indexed) { - Debug.Assert(!storeTermVector); - Debug.Assert(!storePayloads); - Debug.Assert(!omitNorms); - Debug.Assert(normType == DocValuesType.NONE); - Debug.Assert(indexOptions == IndexOptions.NONE); + Debugging.Assert(() => !storeTermVector); + Debugging.Assert(() => !storePayloads); + Debugging.Assert(() => !omitNorms); + Debugging.Assert(() => normType == DocValuesType.NONE); + Debugging.Assert(() => indexOptions == IndexOptions.NONE); } else { - Debug.Assert(indexOptions != IndexOptions.NONE); + Debugging.Assert(() => indexOptions != IndexOptions.NONE); if (omitNorms) { - Debug.Assert(normType == DocValuesType.NONE); + Debugging.Assert(() => normType == DocValuesType.NONE); } // Cannot store payloads unless positions are indexed: - Debug.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !this.storePayloads); + Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !this.storePayloads); } return true; @@ -159,7 +160,7 @@ internal void Update(bool indexed, bool storeTermVector, bool omitNorms, bool st } } } - Debug.Assert(CheckConsistency()); + Debugging.Assert(CheckConsistency); } public DocValuesType DocValuesType @@ -172,7 +173,7 @@ internal set throw new ArgumentException("cannot change DocValues type from " + docValueType + " to " + value + " for field \"" + Name + "\""); } docValueType = value; - Debug.Assert(CheckConsistency()); + Debugging.Assert(CheckConsistency); } } @@ -206,14 +207,14 @@ internal set throw new ArgumentException("cannot change Norm type from " + normType + " to " + value + " for field \"" + Name + "\""); } normType = value; - Debug.Assert(CheckConsistency()); + Debugging.Assert(CheckConsistency); } } internal void SetStoreTermVectors() { storeTermVector = true; - Debug.Assert(CheckConsistency()); + Debugging.Assert(CheckConsistency); } internal void SetStorePayloads() @@ -222,7 +223,7 @@ internal void SetStorePayloads() { storePayloads = true; } - Debug.Assert(CheckConsistency()); + Debugging.Assert(CheckConsistency); } /// diff --git a/src/Lucene.Net/Index/FieldInfos.cs 
b/src/Lucene.Net/Index/FieldInfos.cs index 46caee45f6..5c5ba9b87d 100644 --- a/src/Lucene.Net/Index/FieldInfos.cs +++ b/src/Lucene.Net/Index/FieldInfos.cs @@ -1,4 +1,5 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections; using System.Collections.Generic; @@ -135,7 +136,7 @@ public virtual int Count { get { - Debug.Assert(byNumber.Count == byName.Count); + Debugging.Assert(() => byNumber.Count == byName.Count); return byNumber.Count; } } @@ -313,7 +314,7 @@ internal void SetDocValuesType(int number, string name, DocValuesType dvType) { lock (this) { - Debug.Assert(ContainsConsistent(number, name, dvType)); + Debugging.Assert(() => ContainsConsistent(number, name, dvType)); docValuesType[name] = dvType; } } @@ -334,7 +335,7 @@ internal Builder() /// internal Builder(FieldNumbers globalFieldNumbers) { - Debug.Assert(globalFieldNumbers != null); + Debugging.Assert(() => globalFieldNumbers != null); this.globalFieldNumbers = globalFieldNumbers; } @@ -375,8 +376,8 @@ private FieldInfo AddOrUpdateInternal(string name, int preferredFieldNumber, boo // else we'll allocate a new one: int fieldNumber = globalFieldNumbers.AddOrGet(name, preferredFieldNumber, docValues); fi = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, normType, null); - Debug.Assert(!byName.ContainsKey(fi.Name)); - Debug.Assert(globalFieldNumbers.ContainsConsistent(fi.Number, fi.Name, fi.DocValuesType)); + Debugging.Assert(() => !byName.ContainsKey(fi.Name)); + Debugging.Assert(() => globalFieldNumbers.ContainsConsistent(fi.Number, fi.Name, fi.DocValuesType)); byName[fi.Name] = fi; } else diff --git a/src/Lucene.Net/Index/FilteredTermsEnum.cs b/src/Lucene.Net/Index/FilteredTermsEnum.cs index 734c0311ad..d2deb3b2c8 100644 --- a/src/Lucene.Net/Index/FilteredTermsEnum.cs +++ b/src/Lucene.Net/Index/FilteredTermsEnum.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -96,7 +97,7 @@ public FilteredTermsEnum(TermsEnum tenum) /// start with seek public FilteredTermsEnum(TermsEnum tenum, bool startWithSeek) { - Debug.Assert(tenum != null); + Debugging.Assert(() => tenum != null); this.tenum = tenum; doSeek = startWithSeek; } @@ -206,7 +207,7 @@ public override void SeekExact(BytesRef term, TermState state) /// public override TermState GetTermState() { - Debug.Assert(tenum != null); + Debugging.Assert(() => tenum != null); return tenum.GetTermState(); } @@ -223,7 +224,7 @@ public override BytesRef Next() BytesRef t = NextSeekTerm(actualTerm); //System.out.println(" seek to t=" + (t == null ? 
"null" : t.utf8ToString()) + " tenum=" + tenum); // Make sure we always seek forward: - Debug.Assert(actualTerm == null || t == null || Comparer.Compare(t, actualTerm) > 0, "curTerm=" + actualTerm + " seekTerm=" + t); + Debugging.Assert(() => actualTerm == null || t == null || Comparer.Compare(t, actualTerm) > 0, () => "curTerm=" + actualTerm + " seekTerm=" + t); if (t == null || tenum.SeekCeil(t) == SeekStatus.END) { // no more terms to seek to or enum exhausted diff --git a/src/Lucene.Net/Index/FlushPolicy.cs b/src/Lucene.Net/Index/FlushPolicy.cs index 817a0d0453..dfef6ee36f 100644 --- a/src/Lucene.Net/Index/FlushPolicy.cs +++ b/src/Lucene.Net/Index/FlushPolicy.cs @@ -1,6 +1,5 @@ -using System; +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -114,11 +113,11 @@ protected internal virtual void Init(LiveIndexWriterConfig indexWriterConfig) /// protected virtual ThreadState FindLargestNonPendingWriter(DocumentsWriterFlushControl control, ThreadState perThreadState) { - Debug.Assert(perThreadState.dwpt.NumDocsInRAM > 0); + Debugging.Assert(() => perThreadState.dwpt.NumDocsInRAM > 0); long maxRamSoFar = perThreadState.bytesUsed; // the dwpt which needs to be flushed eventually ThreadState maxRamUsingThreadState = perThreadState; - Debug.Assert(!perThreadState.flushPending, "DWPT should have flushed"); + Debugging.Assert(() => !perThreadState.flushPending, () => "DWPT should have flushed"); IEnumerator activePerThreadsIterator = control.AllActiveThreadStates(); while (activePerThreadsIterator.MoveNext()) { @@ -133,7 +132,7 @@ protected virtual ThreadState FindLargestNonPendingWriter(DocumentsWriterFlushCo } } } - Debug.Assert(AssertMessage("set largest ram consuming thread pending on lower watermark")); + Debugging.Assert(() => AssertMessage("set largest ram consuming thread pending on lower watermark")); return maxRamUsingThreadState; } diff --git a/src/Lucene.Net/Index/FreqProxTermsWriter.cs b/src/Lucene.Net/Index/FreqProxTermsWriter.cs index 7b6c21d5c0..4f1b2c5954 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriter.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -89,7 +89,7 @@ public override void Flush(IDictionary fields fieldWriter.Flush(fieldInfo.Name, consumer, state); TermsHashPerField perField = fieldWriter.termsHashPerField; - Debug.Assert(termsHash == null || termsHash == perField.termsHash); + Debugging.Assert(() => termsHash == null || termsHash == perField.termsHash); termsHash = perField.termsHash; int numPostings = perField.bytesHash.Count; perField.Reset(); diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs index 7bf54a2a2f..8e961a7c60 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs @@ -1,9 +1,9 @@ using J2N.Text; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -28,8 +28,6 @@ namespace Lucene.Net.Index using BytesRef = Lucene.Net.Util.BytesRef; using FieldsConsumer = Lucene.Net.Codecs.FieldsConsumer; using FixedBitSet = Lucene.Net.Util.FixedBitSet; - using OffsetAttribute = 
Lucene.Net.Analysis.TokenAttributes.OffsetAttribute; - using PayloadAttribute = Lucene.Net.Analysis.TokenAttributes.PayloadAttribute; using PostingsConsumer = Lucene.Net.Codecs.PostingsConsumer; using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator; using TermsConsumer = Lucene.Net.Codecs.TermsConsumer; @@ -155,7 +153,7 @@ internal override void Start(IIndexableField f) internal void WriteProx(int termID, int proxCode) { //System.out.println("writeProx termID=" + termID + " proxCode=" + proxCode); - Debug.Assert(hasProx); + Debugging.Assert(() => hasProx); BytesRef payload; if (payloadAttribute == null) { @@ -184,11 +182,11 @@ internal void WriteProx(int termID, int proxCode) internal void WriteOffsets(int termID, int offsetAccum) { - Debug.Assert(hasOffsets); + Debugging.Assert(() => hasOffsets); int startOffset = offsetAccum + offsetAttribute.StartOffset; int endOffset = offsetAccum + offsetAttribute.EndOffset; FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; - Debug.Assert(startOffset - postings.lastOffsets[termID] >= 0); + Debugging.Assert(() => startOffset - postings.lastOffsets[termID] >= 0); termsHashPerField.WriteVInt32(1, startOffset - postings.lastOffsets[termID]); termsHashPerField.WriteVInt32(1, endOffset - startOffset); @@ -222,7 +220,7 @@ internal override void NewTerm(int termID) } else { - Debug.Assert(!hasOffsets); + Debugging.Assert(() => !hasOffsets); } } fieldState.MaxTermFrequency = Math.Max(1, fieldState.MaxTermFrequency); @@ -231,19 +229,18 @@ internal override void NewTerm(int termID) internal override void AddTerm(int termID) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("FreqProxTermsWriterPerField.addTerm start"); + Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.addTerm start")); FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; - Debug.Assert(!hasFreq || postings.termFreqs[termID] > 0); + Debugging.Assert(() => !hasFreq || postings.termFreqs[termID] > 0); if (!hasFreq) { - Debug.Assert(postings.termFreqs == null); + Debugging.Assert(() => postings.termFreqs == null); if (docState.docID != postings.lastDocIDs[termID]) { - Debug.Assert(docState.docID > postings.lastDocIDs[termID]); + Debugging.Assert(() => docState.docID > postings.lastDocIDs[termID]); termsHashPerField.WriteVInt32(0, postings.lastDocCodes[termID]); postings.lastDocCodes[termID] = docState.docID - postings.lastDocIDs[termID]; postings.lastDocIDs[termID] = docState.docID; @@ -252,7 +249,7 @@ internal override void AddTerm(int termID) } else if (docState.docID != postings.lastDocIDs[termID]) { - Debug.Assert(docState.docID > postings.lastDocIDs[termID], "id: " + docState.docID + " postings ID: " + postings.lastDocIDs[termID] + " termID: " + termID); + Debugging.Assert(() => docState.docID > postings.lastDocIDs[termID], () => "id: " + docState.docID + " postings ID: " + postings.lastDocIDs[termID] + " termID: " + termID); // Term not yet seen in the current doc but previously // seen in other doc(s) since the last flush @@ -282,7 +279,7 @@ internal override void AddTerm(int termID) } else { - Debug.Assert(!hasOffsets); + Debugging.Assert(() => !hasOffsets); } fieldState.UniqueTermCount++; } @@ -326,7 +323,7 @@ public FreqProxPostingsArray(int size, bool writeFreqs, bool writeProx, bool wri } else { - Debug.Assert(!writeOffsets); + Debugging.Assert(() => !writeOffsets); } //System.out.println("PA init 
freqs=" + writeFreqs + " pos=" + writeProx + " offs=" + writeOffsets); } @@ -344,7 +341,7 @@ internal override ParallelPostingsArray NewInstance(int size) internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) { - Debug.Assert(toArray is FreqProxPostingsArray); + Debugging.Assert(() => toArray is FreqProxPostingsArray); FreqProxPostingsArray to = (FreqProxPostingsArray)toArray; base.CopyTo(toArray, numToCopy); @@ -353,17 +350,17 @@ internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) Array.Copy(lastDocCodes, 0, to.lastDocCodes, 0, numToCopy); if (lastPositions != null) { - Debug.Assert(to.lastPositions != null); + Debugging.Assert(() => to.lastPositions != null); Array.Copy(lastPositions, 0, to.lastPositions, 0, numToCopy); } if (lastOffsets != null) { - Debug.Assert(to.lastOffsets != null); + Debugging.Assert(() => to.lastOffsets != null); Array.Copy(lastOffsets, 0, to.lastOffsets, 0, numToCopy); } if (termFreqs != null) { - Debug.Assert(to.termFreqs != null); + Debugging.Assert(() => to.termFreqs != null); Array.Copy(termFreqs, 0, to.termFreqs, 0, numToCopy); } } @@ -420,7 +417,7 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState // new segment to the directory according to // currentFieldIndexOptions: IndexOptions currentFieldIndexOptions = fieldInfo.IndexOptions; - Debug.Assert(currentFieldIndexOptions != IndexOptions.NONE); + Debugging.Assert(() => currentFieldIndexOptions != IndexOptions.NONE); bool writeTermFreq = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS) >= 0; bool writePositions = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; @@ -433,11 +430,11 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState //System.out.println("flush readTF=" + readTermFreq + " readPos=" + readPositions + " readOffs=" + readOffsets); // Make sure FieldInfo.update is working correctly!: - Debug.Assert(!writeTermFreq || readTermFreq); - Debug.Assert(!writePositions || readPositions); - Debug.Assert(!writeOffsets || readOffsets); + Debugging.Assert(() => !writeTermFreq || readTermFreq); + Debugging.Assert(() => !writePositions || readPositions); + Debugging.Assert(() => !writeOffsets || readOffsets); - Debug.Assert(!writeOffsets || writePositions); + Debugging.Assert(() => !writeOffsets || writePositions); IDictionary segDeletes; if (state.SegUpdates != null && state.SegUpdates.terms.Count > 0) @@ -556,11 +553,11 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState } } - Debug.Assert(docID != postings.lastDocIDs[termID]); + Debugging.Assert(() => docID != postings.lastDocIDs[termID]); } docFreq++; - Debug.Assert(docID < state.SegmentInfo.DocCount, "doc=" + docID + " maxDoc=" + state.SegmentInfo.DocCount); + Debugging.Assert(() => docID < state.SegmentInfo.DocCount, () => "doc=" + docID + " maxDoc=" + state.SegmentInfo.DocCount); // NOTE: we could check here if the docID was // deleted, and skip it. 
However, this is somewhat @@ -645,7 +642,7 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState { if (writeOffsets) { - Debug.Assert(startOffset >= 0 && endOffset >= startOffset, "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset); + Debugging.Assert(() => startOffset >= 0 && endOffset >= startOffset, () => "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset); postingsConsumer.AddPosition(position, thisPayload, startOffset, endOffset); } else diff --git a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs index 1e1080f7d2..fc085a4b06 100644 --- a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs +++ b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs @@ -1,8 +1,8 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -67,7 +67,7 @@ internal class FrozenBufferedUpdates public FrozenBufferedUpdates(BufferedUpdates deletes, bool isSegmentPrivate) { this.isSegmentPrivate = isSegmentPrivate; - Debug.Assert(!isSegmentPrivate || deletes.terms.Count == 0, "segment private package should only have del queries"); + Debugging.Assert(() => !isSegmentPrivate || deletes.terms.Count == 0, () => "segment private package should only have del queries"); Term[] termsArray = deletes.terms.Keys.ToArray(/*new Term[deletes.terms.Count]*/); termCount = termsArray.Length; @@ -140,12 +140,12 @@ public virtual long DelGen { set { - Debug.Assert(this.gen == -1); + Debugging.Assert(() => this.gen == -1); this.gen = value; } get { - Debug.Assert(gen != -1); + Debugging.Assert(() => gen != -1); return gen; } } diff --git a/src/Lucene.Net/Index/IndexFileDeleter.cs b/src/Lucene.Net/Index/IndexFileDeleter.cs index f246e17555..6b38589558 100644 --- a/src/Lucene.Net/Index/IndexFileDeleter.cs +++ b/src/Lucene.Net/Index/IndexFileDeleter.cs @@ -1,11 +1,10 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text.RegularExpressions; using System.Threading; -using System.Reflection; namespace Lucene.Net.Index { @@ -406,7 +405,7 @@ private void DeleteCommits() /// public void Refresh(string segmentName) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); string[] files = directory.ListAll(); string segmentPrefix1; @@ -447,7 +446,7 @@ public void Refresh() // Set to null so that we regenerate the list of pending // files; else we can accumulate same file more than // once - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); deletable = null; Refresh(null); } @@ -455,7 +454,7 @@ public void Refresh() public void Dispose() { // DecRef old files from the last checkpoint, if any: - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); if (lastFiles.Count > 0) { @@ -477,7 +476,7 @@ public void Dispose() /// internal void RevisitPolicy() { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); if (infoStream.IsEnabled("IFD")) { infoStream.Message("IFD", "now revisitPolicy"); @@ -492,7 +491,7 @@ internal void RevisitPolicy() public void DeletePendingFiles() { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); if (deletable != null) { IList oldDeletable = deletable; @@ -531,9 +530,9 @@ public void DeletePendingFiles() /// public void Checkpoint(SegmentInfos segmentInfos, bool isCommit) { - Debug.Assert(IsLocked); + 
Debugging.Assert(() => IsLocked); - //Debug.Assert(Thread.holdsLock(Writer)); + //Debugging.Assert(Thread.holdsLock(Writer)); long t0 = 0; if (infoStream.IsEnabled("IFD")) { @@ -577,7 +576,7 @@ public void Checkpoint(SegmentInfos segmentInfos, bool isCommit) internal void IncRef(SegmentInfos segmentInfos, bool isCommit) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); // If this is a commit point, also incRef the // segments_N file: foreach (string fileName in segmentInfos.GetFiles(directory, isCommit)) @@ -588,7 +587,7 @@ internal void IncRef(SegmentInfos segmentInfos, bool isCommit) internal void IncRef(ICollection files) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); foreach (string file in files) { IncRef(file); @@ -597,7 +596,7 @@ internal void IncRef(ICollection files) internal void IncRef(string fileName) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); RefCount rc = GetRefCount(fileName); if (infoStream.IsEnabled("IFD")) { @@ -611,7 +610,7 @@ internal void IncRef(string fileName) internal void DecRef(ICollection files) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); foreach (string file in files) { DecRef(file); @@ -620,7 +619,7 @@ internal void DecRef(ICollection files) internal void DecRef(string fileName) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); RefCount rc = GetRefCount(fileName); if (infoStream.IsEnabled("IFD")) { @@ -640,7 +639,7 @@ internal void DecRef(string fileName) internal void DecRef(SegmentInfos segmentInfos) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); foreach (string file in segmentInfos.GetFiles(directory, false)) { DecRef(file); @@ -649,14 +648,14 @@ internal void DecRef(SegmentInfos segmentInfos) public bool Exists(string fileName) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); // LUCENENET: Using TryGetValue to eliminate extra lookup return refCounts.TryGetValue(fileName, out RefCount value) ? value.count > 0 : false; } private RefCount GetRefCount(string fileName) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); // LUCENENET: Using TryGetValue to eliminate extra lookup if (!refCounts.TryGetValue(fileName, out RefCount rc)) { @@ -668,7 +667,7 @@ private RefCount GetRefCount(string fileName) internal void DeleteFiles(IList files) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); foreach (string file in files) { DeleteFile(file); @@ -681,7 +680,7 @@ internal void DeleteFiles(IList files) /// internal void DeleteNewFiles(ICollection files) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); foreach (string fileName in files) { // NOTE: it's very unusual yet possible for the @@ -706,7 +705,7 @@ internal void DeleteNewFiles(ICollection files) internal void DeleteFile(string fileName) { - Debug.Assert(IsLocked); + Debugging.Assert(() => IsLocked); EnsureOpen(); try { @@ -725,7 +724,7 @@ internal void DeleteFile(string fileName) // the file is open in another process, and queue // the file for subsequent deletion. 
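Nearly every mutator in the IndexFileDeleter hunks opens with Debugging.Assert(() => IsLocked), an entry guard asserting that the caller already holds the deleter's lock. The following hypothetical reduction assumes an IsLocked property backed by Monitor.IsEntered; the real property may be implemented differently:

    using System.Threading;
    using Lucene.Net.Diagnostics;

    internal sealed class LockGuardSketch
    {
        private readonly object syncRoot = new object();
        private int refCount;

        private bool IsLocked => Monitor.IsEntered(syncRoot);

        internal void IncRef()
        {
            Debugging.Assert(() => IsLocked); // caller must already hold syncRoot
            refCount++;
        }

        internal void Checkpoint()
        {
            lock (syncRoot)
            {
                IncRef(); // the guard passes: syncRoot is held by this thread
            }
        }
    }
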
- //Debug.Assert(e.Message.Contains("cannot delete")); + //Debugging.Assert(e.Message.Contains("cannot delete")); if (infoStream.IsEnabled("IFD")) { @@ -765,14 +764,14 @@ public int IncRef() } else { - Debug.Assert(count > 0, Thread.CurrentThread.Name + ": RefCount is 0 pre-increment for file \"" + fileName + "\""); + Debugging.Assert(() => count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-increment for file \"" + fileName + "\""); } return ++count; } public int DecRef() { - Debug.Assert(count > 0, Thread.CurrentThread.Name + ": RefCount is 0 pre-decrement for file \"" + fileName + "\""); + Debugging.Assert(() => count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-decrement for file \"" + fileName + "\""); return --count; } } diff --git a/src/Lucene.Net/Index/IndexFileNames.cs b/src/Lucene.Net/Index/IndexFileNames.cs index 77b8be6169..1891373915 100644 --- a/src/Lucene.Net/Index/IndexFileNames.cs +++ b/src/Lucene.Net/Index/IndexFileNames.cs @@ -1,6 +1,6 @@ using J2N; +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.Text; using System.Text.RegularExpressions; @@ -106,7 +106,7 @@ public static string FileNameFromGeneration(string @base, string ext, long gen) } else { - Debug.Assert(gen > 0); + Debugging.Assert(() => gen > 0); // The '6' part in the length is: 1 for '.', 1 for '_' and 4 as estimate // to the gen length as string (hopefully an upper limit so SB won't // expand in the middle. @@ -139,7 +139,7 @@ public static string SegmentFileName(string segmentName, string segmentSuffix, s { if (ext.Length > 0 || segmentSuffix.Length > 0) { - Debug.Assert(!ext.StartsWith(".", StringComparison.Ordinal)); + Debugging.Assert(() => !ext.StartsWith(".", StringComparison.Ordinal)); StringBuilder sb = new StringBuilder(segmentName.Length + 2 + segmentSuffix.Length + ext.Length); sb.Append(segmentName); if (segmentSuffix.Length > 0) diff --git a/src/Lucene.Net/Index/IndexFormatTooNewException.cs b/src/Lucene.Net/Index/IndexFormatTooNewException.cs index e1e6095f65..4b6779407c 100644 --- a/src/Lucene.Net/Index/IndexFormatTooNewException.cs +++ b/src/Lucene.Net/Index/IndexFormatTooNewException.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; #if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; #endif @@ -48,7 +48,7 @@ public class IndexFormatTooNewException : CorruptIndexException public IndexFormatTooNewException(string resourceDesc, int version, int minVersion, int maxVersion) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + " (needs to be between " + minVersion + " and " + maxVersion + ")") { - Debug.Assert(resourceDesc != null); + Debugging.Assert(() => resourceDesc != null); } /// diff --git a/src/Lucene.Net/Index/IndexFormatTooOldException.cs b/src/Lucene.Net/Index/IndexFormatTooOldException.cs index 9003e756ba..7cec690d0d 100644 --- a/src/Lucene.Net/Index/IndexFormatTooOldException.cs +++ b/src/Lucene.Net/Index/IndexFormatTooOldException.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; #if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; #endif @@ -46,7 +46,7 @@ public class IndexFormatTooOldException : CorruptIndexException public IndexFormatTooOldException(string resourceDesc, string version) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + ". 
this version of Lucene only supports indexes created with release 3.0 and later.") { - Debug.Assert(resourceDesc != null); + Debugging.Assert(() => resourceDesc != null); } /// @@ -73,7 +73,7 @@ public IndexFormatTooOldException(DataInput input, string version) public IndexFormatTooOldException(string resourceDesc, int version, int minVersion, int maxVersion) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + " (needs to be between " + minVersion + " and " + maxVersion + "). this version of Lucene only supports indexes created with release 3.0 and later.") { - Debug.Assert(resourceDesc != null); + Debugging.Assert(() => resourceDesc != null); } /// diff --git a/src/Lucene.Net/Index/IndexWriter.cs b/src/Lucene.Net/Index/IndexWriter.cs index c52595be32..0c23c0c41b 100644 --- a/src/Lucene.Net/Index/IndexWriter.cs +++ b/src/Lucene.Net/Index/IndexWriter.cs @@ -1,6 +1,7 @@ using J2N; using J2N.Threading; using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Concurrent; @@ -462,8 +463,8 @@ public virtual bool InfoIsLive(SegmentCommitInfo info) lock (this) { int idx = outerInstance.segmentInfos.IndexOf(info); - Debug.Assert(idx != -1, "info=" + info + " isn't live"); - Debug.Assert(outerInstance.segmentInfos.Info(idx) == info, "info=" + info + " doesn't match live info in segmentInfos"); + Debugging.Assert(() => idx != -1, () => "info=" + info + " isn't live"); + Debugging.Assert(() => outerInstance.segmentInfos.Info(idx) == info, () => "info=" + info + " doesn't match live info in segmentInfos"); return true; } } @@ -476,7 +477,7 @@ public virtual void Drop(SegmentCommitInfo info) readerMap.TryGetValue(info, out rld); if (rld != null) { - Debug.Assert(info == rld.Info); + Debugging.Assert(() => info == rld.Info); // System.out.println("[" + Thread.currentThread().getName() + "] ReaderPool.drop: " + info); readerMap.Remove(info); rld.DropReaders(); @@ -516,7 +517,7 @@ public virtual void Release(ReadersAndUpdates rld, bool assertInfoLive) rld.DecRef(); // Pool still holds a ref: - Debug.Assert(rld.RefCount() >= 1); + Debugging.Assert(() => rld.RefCount() >= 1); if (!outerInstance.poolReaders && rld.RefCount() == 1) { @@ -526,7 +527,7 @@ public virtual void Release(ReadersAndUpdates rld, bool assertInfoLive) if (rld.WriteLiveDocs(outerInstance.directory)) { // Make sure we only write del docs for a live segment: - Debug.Assert(assertInfoLive == false || InfoIsLive(rld.Info)); + Debugging.Assert(() => assertInfoLive == false || InfoIsLive(rld.Info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -575,7 +576,7 @@ internal virtual void DropAll(bool doSave) if (doSave && rld.WriteLiveDocs(outerInstance.directory)) // Throws IOException { // Make sure we only write del docs and field updates for a live segment: - Debug.Assert(InfoIsLive(rld.Info)); + Debugging.Assert(() => InfoIsLive(rld.Info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -644,7 +645,7 @@ internal virtual void DropAll(bool doSave) // before possibly throwing an exception. 
readerMap.RemoveAll(toDelete); - Debug.Assert(readerMap.Count == 0); + Debugging.Assert(() => readerMap.Count == 0); IOUtils.ReThrow(priorE); } } @@ -663,11 +664,11 @@ public virtual void Commit(SegmentInfos infos) ReadersAndUpdates rld; if (readerMap.TryGetValue(info, out rld)) { - Debug.Assert(rld.Info == info); + Debugging.Assert(() => rld.Info == info); if (rld.WriteLiveDocs(outerInstance.directory)) { // Make sure we only write del docs for a live segment: - Debug.Assert(InfoIsLive(info)); + Debugging.Assert(() => InfoIsLive(info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -691,7 +692,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) { lock (this) { - Debug.Assert(info.Info.Dir == outerInstance.directory, "info.dir=" + info.Info.Dir + " vs " + outerInstance.directory); + Debugging.Assert(() => info.Info.Dir == outerInstance.directory, () => "info.dir=" + info.Info.Dir + " vs " + outerInstance.directory); ReadersAndUpdates rld; readerMap.TryGetValue(info, out rld); @@ -707,7 +708,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) } else { - Debug.Assert(rld.Info == info, "Infos are not equal");//, "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info)); + Debugging.Assert(() => rld.Info == info, () => "Infos are not equal");//, "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info)); } if (create) @@ -716,7 +717,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) rld.IncRef(); } - Debug.Assert(NoDups()); + Debugging.Assert(NoDups); return rld; } @@ -731,7 +732,7 @@ private bool NoDups() JCG.HashSet seen = new JCG.HashSet(); foreach (SegmentCommitInfo info in readerMap.Keys) { - Debug.Assert(!seen.Contains(info.Info.Name)); + Debugging.Assert(() => !seen.Contains(info.Info.Name)); seen.Add(info.Info.Name); } return true; @@ -1095,7 +1096,7 @@ public virtual void Dispose(bool waitForMerges) // LUCENENET TODO: API - mark pr else { CloseInternal(waitForMerges, true); - Debug.Assert(AssertEventQueueAfterClose()); + Debugging.Assert(AssertEventQueueAfterClose); } } } @@ -1109,7 +1110,7 @@ private bool AssertEventQueueAfterClose() } foreach (IEvent e in eventQueue) { - Debug.Assert(e is DocumentsWriter.MergePendingEvent, e.ToString()); + Debugging.Assert(() => e is DocumentsWriter.MergePendingEvent, () => e.ToString()); } return true; } @@ -1276,7 +1277,7 @@ private void CloseInternal(bool waitForMerges, bool doFlush) { closed = true; } - Debug.Assert(docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); + Debugging.Assert(() => docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); } catch (OutOfMemoryException oom) { @@ -1642,8 +1643,8 @@ public virtual bool TryDeleteDocument(IndexReader readerIn, int docID) int subIndex = ReaderUtil.SubIndex(docID, leaves); reader = leaves[subIndex].AtomicReader; docID -= leaves[subIndex].DocBase; - Debug.Assert(docID >= 0); - Debug.Assert(docID < reader.MaxDoc); + Debugging.Assert(() => docID >= 0); + Debugging.Assert(() => docID < reader.MaxDoc); } if (!(reader is SegmentReader)) 
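// --- Illustrative sketch (hypothetical names): where the condition is already
// a parameterless bool method, the hunks above pass the method group directly,
// as in Debugging.Assert(NoDups) and Debugging.Assert(AssertEventQueueAfterClose),
// instead of allocating a wrapping lambda.
using Lucene.Net.Diagnostics;

internal static class MethodGroupAssertExample
{
    // Hypothetical invariant check mirroring NoDups() above.
    private static bool NoDuplicatesExample() => true;

    internal static void Demo()
    {
        // The method group converts to Func<bool>, so the (possibly
        // expensive) invariant check runs only when asserts are enabled.
        Debugging.Assert(NoDuplicatesExample);
    }
}
// --- end sketch ---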
@@ -2383,8 +2384,8 @@ private bool UpdatePendingMerges(MergeTrigger trigger, int maxNumSegments) { lock (this) { - Debug.Assert(maxNumSegments == -1 || maxNumSegments > 0); - //Debug.Assert(trigger != null); // LUCENENET NOTE: Enum cannot be null in .NET + Debugging.Assert(() => maxNumSegments == -1 || maxNumSegments > 0); + //Debugging.Assert(trigger != null); // LUCENENET NOTE: Enum cannot be null in .NET if (stopMerges) { return false; @@ -2399,7 +2400,7 @@ private bool UpdatePendingMerges(MergeTrigger trigger, int maxNumSegments) MergePolicy.MergeSpecification spec; if (maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS) { - Debug.Assert(trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED, "Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.ToString()); + Debugging.Assert(() => trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED, () => "Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.ToString()); spec = mergePolicy.FindForcedMerges(segmentInfos, maxNumSegments, segmentsToMerge); newMergesFound = spec != null; if (newMergesFound) @@ -2585,7 +2586,7 @@ private void RollbackInternal() IOUtils.Dispose(writeLock); // release write lock writeLock = null; - Debug.Assert(docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); + Debugging.Assert(() => docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); } success = true; @@ -2770,7 +2771,7 @@ private void FinishMerges(bool waitForMerges) stopMerges = false; Monitor.PulseAll(this); - Debug.Assert(0 == mergingSegments.Count); + Debugging.Assert(() => 0 == mergingSegments.Count); if (infoStream.IsEnabled("IW")) { @@ -2810,7 +2811,7 @@ public virtual void WaitForMerges() } // sanity check - Debug.Assert(0 == mergingSegments.Count); + Debugging.Assert(() => 0 == mergingSegments.Count); if (infoStream.IsEnabled("IW")) { @@ -2863,7 +2864,7 @@ internal virtual void PublishFrozenUpdates(FrozenBufferedUpdates packet) { lock (this) { - Debug.Assert(packet != null && packet.Any()); + Debugging.Assert(() => packet != null && packet.Any()); lock (bufferedUpdatesStream) { bufferedUpdatesStream.Push(packet); @@ -3059,7 +3060,7 @@ public virtual void AddIndexes(params Directory[] dirs) JCG.HashSet copiedFiles = new JCG.HashSet(); foreach (SegmentCommitInfo info in sis.Segments) { - Debug.Assert(!infos.Contains(info), "dup info dir=" + info.Info.Dir + " name=" + info.Info.Name); + Debugging.Assert(() => !infos.Contains(info), () => "dup info dir=" + info.Info.Dir + " name=" + info.Info.Name); string newSegName = NewSegmentName(); @@ -3334,7 +3335,7 @@ private SegmentCommitInfo CopySegmentAsIs(SegmentCommitInfo info, string segName // because the DS might have been copied already, in which case we // just want to update the DS name of this SegmentInfo. 
string dsName = Lucene3xSegmentInfoFormat.GetDocStoreSegment(info.Info); - Debug.Assert(dsName != null); + Debugging.Assert(() => dsName != null); // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey if (!dsNames.TryGetValue(dsName, out string newDsName)) { @@ -3446,8 +3447,8 @@ private SegmentCommitInfo CopySegmentAsIs(SegmentCommitInfo info, string segName continue; } - Debug.Assert(!SlowFileExists(directory, newFileName), "file \"" + newFileName + "\" already exists; siFiles=" + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", siFiles)); - Debug.Assert(!copiedFiles.Contains(file), "file \"" + file + "\" is being copied more than once"); + Debugging.Assert(() => !SlowFileExists(directory, newFileName), () => "file \"" + newFileName + "\" already exists; siFiles=" + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", siFiles)); + Debugging.Assert(() => !copiedFiles.Contains(file), () => "file \"" + file + "\" is being copied more than once"); copiedFiles.Add(file); info.Info.Dir.Copy(directory, file, newFileName, context); } @@ -3983,7 +3984,7 @@ internal virtual DocumentsWriter DocsWriter bool test = false; // LUCENENET NOTE: Must set test outside of Debug.Assert!! bool isTest = test = true; - Debug.Assert(isTest); + Debugging.Assert(() => isTest); return test ? docWriter : null; } } @@ -4026,7 +4027,7 @@ private void SkipDeletedDoc(DocValuesFieldUpdates.Iterator[] updatesIters, int d // when entering the method, all iterators must already be beyond the // deleted document, or right on it, in which case we advance them over // and they must be beyond it now. - Debug.Assert(iter.Doc > deletedDoc, "updateDoc=" + iter.Doc + " deletedDoc=" + deletedDoc); + Debugging.Assert(() => iter.Doc > deletedDoc, () => "updateDoc=" + iter.Doc + " deletedDoc=" + deletedDoc); } } @@ -4046,7 +4047,7 @@ internal void Init(ReaderPool readerPool, MergePolicy.OneMerge merge, MergeState { mergedDeletesAndUpdates = readerPool.Get(merge.info, true); docMap = merge.GetDocMap(mergeState); - Debug.Assert(docMap.IsConsistent(merge.info.Info.DocCount)); + Debugging.Assert(() => docMap.IsConsistent(merge.info.Info.DocCount)); } if (initWritableLiveDocs && !initializedWritableLiveDocs) { @@ -4078,7 +4079,7 @@ private void MaybeApplyMergedDVUpdates(MergePolicy.OneMerge merge, MergeState me } else { - Debug.Assert(updatesIter.Doc > curDoc, "field=" + mergingFields[idx] + " updateDoc=" + updatesIter.Doc + " curDoc=" + curDoc); + Debugging.Assert(() => updatesIter.Doc > curDoc, () => "field=" + mergingFields[idx] + " updateDoc=" + updatesIter.Doc + " curDoc=" + curDoc); } } } @@ -4124,7 +4125,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer IBits prevLiveDocs = merge.readers[i].LiveDocs; ReadersAndUpdates rld = readerPool.Get(info, false); // We hold a ref so it should still be in the pool: - Debug.Assert(rld != null, "seg=" + info.Info.Name); + Debugging.Assert(() => rld != null, () => "seg=" + info.Info.Name); IBits currentLiveDocs = rld.LiveDocs; IDictionary mergingFieldUpdates = rld.MergingFieldUpdates; string[] mergingFields; @@ -4163,9 +4164,9 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer { // If we had deletions on starting the merge we must // still have deletions now: - Debug.Assert(currentLiveDocs != null); - Debug.Assert(prevLiveDocs.Length == docCount); - Debug.Assert(currentLiveDocs.Length == docCount); + Debugging.Assert(() => currentLiveDocs != null); + Debugging.Assert(() => 
prevLiveDocs.Length == docCount); + Debugging.Assert(() => currentLiveDocs.Length == docCount); // There were deletes on this segment when the merge // started. The merge has collapsed away those @@ -4188,7 +4189,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer { if (!prevLiveDocs.Get(j)) { - Debug.Assert(!currentLiveDocs.Get(j)); + Debugging.Assert(() => !currentLiveDocs.Get(j)); } else { @@ -4238,7 +4239,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer } else if (currentLiveDocs != null) { - Debug.Assert(currentLiveDocs.Length == docCount); + Debugging.Assert(() => currentLiveDocs.Length == docCount); // this segment had no deletes before but now it // does: for (int j = 0; j < docCount; j++) @@ -4279,7 +4280,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer } } - Debug.Assert(docUpto == merge.info.Info.DocCount); + Debugging.Assert(() => docUpto == merge.info.Info.DocCount); if (mergedDVUpdates.Any()) { @@ -4347,7 +4348,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) infoStream.Message("IW", "commitMerge: " + SegString(merge.Segments) + " index=" + SegString()); } - Debug.Assert(merge.registerDone); + Debugging.Assert(() => merge.registerDone); // If merge was explicitly aborted, or, if rollback() or // rollbackTransaction() had been called since our merge @@ -4383,7 +4384,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) // started), then we will switch to the compound // format as well: - Debug.Assert(!segmentInfos.Contains(merge.info)); + Debugging.Assert(() => !segmentInfos.Contains(merge.info)); bool allDeleted = merge.Segments.Count == 0 || merge.info.Info.DocCount == 0 || (mergedUpdates != null && mergedUpdates.PendingDeleteCount == merge.info.Info.DocCount); @@ -4399,9 +4400,9 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) // If we merged no segments then we better be dropping // the new segment: - Debug.Assert(merge.Segments.Count > 0 || dropSegment); + Debugging.Assert(() => merge.Segments.Count > 0 || dropSegment); - Debug.Assert(merge.info.Info.DocCount != 0 || keepFullyDeletedSegments || dropSegment); + Debugging.Assert(() => merge.info.Info.DocCount != 0 || keepFullyDeletedSegments || dropSegment); if (mergedUpdates != null) { @@ -4436,7 +4437,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) if (dropSegment) { - Debug.Assert(!segmentInfos.Contains(merge.info)); + Debugging.Assert(() => !segmentInfos.Contains(merge.info)); readerPool.Drop(merge.info); deleter.DeleteNewFiles(merge.info.GetFiles()); } @@ -4625,7 +4626,7 @@ internal bool RegisterMerge(MergePolicy.OneMerge merge) { return true; } - Debug.Assert(merge.Segments.Count > 0); + Debugging.Assert(() => merge.Segments.Count > 0); if (stopMerges) { @@ -4702,14 +4703,14 @@ internal bool RegisterMerge(MergePolicy.OneMerge merge) mergingSegments.Add(info); } - Debug.Assert(merge.EstimatedMergeBytes == 0); - Debug.Assert(merge.totalMergeBytes == 0); + Debugging.Assert(() => merge.EstimatedMergeBytes == 0); + Debugging.Assert(() => merge.totalMergeBytes == 0); foreach (SegmentCommitInfo info in merge.Segments) { if (info.Info.DocCount > 0) { int delCount = NumDeletedDocs(info); - Debug.Assert(delCount <= info.Info.DocCount); + Debugging.Assert(() => delCount <= info.Info.DocCount); double delRatio = ((double)delCount) / info.Info.DocCount; merge.EstimatedMergeBytes += 
(long)(info.GetSizeInBytes() * (1.0 - delRatio)); merge.totalMergeBytes += info.GetSizeInBytes(); @@ -4755,11 +4756,10 @@ private void MergeInitImpl(MergePolicy.OneMerge merge) // LUCENENET specific: re { lock (this) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("startMergeInit"); + Debugging.Assert(() => TestPoint("startMergeInit")); - Debug.Assert(merge.registerDone); - Debug.Assert(merge.MaxNumSegments == -1 || merge.MaxNumSegments > 0); + Debugging.Assert(() => merge.registerDone); + Debugging.Assert(() => merge.MaxNumSegments == -1 || merge.MaxNumSegments > 0); if (hitOOM) { @@ -4901,7 +4901,7 @@ private void CloseMergeReaders(MergePolicy.OneMerge merge, bool suppressExceptio { ReadersAndUpdates rld = readerPool.Get(sr.SegmentInfo, false); // We still hold a ref so it should not have been removed: - Debug.Assert(rld != null); + Debugging.Assert(() => rld != null); if (drop) { rld.DropChanges(); @@ -4988,8 +4988,8 @@ private int MergeMiddle(MergePolicy.OneMerge merge) liveDocs = rld.GetReadOnlyLiveDocs(); delCount = rld.PendingDeleteCount + info.DelCount; - Debug.Assert(reader != null); - Debug.Assert(rld.VerifyDocCounts()); + Debugging.Assert(() => reader != null); + Debugging.Assert(rld.VerifyDocCounts); if (infoStream.IsEnabled("IW")) { @@ -5015,7 +5015,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) if (reader.NumDeletedDocs != delCount) { // fix the reader's live docs and del count - Debug.Assert(delCount > reader.NumDeletedDocs); // beware of zombies + Debugging.Assert(() => delCount > reader.NumDeletedDocs); // beware of zombies SegmentReader newReader = new SegmentReader(info, reader, liveDocs, info.Info.DocCount - delCount); bool released = false; @@ -5036,7 +5036,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) } merge.readers.Add(reader); - Debug.Assert(delCount <= info.Info.DocCount, "delCount=" + delCount + " info.docCount=" + info.Info.DocCount + " rld.pendingDeleteCount=" + rld.PendingDeleteCount + " info.getDelCount()=" + info.DelCount); + Debugging.Assert(() => delCount <= info.Info.DocCount, () => "delCount=" + delCount + " info.docCount=" + info.Info.DocCount + " rld.pendingDeleteCount=" + rld.PendingDeleteCount + " info.getDelCount()=" + info.DelCount); segUpto++; } @@ -5074,7 +5074,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) } } } - Debug.Assert(mergeState.SegmentInfo == merge.info.Info); + Debugging.Assert(() => mergeState.SegmentInfo == merge.info.Info); merge.info.Info.SetFiles(new JCG.HashSet(dirWrapper.CreatedFiles)); // Record which codec was used to write the segment @@ -5260,7 +5260,7 @@ internal virtual void AddMergeException(MergePolicy.OneMerge merge) { lock (this) { - Debug.Assert(merge.Exception != null); + Debugging.Assert(() => merge.Exception != null); if (!mergeExceptions.Contains(merge) && mergeGen == merge.mergeGen) { mergeExceptions.Add(merge); @@ -5378,13 +5378,13 @@ private bool FilesExist(SegmentInfos toSync) ICollection files = toSync.GetFiles(directory, false); foreach (string fileName in files) { - Debug.Assert(SlowFileExists(directory, fileName), "file " + fileName + " does not exist; files=" + Arrays.ToString(directory.ListAll())); + Debugging.Assert(() => SlowFileExists(directory, fileName), () => "file " + fileName + " does not exist; files=" + Arrays.ToString(directory.ListAll())); // If this trips it means we are missing a call to // .checkpoint somewhere, because by the time we // are called, deleter should know about 
every // file referenced by the current head // segmentInfos: - Debug.Assert(deleter.Exists(fileName), "IndexFileDeleter doesn't know about file " + fileName); + Debugging.Assert(() => deleter.Exists(fileName), () => "IndexFileDeleter doesn't know about file " + fileName); } return true; } @@ -5424,9 +5424,8 @@ internal virtual SegmentInfos ToLiveInfos(SegmentInfos sis) /// private void StartCommit(SegmentInfos toSync) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("startStartCommit"); - Debug.Assert(pendingCommit == null); + Debugging.Assert(() => TestPoint("startStartCommit")); + Debugging.Assert(() => pendingCommit == null); if (hitOOM) { @@ -5442,7 +5441,7 @@ private void StartCommit(SegmentInfos toSync) lock (this) { - Debug.Assert(lastCommitChangeCount <= changeCount, "lastCommitChangeCount=" + lastCommitChangeCount + " changeCount=" + changeCount); + Debugging.Assert(() => lastCommitChangeCount <= changeCount, () => "lastCommitChangeCount=" + lastCommitChangeCount + " changeCount=" + changeCount); if (pendingCommitChangeCount == lastCommitChangeCount) { @@ -5460,24 +5459,22 @@ private void StartCommit(SegmentInfos toSync) infoStream.Message("IW", "startCommit index=" + SegString(ToLiveInfos(toSync).Segments) + " changeCount=" + changeCount); } - Debug.Assert(FilesExist(toSync)); + Debugging.Assert(() => FilesExist(toSync)); } - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("midStartCommit"); + Debugging.Assert(() => TestPoint("midStartCommit")); bool pendingCommitSet = false; try { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("midStartCommit2"); + Debugging.Assert(() => TestPoint("midStartCommit2")); lock (this) { - Debug.Assert(pendingCommit == null); + Debugging.Assert(() => pendingCommit == null); - Debug.Assert(segmentInfos.Generation == toSync.Generation); + Debugging.Assert(() => segmentInfos.Generation == toSync.Generation); // Exception here means nothing is prepared // (this method unwinds everything it did on @@ -5716,7 +5713,7 @@ internal static ICollection CreateCompoundFile(InfoStream infoStream, Di { infoStream.Message("IW", "create compound file " + fileName); } - Debug.Assert(Lucene3xSegmentInfoFormat.GetDocStoreOffset(info) == -1); + Debugging.Assert(() => Lucene3xSegmentInfoFormat.GetDocStoreOffset(info) == -1); // Now merge all added files ICollection files = info.GetFiles(); CompoundFileDirectory cfsDir = new CompoundFileDirectory(directory, fileName, context, true); diff --git a/src/Lucene.Net/Index/LogMergePolicy.cs b/src/Lucene.Net/Index/LogMergePolicy.cs index e77de9b7ca..74e4f30d98 100644 --- a/src/Lucene.Net/Index/LogMergePolicy.cs +++ b/src/Lucene.Net/Index/LogMergePolicy.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.Text; @@ -192,7 +192,7 @@ protected virtual long SizeDocs(SegmentCommitInfo info) if (m_calibrateSizeByDeletes) { int delCount = m_writer.Get().NumDeletedDocs(info); - Debug.Assert(delCount <= info.Info.DocCount); + Debugging.Assert(() => delCount <= info.Info.DocCount); return (info.Info.DocCount - (long)delCount); } else @@ -378,7 +378,7 @@ private MergeSpecification FindForcedMergesMaxNumSegments(SegmentInfos infos, in /// public override MergeSpecification 
FindForcedMerges(SegmentInfos infos, int maxNumSegments, IDictionary segmentsToMerge) { - Debug.Assert(maxNumSegments > 0); + Debugging.Assert(() => maxNumSegments > 0); if (IsVerbose) { Message("findForcedMerges: maxNumSegs=" + maxNumSegments + " segsToMerge=" + @@ -469,7 +469,7 @@ public override MergeSpecification FindForcedDeletesMerges(SegmentInfos segmentI var spec = new MergeSpecification(); int firstSegmentWithDeletions = -1; IndexWriter w = m_writer.Get(); - Debug.Assert(w != null); + Debugging.Assert(() => w != null); for (int i = 0; i < numSegments; i++) { SegmentCommitInfo info = segmentInfos.Info(i); @@ -692,7 +692,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment for (int i = start; i < end; i++) { mergeInfos.Add(levels[i].info); - Debug.Assert(infos.Contains(levels[i].info)); + Debugging.Assert(() => infos.Contains(levels[i].info)); } if (IsVerbose) { diff --git a/src/Lucene.Net/Index/MergePolicy.cs b/src/Lucene.Net/Index/MergePolicy.cs index fba557ccf3..877553ac66 100644 --- a/src/Lucene.Net/Index/MergePolicy.cs +++ b/src/Lucene.Net/Index/MergePolicy.cs @@ -1,8 +1,8 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; #if FEATURE_SERIALIZABLE_EXCEPTIONS @@ -93,12 +93,12 @@ internal virtual bool IsConsistent(int maxDoc) int target = Map(i); if (target < 0 || target >= maxDoc) { - Debug.Assert(false, "out of range: " + target + " not in [0-" + maxDoc + "["); + Debugging.Assert(() => false, () => "out of range: " + target + " not in [0-" + maxDoc + "["); return false; } else if (targets.Get(target)) { - Debug.Assert(false, target + " is already taken (" + i + ")"); + Debugging.Assert(() => false, () => target + " is already taken (" + i + ")"); return false; } } @@ -721,7 +721,7 @@ protected virtual long Size(SegmentCommitInfo info) long byteSize = info.GetSizeInBytes(); int delCount = m_writer.Get().NumDeletedDocs(info); double delRatio = (info.Info.DocCount <= 0 ? 0.0f : ((float)delCount / (float)info.Info.DocCount)); - Debug.Assert(delRatio <= 1.0); + Debugging.Assert(() => delRatio <= 1.0); return (info.Info.DocCount <= 0 ? byteSize : (long)(byteSize * (1.0 - delRatio))); } @@ -733,7 +733,7 @@ protected virtual long Size(SegmentCommitInfo info) protected bool IsMerged(SegmentInfos infos, SegmentCommitInfo info) { IndexWriter w = m_writer.Get(); - Debug.Assert(w != null); + Debugging.Assert(() => w != null); bool hasDeletions = w.NumDeletedDocs(info) > 0; return !hasDeletions #pragma warning disable 612, 618 diff --git a/src/Lucene.Net/Index/MergeState.cs b/src/Lucene.Net/Index/MergeState.cs index aeaac26ad6..8b7da8937d 100644 --- a/src/Lucene.Net/Index/MergeState.cs +++ b/src/Lucene.Net/Index/MergeState.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Index @@ -22,8 +22,8 @@ namespace Lucene.Net.Index * limitations under the License. 
*/ - using IBits = Lucene.Net.Util.IBits; using Directory = Lucene.Net.Store.Directory; + using IBits = Lucene.Net.Util.IBits; using InfoStream = Lucene.Net.Util.InfoStream; using MonotonicAppendingInt64Buffer = Lucene.Net.Util.Packed.MonotonicAppendingInt64Buffer; @@ -82,7 +82,7 @@ public static DocMap Build(AtomicReader reader) internal static DocMap Build(int maxDoc, IBits liveDocs) { - Debug.Assert(liveDocs != null); + Debugging.Assert(() => liveDocs != null); MonotonicAppendingInt64Buffer docMap = new MonotonicAppendingInt64Buffer(); int del = 0; for (int i = 0; i < maxDoc; ++i) @@ -95,7 +95,7 @@ internal static DocMap Build(int maxDoc, IBits liveDocs) } docMap.Freeze(); int numDeletedDocs = del; - Debug.Assert(docMap.Count == maxDoc); + Debugging.Assert(() => docMap.Count == maxDoc); return new DocMapAnonymousInnerClassHelper(maxDoc, liveDocs, docMap, numDeletedDocs); } diff --git a/src/Lucene.Net/Index/MultiBits.cs b/src/Lucene.Net/Index/MultiBits.cs index d5ee352d8b..6873dfed6a 100644 --- a/src/Lucene.Net/Index/MultiBits.cs +++ b/src/Lucene.Net/Index/MultiBits.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Text; namespace Lucene.Net.Index @@ -41,7 +41,7 @@ internal sealed class MultiBits : IBits public MultiBits(IBits[] subs, int[] starts, bool defaultValue) { - Debug.Assert(starts.Length == 1 + subs.Length); + Debugging.Assert(() => starts.Length == 1 + subs.Length); this.subs = subs; this.starts = starts; this.sefaultValue = defaultValue; @@ -50,14 +50,14 @@ public MultiBits(IBits[] subs, int[] starts, bool defaultValue) private bool CheckLength(int reader, int doc) { int length = starts[1 + reader] - starts[reader]; - Debug.Assert(doc - starts[reader] < length, "doc=" + doc + " reader=" + reader + " starts[reader]=" + starts[reader] + " length=" + length); + Debugging.Assert(() => doc - starts[reader] < length, () => "doc=" + doc + " reader=" + reader + " starts[reader]=" + starts[reader] + " length=" + length); return true; } public bool Get(int doc) { int reader = ReaderUtil.SubIndex(doc, starts); - Debug.Assert(reader != -1); + Debugging.Assert(() => reader != -1); IBits bits = subs[reader]; if (bits == null) { @@ -65,7 +65,7 @@ public bool Get(int doc) } else { - Debug.Assert(CheckLength(reader, doc)); + Debugging.Assert(() => CheckLength(reader, doc)); return bits.Get(doc - starts[reader]); } } @@ -114,8 +114,8 @@ public sealed class SubResult public SubResult GetMatchingSub(ReaderSlice slice) { int reader = ReaderUtil.SubIndex(slice.Start, starts); - Debug.Assert(reader != -1); - Debug.Assert(reader < subs.Length, "slice=" + slice + " starts[-1]=" + starts[starts.Length - 1]); + Debugging.Assert(() => reader != -1); + Debugging.Assert(() => reader < subs.Length, () => "slice=" + slice + " starts[-1]=" + starts[starts.Length - 1]); SubResult subResult = new SubResult(); if (starts[reader] == slice.Start && starts[1 + reader] == slice.Start + slice.Length) { diff --git a/src/Lucene.Net/Index/MultiDocValues.cs b/src/Lucene.Net/Index/MultiDocValues.cs index ae4b3cdb28..881b003a4c 100644 --- a/src/Lucene.Net/Index/MultiDocValues.cs +++ b/src/Lucene.Net/Index/MultiDocValues.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.IO; @@ -24,8 +24,8 @@ namespace Lucene.Net.Index */ using AppendingPackedInt64Buffer = Lucene.Net.Util.Packed.AppendingPackedInt64Buffer; - using IBits = Lucene.Net.Util.IBits; 
using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; using MonotonicAppendingInt64Buffer = Lucene.Net.Util.Packed.MonotonicAppendingInt64Buffer; using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s; using TermsEnumIndex = Lucene.Net.Index.MultiTermsEnum.TermsEnumIndex; @@ -99,7 +99,7 @@ public static NumericDocValues GetNormValues(IndexReader r, string field) } starts[size] = r.MaxDoc; - Debug.Assert(anyReal); + Debugging.Assert(() => anyReal); return new NumericDocValuesAnonymousInnerClassHelper(values, starts); } @@ -591,8 +591,8 @@ public class MultiSortedDocValues : SortedDocValues /// Creates a new over internal MultiSortedDocValues(SortedDocValues[] values, int[] docStarts, OrdinalMap mapping) { - Debug.Assert(values.Length == mapping.ordDeltas.Length); - Debug.Assert(docStarts.Length == values.Length + 1); + Debugging.Assert(() => values.Length == mapping.ordDeltas.Length); + Debugging.Assert(() => docStarts.Length == values.Length + 1); this.values = values; this.docStarts = docStarts; this.mapping = mapping; @@ -650,8 +650,8 @@ public class MultiSortedSetDocValues : SortedSetDocValues /// Creates a new over internal MultiSortedSetDocValues(SortedSetDocValues[] values, int[] docStarts, OrdinalMap mapping) { - Debug.Assert(values.Length == mapping.ordDeltas.Length); - Debug.Assert(docStarts.Length == values.Length + 1); + Debugging.Assert(() => values.Length == mapping.ordDeltas.Length); + Debugging.Assert(() => docStarts.Length == values.Length + 1); this.values = values; this.docStarts = docStarts; this.mapping = mapping; diff --git a/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs b/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs index 5ae64259ed..73653436d9 100644 --- a/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs +++ b/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Index @@ -90,7 +90,7 @@ public override int Freq { get { - Debug.Assert(current != null); + Debugging.Assert(() => current != null); return current.Freq; } } @@ -99,7 +99,7 @@ public override int Freq public override int Advance(int target) { - Debug.Assert(target > doc); + Debugging.Assert(() => target > doc); while (true) { if (current != null) diff --git a/src/Lucene.Net/Index/MultiDocsEnum.cs b/src/Lucene.Net/Index/MultiDocsEnum.cs index 191ff3bb3f..500ae66928 100644 --- a/src/Lucene.Net/Index/MultiDocsEnum.cs +++ b/src/Lucene.Net/Index/MultiDocsEnum.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Index @@ -92,7 +92,7 @@ public bool CanReuse(MultiTermsEnum parent) public override int Advance(int target) { - Debug.Assert(target > doc); + Debugging.Assert(() => target > doc); while (true) { if (current != null) diff --git a/src/Lucene.Net/Index/MultiFields.cs b/src/Lucene.Net/Index/MultiFields.cs index dfda74826b..8e1471ff1e 100644 --- a/src/Lucene.Net/Index/MultiFields.cs +++ b/src/Lucene.Net/Index/MultiFields.cs @@ -1,8 +1,8 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Index @@ -24,8 +24,8 @@ namespace Lucene.Net.Index * limitations under the License. 
*/ - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; /// /// Exposes flex API, merged from flex API of sub-segments. @@ -117,7 +117,7 @@ public static IBits GetLiveDocs(IndexReader reader) { IList leaves = reader.Leaves; int size = leaves.Count; - Debug.Assert(size > 0, "A reader with deletions must have at least one leave"); + Debugging.Assert(() => size > 0, () => "A reader with deletions must have at least one leave"); if (size == 1) { return leaves[0].AtomicReader.LiveDocs; @@ -175,8 +175,8 @@ public static DocsEnum GetTermDocsEnum(IndexReader r, IBits liveDocs, string fie /// public static DocsEnum GetTermDocsEnum(IndexReader r, IBits liveDocs, string field, BytesRef term, DocsFlags flags) { - Debug.Assert(field != null); - Debug.Assert(term != null); + Debugging.Assert(() => field != null); + Debugging.Assert(() => term != null); Terms terms = GetTerms(r, field); if (terms != null) { @@ -210,8 +210,8 @@ public static DocsAndPositionsEnum GetTermPositionsEnum(IndexReader r, IBits liv /// public static DocsAndPositionsEnum GetTermPositionsEnum(IndexReader r, IBits liveDocs, string field, BytesRef term, DocsAndPositionsFlags flags) { - Debug.Assert(field != null); - Debug.Assert(term != null); + Debugging.Assert(() => field != null); + Debugging.Assert(() => term != null); Terms terms = GetTerms(r, field); if (terms != null) { diff --git a/src/Lucene.Net/Index/MultiTerms.cs b/src/Lucene.Net/Index/MultiTerms.cs index 6cbcd43162..1c43e27f53 100644 --- a/src/Lucene.Net/Index/MultiTerms.cs +++ b/src/Lucene.Net/Index/MultiTerms.cs @@ -1,7 +1,7 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -54,7 +54,7 @@ public MultiTerms(Terms[] subs, ReaderSlice[] subSlices) this.subSlices = subSlices; IComparer _termComp = null; - Debug.Assert(subs.Length > 0, "inefficient: don't use MultiTerms over one sub"); + Debugging.Assert(() => subs.Length > 0, () => "inefficient: don't use MultiTerms over one sub"); bool _hasFreqs = true; bool _hasOffsets = true; bool _hasPositions = true; diff --git a/src/Lucene.Net/Index/MultiTermsEnum.cs b/src/Lucene.Net/Index/MultiTermsEnum.cs index ee15a7c9dc..7a8e6ff9a2 100644 --- a/src/Lucene.Net/Index/MultiTermsEnum.cs +++ b/src/Lucene.Net/Index/MultiTermsEnum.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Index @@ -23,8 +23,8 @@ namespace Lucene.Net.Index * limitations under the License. */ - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; /// /// Exposes API, merged from API of sub-segments. 
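// --- Illustrative sketch (hypothetical values) of the two-delegate overload
// used in the hunks above: the message factory is invoked only when the
// condition delegate returns false, so the failure string is never
// concatenated on the happy path.
using Lucene.Net.Diagnostics;

internal static class AssertMessageExample
{
    internal static void Demo(string field)
    {
        // Neither delegate runs when asserts are disabled; the message
        // delegate runs only on an actual failure.
        Debugging.Assert(() => field != null, () => "unexpected null field; was: " + field);
    }
}
// --- end sketch ---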
@@ -107,7 +107,7 @@ public MultiTermsEnum(ReaderSlice[] slices) /// public TermsEnum Reset(TermsEnumIndex[] termsEnumsIndex) { - Debug.Assert(termsEnumsIndex.Length <= top.Length); + Debugging.Assert(() => termsEnumsIndex.Length <= top.Length); numSubs = 0; numTop = 0; termComp = null; @@ -115,7 +115,7 @@ public TermsEnum Reset(TermsEnumIndex[] termsEnumsIndex) for (int i = 0; i < termsEnumsIndex.Length; i++) { TermsEnumIndex termsEnumIndex = termsEnumsIndex[i]; - Debug.Assert(termsEnumIndex != null); + Debugging.Assert(() => termsEnumIndex != null); // init our term comp if (termComp == null) @@ -213,7 +213,7 @@ public override bool SeekExact(BytesRef term) { top[numTop++] = currentSubs[i]; current = currentSubs[i].Current = currentSubs[i].Terms.Term; - Debug.Assert(term.Equals(currentSubs[i].Current)); + Debugging.Assert(() => term.Equals(currentSubs[i].Current)); } } @@ -285,7 +285,7 @@ public override SeekStatus SeekCeil(BytesRef term) if (status == SeekStatus.NOT_FOUND) { currentSubs[i].Current = currentSubs[i].Terms.Term; - Debug.Assert(currentSubs[i].Current != null); + Debugging.Assert(() => currentSubs[i].Current != null); queue.Add(currentSubs[i]); } else @@ -326,7 +326,7 @@ private void PullTop() { // extract all subs from the queue that have the same // top term - Debug.Assert(numTop == 0); + Debugging.Assert(() => numTop == 0); while (true) { top[numTop++] = queue.Pop(); @@ -367,7 +367,7 @@ public override BytesRef Next() // most impls short-circuit if you SeekCeil to term // they are already on. SeekStatus status = SeekCeil(current); - Debug.Assert(status == SeekStatus.FOUND); + Debugging.Assert(() => status == SeekStatus.FOUND); lastSeekExact = false; } lastSeek = null; @@ -484,7 +484,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) b = null; } - Debug.Assert(entry.Index < docsEnum.subDocsEnum.Length, entry.Index + " vs " + docsEnum.subDocsEnum.Length + "; " + subs.Length); + Debugging.Assert(() => entry.Index < docsEnum.subDocsEnum.Length, () => entry.Index + " vs " + docsEnum.subDocsEnum.Length + "; " + subs.Length); DocsEnum subDocsEnum = entry.Terms.Docs(b, docsEnum.subDocsEnum[entry.Index], flags); if (subDocsEnum != null) { @@ -496,7 +496,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) else { // should this be an error? 
- Debug.Assert(false, "One of our subs cannot provide a docsenum"); + Debugging.Assert(() => false, () => "One of our subs cannot provide a docsenum"); } } @@ -576,7 +576,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos b = null; } - Debug.Assert(entry.Index < docsAndPositionsEnum.subDocsAndPositionsEnum.Length, entry.Index + " vs " + docsAndPositionsEnum.subDocsAndPositionsEnum.Length + "; " + subs.Length); + Debugging.Assert(() => entry.Index < docsAndPositionsEnum.subDocsAndPositionsEnum.Length, () => entry.Index + " vs " + docsAndPositionsEnum.subDocsAndPositionsEnum.Length + "; " + subs.Length); DocsAndPositionsEnum subPostings = entry.Terms.DocsAndPositions(b, docsAndPositionsEnum.subDocsAndPositionsEnum[entry.Index], flags); if (subPostings != null) @@ -619,7 +619,7 @@ public TermsEnumWithSlice(int index, ReaderSlice subSlice) { this.SubSlice = subSlice; this.Index = index; - Debug.Assert(subSlice.Length >= 0, "length=" + subSlice.Length); + Debugging.Assert(() => subSlice.Length >= 0, () => "length=" + subSlice.Length); } public void Reset(TermsEnum terms, BytesRef term) diff --git a/src/Lucene.Net/Index/NormsConsumer.cs b/src/Lucene.Net/Index/NormsConsumer.cs index 178d8c116b..b1797993d5 100644 --- a/src/Lucene.Net/Index/NormsConsumer.cs +++ b/src/Lucene.Net/Index/NormsConsumer.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -50,7 +50,7 @@ internal override void Flush(IDictionary if (state.FieldInfos.HasNorms) { NormsFormat normsFormat = state.SegmentInfo.Codec.NormsFormat; - Debug.Assert(normsFormat != null); + Debugging.Assert(() => normsFormat != null); normsConsumer = normsFormat.NormsConsumer(state); foreach (FieldInfo fi in state.FieldInfos) @@ -63,11 +63,11 @@ internal override void Flush(IDictionary if (toWrite != null && !toWrite.IsEmpty) { toWrite.Flush(state, normsConsumer); - Debug.Assert(fi.NormType == DocValuesType.NUMERIC); + Debugging.Assert(() => fi.NormType == DocValuesType.NUMERIC); } else if (fi.IsIndexed) { - Debug.Assert(fi.NormType == DocValuesType.NONE, "got " + fi.NormType + "; field=" + fi.Name); + Debugging.Assert(() => fi.NormType == DocValuesType.NONE, () => "got " + fi.NormType + "; field=" + fi.Name); } } } diff --git a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs index 3d7bb1bcb9..4a4df0e6fa 100644 --- a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs +++ b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs @@ -1,6 +1,6 @@ using Lucene.Net.Documents; using System; -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -206,7 +206,7 @@ protected override int Compare(int i, int j) [MethodImpl(MethodImplOptions.NoInlining)] public override void Merge(DocValuesFieldUpdates other) { - Debug.Assert(other is NumericDocValuesFieldUpdates); + Debugging.Assert(() => other is NumericDocValuesFieldUpdates); NumericDocValuesFieldUpdates otherUpdates = (NumericDocValuesFieldUpdates)other; if (size + otherUpdates.size > int.MaxValue) { diff --git a/src/Lucene.Net/Index/OrdTermState.cs b/src/Lucene.Net/Index/OrdTermState.cs index 7b89b0d429..5c0b735aa0 100644 --- a/src/Lucene.Net/Index/OrdTermState.cs +++ b/src/Lucene.Net/Index/OrdTermState.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Index { @@ -40,7 +40,7 
@@ public OrdTermState() public override void CopyFrom(TermState other) { - Debug.Assert(other is OrdTermState, "can not copy from " + other.GetType().Name); + Debugging.Assert(() => other is OrdTermState, () => "can not copy from " + other.GetType().Name); this.Ord = ((OrdTermState)other).Ord; } diff --git a/src/Lucene.Net/Index/ParallelCompositeReader.cs b/src/Lucene.Net/Index/ParallelCompositeReader.cs index 8dd774348c..93eb17aa77 100644 --- a/src/Lucene.Net/Index/ParallelCompositeReader.cs +++ b/src/Lucene.Net/Index/ParallelCompositeReader.cs @@ -1,8 +1,8 @@ using J2N.Runtime.CompilerServices; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using JCG = J2N.Collections.Generic; @@ -145,7 +145,7 @@ private static IndexReader[] PrepareSubReaders(CompositeReader[] readers, Compos } else { - Debug.Assert(firstSubReaders[i] is CompositeReader); + Debugging.Assert(() => firstSubReaders[i] is CompositeReader); CompositeReader[] compositeSubs = new CompositeReader[readers.Length]; for (int j = 0; j < readers.Length; j++) { diff --git a/src/Lucene.Net/Index/PrefixCodedTerms.cs b/src/Lucene.Net/Index/PrefixCodedTerms.cs index 50e66dd709..b914b5fd4b 100644 --- a/src/Lucene.Net/Index/PrefixCodedTerms.cs +++ b/src/Lucene.Net/Index/PrefixCodedTerms.cs @@ -1,8 +1,8 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Index @@ -146,7 +146,7 @@ internal virtual void InitializeInstanceFields() /// add a term public virtual void Add(Term term) { - Debug.Assert(lastTerm.Equals(new Term("")) || term.CompareTo(lastTerm) > 0); + Debugging.Assert(() => lastTerm.Equals(new Term("")) || term.CompareTo(lastTerm) > 0); try { diff --git a/src/Lucene.Net/Index/ReadersAndUpdates.cs b/src/Lucene.Net/Index/ReadersAndUpdates.cs index 34f3f79a57..976d412950 100644 --- a/src/Lucene.Net/Index/ReadersAndUpdates.cs +++ b/src/Lucene.Net/Index/ReadersAndUpdates.cs @@ -1,8 +1,8 @@ using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.Runtime.CompilerServices; using System.Text; @@ -27,16 +27,16 @@ namespace Lucene.Net.Index */ using BinaryDocValuesField = BinaryDocValuesField; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using Codec = Lucene.Net.Codecs.Codec; using Directory = Lucene.Net.Store.Directory; using DocValuesConsumer = Lucene.Net.Codecs.DocValuesConsumer; using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat; + using IBits = Lucene.Net.Util.IBits; + using IMutableBits = Lucene.Net.Util.IMutableBits; using IOContext = Lucene.Net.Store.IOContext; using IOUtils = Lucene.Net.Util.IOUtils; using LiveDocsFormat = Lucene.Net.Codecs.LiveDocsFormat; - using IMutableBits = Lucene.Net.Util.IMutableBits; using NumericDocValuesField = NumericDocValuesField; using TrackingDirectoryWrapper = Lucene.Net.Store.TrackingDirectoryWrapper; @@ -104,19 +104,19 @@ public ReadersAndUpdates(IndexWriter writer, SegmentCommitInfo info) public virtual void IncRef() { int rc = refCount.IncrementAndGet(); - Debug.Assert(rc > 1); + Debugging.Assert(() => rc > 1); } public virtual void DecRef() { int rc = refCount.DecrementAndGet(); - Debug.Assert(rc >= 0); + Debugging.Assert(() => rc >= 0); } public virtual int RefCount() { 
int rc = refCount; - Debug.Assert(rc >= 0); + Debugging.Assert(() => rc >= 0); return rc; } @@ -153,7 +153,7 @@ public virtual bool VerifyDocCounts() count = Info.Info.DocCount; } - Debug.Assert(Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count, "info.docCount=" + Info.Info.DocCount + " info.DelCount=" + Info.DelCount + " pendingDeleteCount=" + pendingDeleteCount + " count=" + count); + Debugging.Assert(() => Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count, () => "info.docCount=" + Info.Info.DocCount + " info.DelCount=" + Info.DelCount + " pendingDeleteCount=" + pendingDeleteCount + " count=" + count); return true; } } @@ -220,7 +220,7 @@ public virtual void Release(SegmentReader sr) { lock (this) { - Debug.Assert(Info == sr.SegmentInfo); + Debugging.Assert(() => Info == sr.SegmentInfo); sr.DecRef(); } } @@ -229,10 +229,10 @@ public virtual bool Delete(int docID) { lock (this) { - Debug.Assert(liveDocs != null); - //Debug.Assert(Thread.holdsLock(Writer)); - Debug.Assert(docID >= 0 && docID < liveDocs.Length, "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount); - Debug.Assert(!liveDocsShared); + Debugging.Assert(() => liveDocs != null); + //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => docID >= 0 && docID < liveDocs.Length, () => "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount); + Debugging.Assert(() => !liveDocsShared); bool didDelete = liveDocs.Get(docID); if (didDelete) { @@ -298,7 +298,7 @@ public virtual SegmentReader GetReadOnlyClone(IOContext context) if (reader == null) { GetReader(context).DecRef(); - Debug.Assert(reader != null); + Debugging.Assert(() => reader != null); } liveDocsShared = true; if (liveDocs != null) @@ -307,7 +307,7 @@ public virtual SegmentReader GetReadOnlyClone(IOContext context) } else { - Debug.Assert(reader.LiveDocs == liveDocs); + Debugging.Assert(() => reader.LiveDocs == liveDocs); reader.IncRef(); return reader; } @@ -318,8 +318,8 @@ public virtual void InitWritableLiveDocs() { lock (this) { - //Debug.Assert(Thread.holdsLock(Writer)); - Debug.Assert(Info.Info.DocCount > 0); + //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Info.Info.DocCount > 0); //System.out.println("initWritableLivedocs seg=" + info + " liveDocs=" + liveDocs + " shared=" + shared); if (liveDocsShared) { @@ -348,7 +348,7 @@ public virtual IBits LiveDocs { lock (this) { - //Debug.Assert(Thread.holdsLock(Writer)); + //Debugging.Assert(Thread.holdsLock(Writer)); return liveDocs; } } @@ -359,7 +359,7 @@ public virtual IBits GetReadOnlyLiveDocs() lock (this) { //System.out.println("getROLiveDocs seg=" + info); - //Debug.Assert(Thread.holdsLock(Writer)); + //Debugging.Assert(Thread.holdsLock(Writer)); liveDocsShared = true; //if (liveDocs != null) { //System.out.println(" liveCount=" + liveDocs.count()); @@ -393,7 +393,7 @@ public virtual bool WriteLiveDocs(Directory dir) { lock (this) { - //Debug.Assert(Thread.holdsLock(Writer)); + //Debugging.Assert(Thread.holdsLock(Writer)); //System.out.println("rld.writeLiveDocs seg=" + info + " pendingDelCount=" + pendingDeleteCount + " numericUpdates=" + numericUpdates); if (pendingDeleteCount == 0) { @@ -401,7 +401,7 @@ public virtual bool WriteLiveDocs(Directory dir) } // We have new deletes - Debug.Assert(liveDocs.Length == Info.Info.DocCount); + Debugging.Assert(() => liveDocs.Length == 
Info.Info.DocCount); // Do this so we can delete any created files on // exception; this saves all codecs from having to do @@ -458,10 +458,10 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta { lock (this) { - //Debug.Assert(Thread.holdsLock(Writer)); + //Debugging.Assert(Thread.holdsLock(Writer)); //System.out.println("rld.writeFieldUpdates: seg=" + info + " numericFieldUpdates=" + numericFieldUpdates); - Debug.Assert(dvUpdates.Any()); + Debugging.Assert(dvUpdates.Any); // Do this so we can delete any created files on // exception; this saves all codecs from having to do @@ -523,7 +523,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta string field = e.Key; NumericDocValuesFieldUpdates fieldUpdates = e.Value; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - Debug.Assert(fieldInfo != null); + Debugging.Assert(() => fieldInfo != null); fieldInfo.DocValuesGen = nextFieldInfosGen; // write the numeric updates to a new gen'd docvalues file @@ -536,7 +536,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta string field = e.Key; BinaryDocValuesFieldUpdates dvFieldUpdates = e.Value; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - Debug.Assert(fieldInfo != null); + Debugging.Assert(() => fieldInfo != null); // System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " f=" + dvFieldUpdates + ", updates=" + dvFieldUpdates); @@ -746,7 +746,7 @@ internal virtual SegmentReader GetReaderForMerge(IOContext context) { lock (this) { - //Debug.Assert(Thread.holdsLock(Writer)); + //Debugging.Assert(Thread.holdsLock(Writer)); // must execute these two statements as atomic operation, otherwise we // could lose updates if e.g. another thread calls writeFieldUpdates in // between, or the updates are applied to the obtained reader, but then diff --git a/src/Lucene.Net/Index/SegmentCoreReaders.cs b/src/Lucene.Net/Index/SegmentCoreReaders.cs index 0874836f30..35fe24c5b3 100644 --- a/src/Lucene.Net/Index/SegmentCoreReaders.cs +++ b/src/Lucene.Net/Index/SegmentCoreReaders.cs @@ -1,9 +1,9 @@ using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Index @@ -106,7 +106,7 @@ internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInf SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.Info, fieldInfos, context, termsIndexDivisor); // Ask codec for its Fields fields = format.FieldsProducer(segmentReadState); - Debug.Assert(fields != null); + Debugging.Assert(() => fields != null); // ask codec for its Norms: // TODO: since we don't write any norms file if there are no norms, // kinda jaky to assume the codec handles the case of no norms file at all gracefully?! 
@@ -114,7 +114,7 @@ internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInf if (fieldInfos.HasNorms) { normsProducer = codec.NormsFormat.NormsProducer(segmentReadState); - Debug.Assert(normsProducer != null); + Debugging.Assert(() => normsProducer != null); } else { @@ -160,7 +160,7 @@ internal void IncRef() internal NumericDocValues GetNormValues(FieldInfo fi) { - Debug.Assert(normsProducer != null); + Debugging.Assert(() => normsProducer != null); IDictionary normFields = normsLocal.Value; diff --git a/src/Lucene.Net/Index/SegmentDocValues.cs b/src/Lucene.Net/Index/SegmentDocValues.cs index a9846f161d..305949b096 100644 --- a/src/Lucene.Net/Index/SegmentDocValues.cs +++ b/src/Lucene.Net/Index/SegmentDocValues.cs @@ -1,8 +1,8 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; namespace Lucene.Net.Index @@ -85,7 +85,7 @@ internal DocValuesProducer GetDocValuesProducer(long? gen, SegmentCommitInfo si, if (!(genDVProducers.TryGetValue(gen, out dvp))) { dvp = NewDocValuesProducer(si, context, dir, dvFormat, gen, infos, termsIndexDivisor); - Debug.Assert(dvp != null); + Debugging.Assert(() => dvp != null); genDVProducers[gen] = dvp; } else @@ -108,7 +108,7 @@ internal void DecRef(IList dvProducersGens) foreach (long? gen in dvProducersGens) { RefCount dvp = genDVProducers[gen]; - Debug.Assert(dvp != null, "gen=" + gen); + Debugging.Assert(() => dvp != null, () => "gen=" + gen); try { dvp.DecRef(); diff --git a/src/Lucene.Net/Index/SegmentInfo.cs b/src/Lucene.Net/Index/SegmentInfo.cs index 9092993b91..6740156866 100644 --- a/src/Lucene.Net/Index/SegmentInfo.cs +++ b/src/Lucene.Net/Index/SegmentInfo.cs @@ -1,7 +1,7 @@ using J2N.Collections.Generic.Extensions; using System; using System.Collections.Generic; -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Text; using System.Text.RegularExpressions; @@ -102,7 +102,7 @@ public SegmentInfo(Directory dir, string version, string name, int docCount, boo /// public SegmentInfo(Directory dir, string version, string name, int docCount, bool isCompoundFile, Codec codec, IDictionary diagnostics, IDictionary attributes) { - Debug.Assert(!(dir is TrackingDirectoryWrapper)); + Debugging.Assert(() => !(dir is TrackingDirectoryWrapper)); this.Dir = dir; this.version = version; this.Name = name; @@ -137,7 +137,7 @@ public Codec Codec get => codec; set { - Debug.Assert(this.codec == null); + Debugging.Assert(() => this.codec == null); if (value == null) { throw new ArgumentException("codec must be non-null"); diff --git a/src/Lucene.Net/Index/SegmentInfos.cs b/src/Lucene.Net/Index/SegmentInfos.cs index c9a613986a..14d0633f03 100644 --- a/src/Lucene.Net/Index/SegmentInfos.cs +++ b/src/Lucene.Net/Index/SegmentInfos.cs @@ -1,11 +1,11 @@ +using J2N; using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Support.IO; using System; using System.Collections; -using J2N; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; using System.Text; @@ -543,7 +543,7 @@ private void Write(Directory directory) segnOutput.WriteInt64(e.Key); segnOutput.WriteStringSet(e.Value); } - Debug.Assert(si.Dir == directory); + Debugging.Assert(() => si.Dir == directory); // If this segment is pre-4.x, perform a one-time // "ugprade" to write the .si file for it: @@ -710,7 
+710,7 @@ public object Clone() sis.segments = new List(Count); foreach (SegmentCommitInfo info in segments) { - Debug.Assert(info.Info.Codec != null); + Debugging.Assert(() => info.Info.Codec != null); // dont directly access segments, use add method!!! sis.Add((SegmentCommitInfo)(info.Clone())); } @@ -1153,7 +1153,7 @@ public ICollection GetFiles(Directory dir, bool includeSegmentsFile) for (int i = 0; i < size; i++) { var info = Info(i); - Debug.Assert(info.Info.Dir == dir); + Debugging.Assert(() => info.Info.Dir == dir); if (info.Info.Dir == dir) { files.UnionWith(info.GetFiles()); @@ -1341,7 +1341,7 @@ internal void ApplyMergeChanges(MergePolicy.OneMerge merge, bool dropSegment) int newSegIdx = 0; for (int segIdx = 0, cnt = segments.Count; segIdx < cnt; segIdx++) { - Debug.Assert(segIdx >= newSegIdx); + Debugging.Assert(() => segIdx >= newSegIdx); SegmentCommitInfo info = segments[segIdx]; if (mergedAway.Contains(info)) { @@ -1378,7 +1378,7 @@ internal IList CreateBackupSegmentInfos() var list = new List(Count); foreach (var info in segments) { - Debug.Assert(info.Info.Codec != null); + Debugging.Assert(() => info.Info.Codec != null); list.Add((SegmentCommitInfo)(info.Clone())); } return list; diff --git a/src/Lucene.Net/Index/SegmentMerger.cs b/src/Lucene.Net/Index/SegmentMerger.cs index 8a2128fbfb..d1828651a1 100644 --- a/src/Lucene.Net/Index/SegmentMerger.cs +++ b/src/Lucene.Net/Index/SegmentMerger.cs @@ -1,8 +1,8 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; @@ -25,12 +25,12 @@ namespace Lucene.Net.Index * limitations under the License. */ - using IBits = Lucene.Net.Util.IBits; using Codec = Lucene.Net.Codecs.Codec; using Directory = Lucene.Net.Store.Directory; using DocValuesConsumer = Lucene.Net.Codecs.DocValuesConsumer; using FieldInfosWriter = Lucene.Net.Codecs.FieldInfosWriter; using FieldsConsumer = Lucene.Net.Codecs.FieldsConsumer; + using IBits = Lucene.Net.Util.IBits; using InfoStream = Lucene.Net.Util.InfoStream; using IOContext = Lucene.Net.Store.IOContext; using IOUtils = Lucene.Net.Util.IOUtils; @@ -110,7 +110,7 @@ internal MergeState Merge() long t1 = Time.NanoTime(); mergeState.InfoStream.Message("SM", ((t1 - t0) / 1000000) + " msec to merge stored fields [" + numMerged + " docs]"); } - Debug.Assert(numMerged == mergeState.SegmentInfo.DocCount); + Debugging.Assert(() => numMerged == mergeState.SegmentInfo.DocCount); SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.InfoStream, directory, mergeState.SegmentInfo, mergeState.FieldInfos, termIndexInterval, null, context); if (mergeState.InfoStream.IsEnabled("SM")) @@ -164,7 +164,7 @@ internal MergeState Merge() long t1 = Time.NanoTime(); mergeState.InfoStream.Message("SM", ((t1 - t0) / 1000000) + " msec to merge vectors [" + numMerged + " docs]"); } - Debug.Assert(numMerged == mergeState.SegmentInfo.DocCount); + Debugging.Assert(() => numMerged == mergeState.SegmentInfo.DocCount); } // write the merged infos diff --git a/src/Lucene.Net/Index/SegmentReader.cs b/src/Lucene.Net/Index/SegmentReader.cs index 30321426b1..9f393dfc78 100644 --- a/src/Lucene.Net/Index/SegmentReader.cs +++ b/src/Lucene.Net/Index/SegmentReader.cs @@ -1,8 +1,8 @@ using J2N.Runtime.CompilerServices; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; using 
System.Globalization; using System.IO; using JCG = J2N.Collections.Generic; @@ -100,7 +100,7 @@ public SegmentReader(SegmentCommitInfo si, int termInfosIndexDivisor, IOContext } else { - Debug.Assert(si.DelCount == 0); + Debugging.Assert(() => si.DelCount == 0); liveDocs = null; } numDocs = si.Info.DocCount - si.DelCount; @@ -464,7 +464,7 @@ public override NumericDocValues GetNumericDocValues(string field) { DocValuesProducer dvProducer; dvProducersByField.TryGetValue(field, out dvProducer); - Debug.Assert(dvProducer != null); + Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetNumeric(fi); dvFields[field] = dvs; } @@ -494,7 +494,7 @@ public override IBits GetDocsWithField(string field) { DocValuesProducer dvProducer; dvProducersByField.TryGetValue(field, out dvProducer); - Debug.Assert(dvProducer != null); + Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetDocsWithField(fi); dvFields[field] = dvs; } @@ -520,7 +520,7 @@ public override BinaryDocValues GetBinaryDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - Debug.Assert(dvProducer != null); + Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetBinary(fi); dvFields[field] = dvs; } @@ -546,7 +546,7 @@ public override SortedDocValues GetSortedDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - Debug.Assert(dvProducer != null); + Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetSorted(fi); dvFields[field] = dvs; } @@ -572,7 +572,7 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - Debug.Assert(dvProducer != null); + Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetSortedSet(fi); dvFields[field] = dvs; } diff --git a/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs b/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs index f1f32a506f..aff4f93ea8 100644 --- a/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs +++ b/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -80,7 +80,7 @@ public override void Warm(AtomicReader reader) break; default: - Debug.Assert(false); // unknown dv type + Debugging.Assert(() => false); // unknown dv type break; } docValuesCount++; diff --git a/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs b/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs index 5b9c8a21c0..de57702b43 100644 --- a/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs +++ b/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Index { @@ -39,7 +39,7 @@ internal sealed class SingletonSortedSetDocValues : SortedSetDocValues public SingletonSortedSetDocValues(SortedDocValues @in) { this.@in = @in; - Debug.Assert(NO_MORE_ORDS == -1); // this allows our nextOrd() to work for missing values without a check + Debugging.Assert(() => NO_MORE_ORDS == -1); // this allows our nextOrd() to work for missing values without a check } /// diff --git a/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs b/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs index 8429766bcb..428ef0b718 100644 --- a/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs +++ b/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs @@ -1,5 +1,5 @@ +using 
Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -60,7 +60,7 @@ public static AtomicReader Wrap(IndexReader reader) } else { - Debug.Assert(reader is AtomicReader); + Debugging.Assert(() => reader is AtomicReader); return (AtomicReader)reader; } } @@ -174,7 +174,7 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) { return null; } - Debug.Assert(map != null); + Debugging.Assert(() => map != null); int size = @in.Leaves.Count; var values = new SortedSetDocValues[size]; int[] starts = new int[size + 1]; diff --git a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs index 57a4e25ec7..c1378a3d63 100644 --- a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs +++ b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -130,7 +130,7 @@ protected internal virtual void ReleaseGen(long gen) throw new ArgumentException("commit gen=" + gen + " is not currently snapshotted"); } int refCountInt = (int)refCount; - Debug.Assert(refCountInt > 0); + Debugging.Assert(() => refCountInt > 0); refCountInt--; if (refCountInt == 0) { diff --git a/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs b/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs index 329431b45b..6619ca6dd2 100644 --- a/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs +++ b/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -21,8 +21,8 @@ namespace Lucene.Net.Index * limitations under the License. 
*/ - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; /// /// Implements a wrapping a provided @@ -94,7 +94,7 @@ public override bool SeekExact(BytesRef text) public override void SeekExact(long ord) { - Debug.Assert(ord >= 0 && ord < values.ValueCount); + Debugging.Assert(() => ord >= 0 && ord < values.ValueCount); currentOrd = (int)ord; values.LookupOrd(currentOrd, term); } @@ -132,7 +132,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - Debug.Assert(state != null && state is OrdTermState); + Debugging.Assert(() => state != null && state is OrdTermState); this.SeekExact(((OrdTermState)state).Ord); } diff --git a/src/Lucene.Net/Index/SortedDocValuesWriter.cs b/src/Lucene.Net/Index/SortedDocValuesWriter.cs index 108be9ac5b..17072c52e6 100644 --- a/src/Lucene.Net/Index/SortedDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedDocValuesWriter.cs @@ -1,9 +1,9 @@ using Lucene.Net.Codecs; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using Lucene.Net.Util.Packed; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -114,7 +114,7 @@ public override void Flush(SegmentWriteState state, DocValuesConsumer dvConsumer { int maxDoc = state.SegmentInfo.DocCount; - Debug.Assert(pending.Count == maxDoc); + Debugging.Assert(() => pending.Count == maxDoc); int valueCount = hash.Count; int[] sortedValues = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer); diff --git a/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs b/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs index 6e64736f6d..fe2fbfbba3 100644 --- a/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs +++ b/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -21,8 +21,8 @@ namespace Lucene.Net.Index * limitations under the License. 
*/ - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; /// /// Implements a wrapping a provided @@ -94,7 +94,7 @@ public override bool SeekExact(BytesRef text) public override void SeekExact(long ord) { - Debug.Assert(ord >= 0 && ord < values.ValueCount); + Debugging.Assert(() => ord >= 0 && ord < values.ValueCount); currentOrd = (int)ord; values.LookupOrd(currentOrd, term); } @@ -132,7 +132,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - Debug.Assert(state != null && state is OrdTermState); + Debugging.Assert(() => state != null && state is OrdTermState); this.SeekExact(((OrdTermState)state).Ord); } diff --git a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs index 82c793322d..a85d348043 100644 --- a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -164,7 +164,7 @@ public override void Flush(SegmentWriteState state, DocValuesConsumer dvConsumer { int maxDoc = state.SegmentInfo.DocCount; int maxCountPerDoc = maxCount; - Debug.Assert(pendingCounts.Count == maxDoc); + Debugging.Assert(() => pendingCounts.Count == maxDoc); int valueCount = hash.Count; int[] sortedValues = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer); @@ -203,7 +203,7 @@ private IEnumerable GetBytesRefEnumberable(int valueCount, int[] sorte { AppendingDeltaPackedInt64Buffer.Iterator iter = pendingCounts.GetIterator(); - Debug.Assert(maxDoc == pendingCounts.Count, "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count); + Debugging.Assert(() => maxDoc == pendingCounts.Count, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count); for (int i = 0; i < maxDoc; ++i) { diff --git a/src/Lucene.Net/Index/StandardDirectoryReader.cs b/src/Lucene.Net/Index/StandardDirectoryReader.cs index dd8c99a4fb..86776c7a8f 100644 --- a/src/Lucene.Net/Index/StandardDirectoryReader.cs +++ b/src/Lucene.Net/Index/StandardDirectoryReader.cs @@ -1,7 +1,7 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; @@ -117,7 +117,7 @@ internal static DirectoryReader Open(IndexWriter writer, SegmentInfos infos, boo // actual instance of SegmentInfoPerCommit in // IndexWriter's segmentInfos: SegmentCommitInfo info = infos.Info(i); - Debug.Assert(info.Info.Dir == dir); + Debugging.Assert(() => info.Info.Dir == dir); ReadersAndUpdates rld = writer.readerPool.Get(info, true); try { @@ -235,8 +235,8 @@ private static DirectoryReader Open(Directory directory, SegmentInfos infos, ILi // there are changes to the reader, either liveDocs or DV updates readerShared[i] = false; // Steal the ref returned by SegmentReader ctor: - Debug.Assert(infos.Info(i).Info.Dir == newReaders[i].SegmentInfo.Info.Dir); - Debug.Assert(infos.Info(i).HasDeletions || infos.Info(i).HasFieldUpdates); + Debugging.Assert(() => infos.Info(i).Info.Dir == newReaders[i].SegmentInfo.Info.Dir); + Debugging.Assert(() => infos.Info(i).HasDeletions || infos.Info(i).HasFieldUpdates); if (newReaders[i].SegmentInfo.DelGen == infos.Info(i).DelGen) { // only DV updates diff --git 
a/src/Lucene.Net/Index/TermContext.cs b/src/Lucene.Net/Index/TermContext.cs index 2a35ceb204..922b9ae6de 100644 --- a/src/Lucene.Net/Index/TermContext.cs +++ b/src/Lucene.Net/Index/TermContext.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; namespace Lucene.Net.Index { @@ -57,7 +57,7 @@ public sealed class TermContext /// public TermContext(IndexReaderContext context) { - Debug.Assert(context != null && context.IsTopLevel); + Debugging.Assert(() => context != null && context.IsTopLevel); TopReaderContext = context; docFreq = 0; int len; @@ -92,7 +92,7 @@ public TermContext(IndexReaderContext context, TermState state, int ord, int doc /// public static TermContext Build(IndexReaderContext context, Term term) { - Debug.Assert(context != null && context.IsTopLevel); + Debugging.Assert(() => context != null && context.IsTopLevel); string field = term.Field; BytesRef bytes = term.Bytes; TermContext perReaderTermState = new TermContext(context); @@ -135,9 +135,9 @@ public void Clear() /// public void Register(TermState state, int ord, int docFreq, long totalTermFreq) { - Debug.Assert(state != null, "state must not be null"); - Debug.Assert(ord >= 0 && ord < states.Length); - Debug.Assert(states[ord] == null, "state for ord: " + ord + " already registered"); + Debugging.Assert(() => state != null, () => "state must not be null"); + Debugging.Assert(() => ord >= 0 && ord < states.Length); + Debugging.Assert(() => states[ord] == null, () => "state for ord: " + ord + " already registered"); this.docFreq += docFreq; if (this.totalTermFreq >= 0 && totalTermFreq >= 0) { @@ -160,7 +160,7 @@ public void Register(TermState state, int ord, int docFreq, long totalTermFreq) /// for the reader was registered public TermState Get(int ord) { - Debug.Assert(ord >= 0 && ord < states.Length); + Debugging.Assert(() => ord >= 0 && ord < states.Length); return states[ord]; } diff --git a/src/Lucene.Net/Index/TermVectorsConsumer.cs b/src/Lucene.Net/Index/TermVectorsConsumer.cs index 492c90a1a7..94c48e7048 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumer.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumer.cs @@ -1,8 +1,8 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -61,12 +61,12 @@ public override void Flush(IDictionary fields if (writer != null) { int numDocs = state.SegmentInfo.DocCount; - Debug.Assert(numDocs > 0); + Debugging.Assert(() => numDocs > 0); // At least one doc in this run had term vectors enabled try { Fill(numDocs); - Debug.Assert(state.SegmentInfo != null); + Debugging.Assert(() => state.SegmentInfo != null); writer.Finish(state.FieldInfos, numDocs); } finally @@ -134,14 +134,13 @@ internal override void FinishDocument(TermsHash termsHash) } writer.FinishDocument(); - Debug.Assert(lastDocID == docState.docID, "lastDocID=" + lastDocID + " docState.docID=" + docState.docID); + Debugging.Assert(() => lastDocID == docState.docID, () => "lastDocID=" + lastDocID + " docState.docID=" + docState.docID); lastDocID++; termsHash.Reset(); Reset(); - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docWriter.TestPoint("TermVectorsTermsWriter.finishDocument end"); + Debugging.Assert(() => docWriter.TestPoint("TermVectorsTermsWriter.finishDocument end")); } [MethodImpl(MethodImplOptions.NoInlining)] @@ -186,7 
+185,7 @@ internal void AddFieldToFlush(TermVectorsConsumerPerField fieldToFlush) internal override void StartDocument() { - Debug.Assert(ClearLastVectorFieldName()); + Debugging.Assert(ClearLastVectorFieldName); Reset(); } diff --git a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs index 7e37b36ae7..f8de489682 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs @@ -1,7 +1,7 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -25,8 +25,6 @@ namespace Lucene.Net.Index using ByteBlockPool = Lucene.Net.Util.ByteBlockPool; using BytesRef = Lucene.Net.Util.BytesRef; - using OffsetAttribute = Lucene.Net.Analysis.TokenAttributes.OffsetAttribute; - using PayloadAttribute = Lucene.Net.Analysis.TokenAttributes.PayloadAttribute; using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator; using TermVectorsWriter = Lucene.Net.Codecs.TermVectorsWriter; @@ -166,14 +164,13 @@ internal override void Finish() [MethodImpl(MethodImplOptions.NoInlining)] internal void FinishDocument() { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("TermVectorsTermsWriterPerField.finish start"); + Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.finish start")); int numPostings = termsHashPerField.bytesHash.Count; BytesRef flushTerm = termsWriter.flushTerm; - Debug.Assert(numPostings >= 0); + Debugging.Assert(() => numPostings >= 0); if (numPostings > maxNumPostings) { @@ -184,7 +181,7 @@ internal void FinishDocument() // of a given field in the doc. At this point we flush // our hash into the DocWriter. 
- Debug.Assert(termsWriter.VectorFieldsInOrder(fieldInfo)); + Debugging.Assert(() => termsWriter.VectorFieldsInOrder(fieldInfo)); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; TermVectorsWriter tv = termsWriter.writer; @@ -349,7 +346,7 @@ internal override ParallelPostingsArray NewInstance(int size) internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) { - Debug.Assert(toArray is TermVectorsPostingsArray); + Debugging.Assert(() => toArray is TermVectorsPostingsArray); TermVectorsPostingsArray to = (TermVectorsPostingsArray)toArray; base.CopyTo(toArray, numToCopy); diff --git a/src/Lucene.Net/Index/TermsHashPerField.cs b/src/Lucene.Net/Index/TermsHashPerField.cs index cd5c1dfe15..1e08470823 100644 --- a/src/Lucene.Net/Index/TermsHashPerField.cs +++ b/src/Lucene.Net/Index/TermsHashPerField.cs @@ -1,7 +1,7 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -112,7 +112,7 @@ public override void Abort() public void InitReader(ByteSliceReader reader, int termID, int stream) { - Debug.Assert(stream < streamCount); + Debugging.Assert(() => stream < streamCount); int intStart = postingsArray.intStarts[termID]; int[] ints = intPool.Buffers[intStart >> Int32BlockPool.INT32_BLOCK_SHIFT]; int upto = intStart & Int32BlockPool.INT32_BLOCK_MASK; @@ -291,7 +291,7 @@ internal void WriteByte(int stream, byte b) { int upto = intUptos[intUptoStart + stream]; var bytes = bytePool.Buffers[upto >> ByteBlockPool.BYTE_BLOCK_SHIFT]; - Debug.Assert(bytes != null); + Debugging.Assert(() => bytes != null); int offset = upto & ByteBlockPool.BYTE_BLOCK_MASK; if (bytes[offset] != 0) { @@ -319,7 +319,7 @@ public void WriteBytes(int stream, byte[] b, int offset, int len) /// internal void WriteVInt32(int stream, int i) { - Debug.Assert(stream < streamCount); + Debugging.Assert(() => stream < streamCount); while ((i & ~0x7F) != 0) { WriteByte(stream, (sbyte)((i & 0x7f) | 0x80)); diff --git a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs index b7cfb6cae1..3327ee9672 100644 --- a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs +++ b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; @@ -44,7 +45,7 @@ internal class ThreadAffinityDocumentsWriterThreadPool : DocumentsWriterPerThrea public ThreadAffinityDocumentsWriterThreadPool(int maxNumPerThreads) : base(maxNumPerThreads) { - Debug.Assert(MaxThreadStates >= 1); + Debugging.Assert(() => MaxThreadStates >= 1); } public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter documentsWriter) @@ -66,7 +67,7 @@ we should somehow prevent this. */ ThreadState newState = NewThreadState(); // state is already locked if non-null if (newState != null) { - //Debug.Assert(newState.HeldByCurrentThread); + //Debugging.Assert(newState.HeldByCurrentThread); threadBindings[requestingThread] = newState; return newState; } @@ -80,7 +81,7 @@ we should somehow prevent this. 
*/ minThreadState = MinContendedThreadState(); } } - Debug.Assert(minThreadState != null, "ThreadState is null"); + Debugging.Assert(() => minThreadState != null, () => "ThreadState is null"); minThreadState.@Lock(); return minThreadState; diff --git a/src/Lucene.Net/Lucene.Net.csproj b/src/Lucene.Net/Lucene.Net.csproj index 5942e2d9fb..e218980298 100644 --- a/src/Lucene.Net/Lucene.Net.csproj +++ b/src/Lucene.Net/Lucene.Net.csproj @@ -108,6 +108,7 @@ + diff --git a/src/Lucene.Net/Search/CachingWrapperFilter.cs b/src/Lucene.Net/Search/CachingWrapperFilter.cs index eccffe2ae2..683c9f17e3 100644 --- a/src/Lucene.Net/Search/CachingWrapperFilter.cs +++ b/src/Lucene.Net/Search/CachingWrapperFilter.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Search @@ -120,7 +120,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo { missCount++; docIdSet = DocIdSetToCache(_filter.GetDocIdSet(context, null), reader); - Debug.Assert(docIdSet.IsCacheable); + Debugging.Assert(() => docIdSet.IsCacheable); #if FEATURE_CONDITIONALWEAKTABLE_ADDORUPDATE _cache.AddOrUpdate(key, docIdSet); #else diff --git a/src/Lucene.Net/Search/CollectionStatistics.cs b/src/Lucene.Net/Search/CollectionStatistics.cs index 4ca504aead..107aefedda 100644 --- a/src/Lucene.Net/Search/CollectionStatistics.cs +++ b/src/Lucene.Net/Search/CollectionStatistics.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Search { @@ -37,10 +37,10 @@ public class CollectionStatistics /// public CollectionStatistics(string field, long maxDoc, long docCount, long sumTotalTermFreq, long sumDocFreq) { - Debug.Assert(maxDoc >= 0); - Debug.Assert(docCount >= -1 && docCount <= maxDoc); // #docs with field must be <= #docs - Debug.Assert(sumDocFreq == -1 || sumDocFreq >= docCount); // #postings must be >= #docs with field - Debug.Assert(sumTotalTermFreq == -1 || sumTotalTermFreq >= sumDocFreq); // #positions must be >= #postings + Debugging.Assert(() => maxDoc >= 0); + Debugging.Assert(() => docCount >= -1 && docCount <= maxDoc); // #docs with field must be <= #docs + Debugging.Assert(() => sumDocFreq == -1 || sumDocFreq >= docCount); // #postings must be >= #docs with field + Debugging.Assert(() => sumTotalTermFreq == -1 || sumTotalTermFreq >= sumDocFreq); // #positions must be >= #postings this.field = field; this.maxDoc = maxDoc; this.docCount = docCount; diff --git a/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs b/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs index 574154e0e2..11b4ac6cbf 100644 --- a/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs +++ b/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -163,7 +163,7 @@ public override bool Collect(BytesRef bytes) } TermState termState = termsEnum.GetTermState(); - Debug.Assert(termState != null); + Debugging.Assert(() => termState != null); if (pos < 0) { pos = (-pos) - 1; @@ -235,7 +235,7 @@ public override int[] Init() { int[] ord = base.Init(); termState = new TermContext[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)]; - Debug.Assert(termState.Length >= ord.Length); + Debugging.Assert(() => termState.Length >= ord.Length); return ord; } @@ -248,7 +248,7 @@ public override int[] Grow() Array.Copy(termState, 0, tmpTermState, 0, 
termState.Length); termState = tmpTermState; } - Debug.Assert(termState.Length >= ord.Length); + Debugging.Assert(() => termState.Length >= ord.Length); return ord; } diff --git a/src/Lucene.Net/Search/ConstantScoreQuery.cs b/src/Lucene.Net/Search/ConstantScoreQuery.cs index 1f73ff97eb..1234c6d447 100644 --- a/src/Lucene.Net/Search/ConstantScoreQuery.cs +++ b/src/Lucene.Net/Search/ConstantScoreQuery.cs @@ -1,7 +1,7 @@ using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Text; namespace Lucene.Net.Search @@ -84,7 +84,7 @@ public override Query Rewrite(IndexReader reader) } else { - Debug.Assert(m_filter != null); + Debugging.Assert(() => m_filter != null); // Fix outdated usage pattern from Lucene 2.x/early-3.x: // because ConstantScoreQuery only accepted filters, // QueryWrapperFilter was used to wrap queries. @@ -154,12 +154,12 @@ public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool score //DocIdSetIterator disi; if (outerInstance.m_filter != null) { - Debug.Assert(outerInstance.m_query == null); + Debugging.Assert(() => outerInstance.m_query == null); return base.GetBulkScorer(context, scoreDocsInOrder, acceptDocs); } else { - Debug.Assert(outerInstance.m_query != null && innerWeight != null); + Debugging.Assert(() => outerInstance.m_query != null && innerWeight != null); BulkScorer bulkScorer = innerWeight.GetBulkScorer(context, scoreDocsInOrder, acceptDocs); if (bulkScorer == null) { @@ -174,7 +174,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) DocIdSetIterator disi; if (outerInstance.m_filter != null) { - Debug.Assert(outerInstance.m_query == null); + Debugging.Assert(() => outerInstance.m_query == null); DocIdSet dis = outerInstance.m_filter.GetDocIdSet(context, acceptDocs); if (dis == null) { @@ -184,7 +184,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) } else { - Debug.Assert(outerInstance.m_query != null && innerWeight != null); + Debugging.Assert(() => outerInstance.m_query != null && innerWeight != null); disi = innerWeight.GetScorer(context, acceptDocs); } @@ -310,7 +310,7 @@ public override int NextDoc() public override float GetScore() { - Debug.Assert(docIdSetIterator.DocID != NO_MORE_DOCS); + Debugging.Assert(() => docIdSetIterator.DocID != NO_MORE_DOCS); return theScore; } diff --git a/src/Lucene.Net/Search/DisjunctionScorer.cs b/src/Lucene.Net/Search/DisjunctionScorer.cs index ec23a6f083..2c6f3ab02d 100644 --- a/src/Lucene.Net/Search/DisjunctionScorer.cs +++ b/src/Lucene.Net/Search/DisjunctionScorer.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -145,7 +145,7 @@ public override long GetCost() public override int NextDoc() { - Debug.Assert(m_doc != NO_MORE_DOCS); + Debugging.Assert(() => m_doc != NO_MORE_DOCS); while (true) { if (m_subScorers[0].NextDoc() != NO_MORE_DOCS) @@ -170,7 +170,7 @@ public override int NextDoc() public override int Advance(int target) { - Debug.Assert(m_doc != NO_MORE_DOCS); + Debugging.Assert(() => m_doc != NO_MORE_DOCS); while (true) { if (m_subScorers[0].Advance(target) != NO_MORE_DOCS) diff --git a/src/Lucene.Net/Search/DocIdSetIterator.cs b/src/Lucene.Net/Search/DocIdSetIterator.cs index 6a87bbe619..7675b5e0b5 100644 --- a/src/Lucene.Net/Search/DocIdSetIterator.cs +++ b/src/Lucene.Net/Search/DocIdSetIterator.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using 
Lucene.Net.Diagnostics; namespace Lucene.Net.Search { @@ -45,8 +45,8 @@ public DocIdSetIteratorAnonymousInnerClassHelper() public override int Advance(int target) { - Debug.Assert(!exhausted); - Debug.Assert(target >= 0); + Debugging.Assert(() => !exhausted); + Debugging.Assert(() => target >= 0); exhausted = true; return NO_MORE_DOCS; } @@ -55,7 +55,7 @@ public override int Advance(int target) public override int NextDoc() { - Debug.Assert(!exhausted); + Debugging.Assert(() => !exhausted); exhausted = true; return NO_MORE_DOCS; } @@ -139,7 +139,7 @@ public override long GetCost() /// protected internal int SlowAdvance(int target) { - Debug.Assert(DocID == NO_MORE_DOCS || DocID < target); // can happen when the enum is not positioned yet + Debugging.Assert(() => DocID == NO_MORE_DOCS || DocID < target); // can happen when the enum is not positioned yet int doc; do { diff --git a/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs b/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs index d5d6fb7f87..6fe02f16cc 100644 --- a/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs +++ b/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.Text; namespace Lucene.Net.Search @@ -22,8 +22,8 @@ namespace Lucene.Net.Search */ using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues; /// @@ -123,7 +123,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; } - Debug.Assert(inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); return new FieldCacheDocIdSetAnonymousInnerClassHelper(this, context.AtomicReader.MaxDoc, acceptDocs, docTermOrds, inclusiveLowerPoint, inclusiveUpperPoint); } diff --git a/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs b/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs index e4a9efbdb5..b90bac9a5c 100644 --- a/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs +++ b/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -21,8 +21,8 @@ namespace Lucene.Net.Search */ using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; using IndexReader = Lucene.Net.Index.IndexReader; using Int64BitSet = Lucene.Net.Util.Int64BitSet; using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues; @@ -100,7 +100,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo Int64BitSet termSet = new Int64BitSet(docTermOrds.ValueCount); TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousInnerClassHelper(this, docTermOrds)); - Debug.Assert(termsEnum != null); + Debugging.Assert(() => termsEnum != null); if (termsEnum.Next() != null) { // fill into a bitset diff --git a/src/Lucene.Net/Search/ExactPhraseScorer.cs b/src/Lucene.Net/Search/ExactPhraseScorer.cs index 7cf88b625b..74f86d0773 100644 --- a/src/Lucene.Net/Search/ExactPhraseScorer.cs +++ b/src/Lucene.Net/Search/ExactPhraseScorer.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; -using System.Diagnostics; 
namespace Lucene.Net.Search { @@ -263,7 +263,7 @@ private int PhraseFreq() cs.LastPos = cs.Pos; int posIndex = cs.Pos - chunkStart; counts[posIndex] = 1; - Debug.Assert(gens[posIndex] != gen); + Debugging.Assert(() => gens[posIndex] != gen); gens[posIndex] = gen; } diff --git a/src/Lucene.Net/Search/FieldCacheImpl.cs b/src/Lucene.Net/Search/FieldCacheImpl.cs index 42c9be4d22..c4bdc5bc6e 100644 --- a/src/Lucene.Net/Search/FieldCacheImpl.cs +++ b/src/Lucene.Net/Search/FieldCacheImpl.cs @@ -1,11 +1,11 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using Lucene.Net.Support.IO; using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; @@ -173,7 +173,7 @@ public ReaderClosedListenerAnonymousInnerClassHelper(FieldCacheImpl outerInstanc public void OnClose(IndexReader owner) { - Debug.Assert(owner is AtomicReader); + Debugging.Assert(() => owner is AtomicReader); outerInstance.PurgeByCacheKey(((AtomicReader)owner).CoreCacheKey); } } @@ -409,7 +409,7 @@ public virtual void DoUninvert(AtomicReader reader, string field, bool setDocsWi if (setDocsWithField) { int termsDocCount = terms.DocCount; - Debug.Assert(termsDocCount <= maxDoc); + Debugging.Assert(() => termsDocCount <= maxDoc); if (termsDocCount == maxDoc) { // Fast case: all docs have this field: @@ -475,7 +475,7 @@ internal virtual void SetDocsWithField(AtomicReader reader, string field, IBits if (numSet >= maxDoc) { // The cardinality of the BitSet is maxDoc if all documents have a value. - Debug.Assert(numSet == maxDoc); + Debugging.Assert(() => numSet == maxDoc); bits = new Lucene.Net.Util.Bits.MatchAllBits(maxDoc); } else @@ -907,7 +907,7 @@ internal class Int32sFromArray : FieldCache.Int32s public Int32sFromArray(PackedInt32s.Reader values, int minValue) { - Debug.Assert(values.BitsPerValue <= 32); + Debugging.Assert(() => values.BitsPerValue <= 32); this.values = values; this.minValue = minValue; } @@ -1095,7 +1095,7 @@ protected override object CreateValue(AtomicReader reader, CacheKey key, bool se if (terms != null) { int termsDocCount = terms.DocCount; - Debug.Assert(termsDocCount <= maxDoc); + Debugging.Assert(() => termsDocCount <= maxDoc); if (termsDocCount == maxDoc) { // Fast case: all docs have this field: @@ -1137,7 +1137,7 @@ protected override object CreateValue(AtomicReader reader, CacheKey key, bool se if (numSet >= maxDoc) { // The cardinality of the BitSet is maxDoc if all documents have a value. 
- Debug.Assert(numSet == maxDoc); + Debugging.Assert(() => numSet == maxDoc); return new Lucene.Net.Util.Bits.MatchAllBits(maxDoc); } return res; diff --git a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs index d736210c16..13438e032e 100644 --- a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs +++ b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.Text; namespace Lucene.Net.Search @@ -22,8 +22,8 @@ namespace Lucene.Net.Search */ using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; using NumericUtils = Lucene.Net.Util.NumericUtils; using SortedDocValues = Lucene.Net.Index.SortedDocValues; @@ -142,7 +142,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; } - Debug.Assert(inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); return new AnonymousClassFieldCacheDocIdSet(fcsi, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs); } diff --git a/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs b/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs index e5340426f7..ac0bd8c60d 100644 --- a/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs +++ b/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -21,8 +21,8 @@ namespace Lucene.Net.Search */ using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; + using IBits = Lucene.Net.Util.IBits; using IndexReader = Lucene.Net.Index.IndexReader; using Int64BitSet = Lucene.Net.Util.Int64BitSet; using SortedDocValues = Lucene.Net.Index.SortedDocValues; @@ -100,7 +100,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo Int64BitSet termSet = new Int64BitSet(fcsi.ValueCount); TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousInnerClassHelper(this, fcsi)); - Debug.Assert(termsEnum != null); + Debugging.Assert(() => termsEnum != null); if (termsEnum.Next() != null) { // fill into a bitset diff --git a/src/Lucene.Net/Search/FieldComparator.cs b/src/Lucene.Net/Search/FieldComparator.cs index 4f7703ff20..ae30170032 100644 --- a/src/Lucene.Net/Search/FieldComparator.cs +++ b/src/Lucene.Net/Search/FieldComparator.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.IO; using JCG = J2N.Collections.Generic; @@ -927,7 +927,7 @@ public override int Compare(int slot1, int slot2) public override int CompareBottom(int doc) { float score = scorer.GetScore(); - Debug.Assert(!float.IsNaN(score)); + Debugging.Assert(() => !float.IsNaN(score)); // LUCENENET specific special case: // In case of zero, we may have a "positive 0" or "negative 0" @@ -938,7 +938,7 @@ public override int CompareBottom(int doc) public override void Copy(int slot, int doc) { scores[slot] = scorer.GetScore(); - Debug.Assert(!float.IsNaN(scores[slot])); + Debugging.Assert(() => !float.IsNaN(scores[slot])); } public override FieldComparer SetNextReader(AtomicReaderContext context) @@ -988,7 +988,7 @@ public override int CompareValues(float 
first, float second) public override int CompareTop(int doc) { float docValue = scorer.GetScore(); - Debug.Assert(!float.IsNaN(docValue)); + Debugging.Assert(() => !float.IsNaN(docValue)); // LUCENENET specific special case: // In case of zero, we may have a "positive 0" or "negative 0" @@ -1216,7 +1216,7 @@ public override int Compare(int slot1, int slot2) public override int CompareBottom(int doc) { - Debug.Assert(bottomSlot != -1); + Debugging.Assert(() => bottomSlot != -1); int docOrd = termsIndex.GetOrd(doc); if (docOrd == -1) { @@ -1250,7 +1250,7 @@ public override void Copy(int slot, int doc) } else { - Debug.Assert(ord >= 0); + Debugging.Assert(() => ord >= 0); if (values[slot] == null) { values[slot] = new BytesRef(); @@ -1319,7 +1319,7 @@ public override void SetBottom(int slot) if (bottomValue == null) { // missingOrd is null for all segments - Debug.Assert(ords[bottomSlot] == missingOrd); + Debugging.Assert(() => ords[bottomSlot] == missingOrd); bottomOrd = missingOrd; bottomSameReader = true; readerGen[bottomSlot] = currentReaderGen; diff --git a/src/Lucene.Net/Search/FieldValueHitQueue.cs b/src/Lucene.Net/Search/FieldValueHitQueue.cs index 550812445f..10983a51cc 100644 --- a/src/Lucene.Net/Search/FieldValueHitQueue.cs +++ b/src/Lucene.Net/Search/FieldValueHitQueue.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.IO; @@ -72,8 +72,8 @@ public OneComparerFieldValueHitQueue(SortField[] fields, int size) /// true if document a should be sorted after document b. protected internal override bool LessThan(T hitA, T hitB) { - Debug.Assert(hitA != hitB); - Debug.Assert(hitA.Slot != hitB.Slot); + Debugging.Assert(() => hitA != hitB); + Debugging.Assert(() => hitA.Slot != hitB.Slot); int c = oneReverseMul * m_firstComparer.Compare(hitA.Slot, hitB.Slot); if (c != 0) @@ -107,8 +107,8 @@ public MultiComparersFieldValueHitQueue(SortField[] fields, int size) protected internal override bool LessThan(T hitA, T hitB) { - Debug.Assert(hitA != hitB); - Debug.Assert(hitA.Slot != hitB.Slot); + Debugging.Assert(() => hitA != hitB); + Debugging.Assert(() => hitA.Slot != hitB.Slot); int numComparers = m_comparers.Length; for (int i = 0; i < numComparers; ++i) diff --git a/src/Lucene.Net/Search/FilteredQuery.cs b/src/Lucene.Net/Search/FilteredQuery.cs index 9ff294b309..786b81e096 100644 --- a/src/Lucene.Net/Search/FilteredQuery.cs +++ b/src/Lucene.Net/Search/FilteredQuery.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; @@ -139,7 +139,7 @@ public override Explanation Explain(AtomicReaderContext ir, int i) // return a filtering scorer public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - Debug.Assert(outerInstance.filter != null); + Debugging.Assert(() => outerInstance.filter != null); DocIdSet filterDocIdSet = outerInstance.filter.GetDocIdSet(context, acceptDocs); if (filterDocIdSet == null) @@ -154,7 +154,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) // return a filtering top scorer public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, IBits acceptDocs) { - Debug.Assert(outerInstance.filter != null); + Debugging.Assert(() => outerInstance.filter != null); DocIdSet filterDocIdSet = outerInstance.filter.GetDocIdSet(context, acceptDocs); if (filterDocIdSet == null) @@ 
-449,7 +449,7 @@ public override bool Equals(object o) { return false; } - Debug.Assert(o is FilteredQuery); + Debugging.Assert(() => o is FilteredQuery); FilteredQuery fq = (FilteredQuery)o; return fq.query.Equals(this.query) && fq.filter.Equals(this.filter) && fq.strategy.Equals(this.strategy); } @@ -594,7 +594,7 @@ public override Scorer FilteredScorer(AtomicReaderContext context, Weight weight } else { - Debug.Assert(firstFilterDoc > -1); + Debugging.Assert(() => firstFilterDoc > -1); // we are gonna advance() this scorer, so we set inorder=true/toplevel=false // we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice Scorer scorer = weight.GetScorer(context, null); diff --git a/src/Lucene.Net/Search/FuzzyTermsEnum.cs b/src/Lucene.Net/Search/FuzzyTermsEnum.cs index 41ab164dae..c095147826 100644 --- a/src/Lucene.Net/Search/FuzzyTermsEnum.cs +++ b/src/Lucene.Net/Search/FuzzyTermsEnum.cs @@ -1,11 +1,11 @@ using J2N; using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using JCG = J2N.Collections.Generic; @@ -33,13 +33,13 @@ namespace Lucene.Net.Search using Automaton = Lucene.Net.Util.Automaton.Automaton; using BasicAutomata = Lucene.Net.Util.Automaton.BasicAutomata; using BasicOperations = Lucene.Net.Util.Automaton.BasicOperations; - using IBits = Lucene.Net.Util.IBits; using ByteRunAutomaton = Lucene.Net.Util.Automaton.ByteRunAutomaton; using BytesRef = Lucene.Net.Util.BytesRef; using CompiledAutomaton = Lucene.Net.Util.Automaton.CompiledAutomaton; using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum; using DocsEnum = Lucene.Net.Index.DocsEnum; using FilteredTermsEnum = Lucene.Net.Index.FilteredTermsEnum; + using IBits = Lucene.Net.Util.IBits; using LevenshteinAutomata = Lucene.Net.Util.Automaton.LevenshteinAutomata; using Term = Lucene.Net.Index.Term; using Terms = Lucene.Net.Index.Terms; @@ -251,7 +251,7 @@ protected virtual void MaxEditDistanceChanged(BytesRef lastTerm, int maxEdits, b // assert newEnum != null; if (newEnum == null) { - Debug.Assert(maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE); + Debugging.Assert(() => maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE); throw new ArgumentException("maxEdits cannot be > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE"); } SetEnum(newEnum); diff --git a/src/Lucene.Net/Search/IndexSearcher.cs b/src/Lucene.Net/Search/IndexSearcher.cs index 63b4615a6e..d79b6460f9 100644 --- a/src/Lucene.Net/Search/IndexSearcher.cs +++ b/src/Lucene.Net/Search/IndexSearcher.cs @@ -1,9 +1,9 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Support.Threading; using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Threading; using System.Threading.Tasks; @@ -135,7 +135,7 @@ public IndexSearcher(IndexReader r, TaskScheduler executor) /// public IndexSearcher(IndexReaderContext context, TaskScheduler executor) { - Debug.Assert(context.IsTopLevel, "IndexSearcher's ReaderContext must be topLevel for reader" + context.Reader); + Debugging.Assert(() => context.IsTopLevel, () => "IndexSearcher's ReaderContext must be topLevel for reader" + context.Reader); reader = context.Reader; this.executor = executor; this.m_readerContext = context; @@ -801,7 +801,7 @@ public SearcherCallableWithSort(ReentrantLock @lock, 
IndexSearcher searcher, Lea public TopFieldDocs Call() { - Debug.Assert(slice.Leaves.Length == 1); + Debugging.Assert(() => slice.Leaves.Length == 1); TopFieldDocs docs = searcher.Search(slice.Leaves, weight, after, nDocs, sort, true, doDocScores || sort.NeedsScores, doMaxScore); @lock.Lock(); try @@ -966,7 +966,7 @@ public virtual CollectionStatistics CollectionStatistics(string field) long sumTotalTermFreq; long sumDocFreq; - Debug.Assert(field != null); + Debugging.Assert(() => field != null); Terms terms = MultiFields.GetTerms(reader, field); if (terms == null) diff --git a/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs b/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs index 3742c1c93d..b8a1567247 100644 --- a/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs +++ b/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs @@ -1,8 +1,8 @@ using J2N; using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -116,7 +116,7 @@ public MinShouldMatchSumScorer(Weight weight, IList subScorers, int mini this.subScorers[i] = this.sortedSubScorers[mm - 1 + i]; } MinheapHeapify(); - Debug.Assert(MinheapCheck()); + Debugging.Assert(MinheapCheck); } /// @@ -140,7 +140,7 @@ public override sealed ICollection GetChildren() public override int NextDoc() { - Debug.Assert(doc != NO_MORE_DOCS); + Debugging.Assert(() => doc != NO_MORE_DOCS); while (true) { // to remove current doc, call next() on all subScorers on current doc within heap diff --git a/src/Lucene.Net/Search/MultiPhraseQuery.cs b/src/Lucene.Net/Search/MultiPhraseQuery.cs index 03816ce864..a4872d5107 100644 --- a/src/Lucene.Net/Search/MultiPhraseQuery.cs +++ b/src/Lucene.Net/Search/MultiPhraseQuery.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Text; using JCG = J2N.Collections.Generic; @@ -25,13 +25,15 @@ namespace Lucene.Net.Search * limitations under the License. 
*/ + using J2N.Collections.Generic.Extensions; + using System.Collections; using ArrayUtil = Lucene.Net.Util.ArrayUtil; using AtomicReader = Lucene.Net.Index.AtomicReader; using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum; using DocsEnum = Lucene.Net.Index.DocsEnum; + using IBits = Lucene.Net.Util.IBits; using IndexReader = Lucene.Net.Index.IndexReader; using IndexReaderContext = Lucene.Net.Index.IndexReaderContext; using Similarity = Lucene.Net.Search.Similarities.Similarity; @@ -42,8 +44,6 @@ namespace Lucene.Net.Search using TermsEnum = Lucene.Net.Index.TermsEnum; using TermState = Lucene.Net.Index.TermState; using ToStringUtils = Lucene.Net.Util.ToStringUtils; - using System.Collections; - using J2N.Collections.Generic.Extensions; /// /// is a generalized version of , with an added @@ -223,7 +223,7 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - Debug.Assert(outerInstance.termArrays.Count > 0); + Debugging.Assert(() => outerInstance.termArrays.Count > 0); AtomicReader reader = (context.AtomicReader); IBits liveDocs = acceptDocs; @@ -286,7 +286,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) if (postingsEnum == null) { // term does exist, but has no positions - Debug.Assert(termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, "termstate found but no term exists in reader"); + Debugging.Assert(() => termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, () => "termstate found but no term exists in reader"); throw new InvalidOperationException("field \"" + term.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.Text() + ")"); } diff --git a/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs b/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs index ca053af0ff..9d7143502e 100644 --- a/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs +++ b/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -22,10 +22,10 @@ namespace Lucene.Net.Search using AtomicReader = Lucene.Net.Index.AtomicReader; using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; - using IBits = Lucene.Net.Util.IBits; using DocsEnum = Lucene.Net.Index.DocsEnum; using Fields = Lucene.Net.Index.Fields; using FixedBitSet = Lucene.Net.Util.FixedBitSet; + using IBits = Lucene.Net.Util.IBits; using Terms = Lucene.Net.Index.Terms; using TermsEnum = Lucene.Net.Index.TermsEnum; @@ -109,7 +109,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo } TermsEnum termsEnum = m_query.GetTermsEnum(terms); - Debug.Assert(termsEnum != null); + Debugging.Assert(() => termsEnum != null); if (termsEnum.Next() != null) { // fill into a FixedBitSet diff --git a/src/Lucene.Net/Search/NumericRangeQuery.cs b/src/Lucene.Net/Search/NumericRangeQuery.cs index aeef50b596..0bbe3f9da8 100644 --- a/src/Lucene.Net/Search/NumericRangeQuery.cs +++ b/src/Lucene.Net/Search/NumericRangeQuery.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.Text; @@ -320,7 +320,7 @@ internal 
NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - Debug.Assert(this.outerInstance.dataType == NumericType.DOUBLE); + Debugging.Assert(() => this.outerInstance.dataType == NumericType.DOUBLE); minBound = (this.outerInstance.min == null) ? INT64_NEGATIVE_INFINITY : NumericUtils.DoubleToSortableInt64(Convert.ToDouble(this.outerInstance.min.Value, CultureInfo.InvariantCulture)); } if (!this.outerInstance.minInclusive && this.outerInstance.min != null) @@ -340,7 +340,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - Debug.Assert(this.outerInstance.dataType == NumericType.DOUBLE); + Debugging.Assert(() => this.outerInstance.dataType == NumericType.DOUBLE); maxBound = (this.outerInstance.max == null) ? INT64_POSITIVE_INFINITY : NumericUtils.DoubleToSortableInt64(Convert.ToDouble(this.outerInstance.max, CultureInfo.InvariantCulture)); } if (!this.outerInstance.maxInclusive && this.outerInstance.max != null) @@ -367,7 +367,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - Debug.Assert(this.outerInstance.dataType == NumericType.SINGLE); + Debugging.Assert(() => this.outerInstance.dataType == NumericType.SINGLE); minBound = (this.outerInstance.min == null) ? INT32_NEGATIVE_INFINITY : NumericUtils.SingleToSortableInt32(Convert.ToSingle(this.outerInstance.min, CultureInfo.InvariantCulture)); } if (!this.outerInstance.minInclusive && this.outerInstance.min != null) @@ -387,7 +387,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - Debug.Assert(this.outerInstance.dataType == NumericType.SINGLE); + Debugging.Assert(() => this.outerInstance.dataType == NumericType.SINGLE); maxBound = (this.outerInstance.max == null) ? 
INT32_POSITIVE_INFINITY : NumericUtils.SingleToSortableInt32(Convert.ToSingle(this.outerInstance.max, CultureInfo.InvariantCulture)); } if (!this.outerInstance.maxInclusive && this.outerInstance.max != null) @@ -445,10 +445,10 @@ public override sealed void AddRange(BytesRef minPrefixCoded, BytesRef maxPrefix private void NextRange() { - Debug.Assert(rangeBounds.Count % 2 == 0); + Debugging.Assert(() => rangeBounds.Count % 2 == 0); currentLowerBound = rangeBounds.Dequeue(); - Debug.Assert(currentUpperBound == null || termComp.Compare(currentUpperBound, currentLowerBound) <= 0, "The current upper bound must be <= the new lower bound"); + Debugging.Assert(() => currentUpperBound == null || termComp.Compare(currentUpperBound, currentLowerBound) <= 0, () => "The current upper bound must be <= the new lower bound"); currentUpperBound = rangeBounds.Dequeue(); } @@ -469,7 +469,7 @@ protected override sealed BytesRef NextSeekTerm(BytesRef term) } // no more sub-range enums available - Debug.Assert(rangeBounds.Count == 0); + Debugging.Assert(() => rangeBounds.Count == 0); currentLowerBound = currentUpperBound = null; return null; } diff --git a/src/Lucene.Net/Search/PhraseQuery.cs b/src/Lucene.Net/Search/PhraseQuery.cs index 0ebb334d9c..17c10da0f5 100644 --- a/src/Lucene.Net/Search/PhraseQuery.cs +++ b/src/Lucene.Net/Search/PhraseQuery.cs @@ -1,10 +1,10 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; using System.Text; using JCG = J2N.Collections.Generic; @@ -337,7 +337,7 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - Debug.Assert(outerInstance.terms.Count > 0); + Debugging.Assert(() => outerInstance.terms.Count > 0); AtomicReader reader = context.AtomicReader; IBits liveDocs = acceptDocs; PostingsAndFreq[] postingsFreqs = new PostingsAndFreq[outerInstance.terms.Count]; @@ -357,7 +357,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) TermState state = states[i].Get(context.Ord); if (state == null) // term doesnt exist in this segment { - Debug.Assert(TermNotInReader(reader, t), "no termstate found but term exists in reader"); + Debugging.Assert(() => TermNotInReader(reader, t), () => "no termstate found but term exists in reader"); return null; } te.SeekExact(t.Bytes, state); @@ -367,7 +367,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) // positions. 
if (postingsEnum == null) { - Debug.Assert(te.SeekExact(t.Bytes), "termstate found but no term exists in reader"); + Debugging.Assert(() => te.SeekExact(t.Bytes), () => "termstate found but no term exists in reader"); // term does exist, but has no positions throw new InvalidOperationException("field \"" + t.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + t.Text() + ")"); } diff --git a/src/Lucene.Net/Search/QueryRescorer.cs b/src/Lucene.Net/Search/QueryRescorer.cs index 655c326e8f..3bca239e8d 100644 --- a/src/Lucene.Net/Search/QueryRescorer.cs +++ b/src/Lucene.Net/Search/QueryRescorer.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -101,7 +101,7 @@ public override TopDocs Rescore(IndexSearcher searcher, TopDocs firstPassTopDocs else { // Query did not match this doc: - Debug.Assert(actualDoc > targetDoc); + Debugging.Assert(() => actualDoc > targetDoc); hit.Score = Combine(hit.Score, false, 0.0f); } diff --git a/src/Lucene.Net/Search/ReferenceManager.cs b/src/Lucene.Net/Search/ReferenceManager.cs index 8d7718f30e..06f62928fa 100644 --- a/src/Lucene.Net/Search/ReferenceManager.cs +++ b/src/Lucene.Net/Search/ReferenceManager.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Support.Threading; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Search @@ -117,7 +117,7 @@ public G Acquire() } if (GetRefCount(@ref) == 0 && (object)current == (object)@ref) { - Debug.Assert(@ref != null); + Debugging.Assert(() => @ref != null); /* if we can't increment the reader but we are still the current reference the RM is in a illegal states since we can't make any progress @@ -200,7 +200,7 @@ private void DoMaybeRefresh() G newReference = RefreshIfNeeded(reference); if (newReference != null) { - Debug.Assert((object)newReference != (object)reference, "refreshIfNeeded should return null if refresh wasn't needed"); + Debugging.Assert(() => (object)newReference != (object)reference, () => "refreshIfNeeded should return null if refresh wasn't needed"); try { SwapReference(newReference); @@ -311,7 +311,7 @@ protected virtual void AfterMaybeRefresh() /// If the release operation on the given resource throws an public void Release(G reference) { - Debug.Assert(reference != null); + Debugging.Assert(() => reference != null); DecRef(reference); } diff --git a/src/Lucene.Net/Search/ReqOptSumScorer.cs b/src/Lucene.Net/Search/ReqOptSumScorer.cs index 2260a45bdf..63b9236d5e 100644 --- a/src/Lucene.Net/Search/ReqOptSumScorer.cs +++ b/src/Lucene.Net/Search/ReqOptSumScorer.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -43,8 +43,8 @@ internal class ReqOptSumScorer : Scorer public ReqOptSumScorer(Scorer reqScorer, Scorer optScorer) : base(reqScorer.m_weight) { - Debug.Assert(reqScorer != null); - Debug.Assert(optScorer != null); + Debugging.Assert(() => reqScorer != null); + Debugging.Assert(() => optScorer != null); this.reqScorer = reqScorer; this.optScorer = optScorer; } diff --git a/src/Lucene.Net/Search/ScoringRewrite.cs b/src/Lucene.Net/Search/ScoringRewrite.cs index b928086e77..3d4ba71300 100644 --- a/src/Lucene.Net/Search/ScoringRewrite.cs +++ b/src/Lucene.Net/Search/ScoringRewrite.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; 
namespace Lucene.Net.Search { @@ -134,7 +134,7 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) { int pos = sort[i]; Term term = new Term(query.Field, col.terms.Get(pos, new BytesRef())); - Debug.Assert(reader.DocFreq(term) == termStates[pos].DocFreq); + Debugging.Assert(() => reader.DocFreq(term) == termStates[pos].DocFreq); AddClause(result, term, termStates[pos].DocFreq, query.Boost * boost[pos], termStates[pos]); } } @@ -173,13 +173,13 @@ public override bool Collect(BytesRef bytes) { int e = terms.Add(bytes); TermState state = termsEnum.GetTermState(); - Debug.Assert(state != null); + Debugging.Assert(() => state != null); if (e < 0) { // duplicate term: update docFreq int pos = (-e) - 1; array.termState[pos].Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq); - Debug.Assert(array.boost[pos] == boostAtt.Boost, "boost should be equal in all segment TermsEnums"); + Debugging.Assert(() => array.boost[pos] == boostAtt.Boost, () => "boost should be equal in all segment TermsEnums"); } else { @@ -209,7 +209,7 @@ public override int[] Init() int[] ord = base.Init(); boost = new float[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_SINGLE)]; termState = new TermContext[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)]; - Debug.Assert(termState.Length >= ord.Length && boost.Length >= ord.Length); + Debugging.Assert(() => termState.Length >= ord.Length && boost.Length >= ord.Length); return ord; } @@ -223,7 +223,7 @@ public override int[] Grow() Array.Copy(termState, 0, tmpTermState, 0, termState.Length); termState = tmpTermState; } - Debug.Assert(termState.Length >= ord.Length && boost.Length >= ord.Length); + Debugging.Assert(() => termState.Length >= ord.Length && boost.Length >= ord.Length); return ord; } diff --git a/src/Lucene.Net/Search/SearcherManager.cs b/src/Lucene.Net/Search/SearcherManager.cs index 60c5b74577..59829a44fd 100644 --- a/src/Lucene.Net/Search/SearcherManager.cs +++ b/src/Lucene.Net/Search/SearcherManager.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Search @@ -120,7 +120,7 @@ protected override void DecRef(IndexSearcher reference) protected override IndexSearcher RefreshIfNeeded(IndexSearcher referenceToRefresh) { IndexReader r = referenceToRefresh.IndexReader; - Debug.Assert(r is DirectoryReader, "searcher's IndexReader should be a DirectoryReader, but got " + r); + Debugging.Assert(() => r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); IndexReader newReader = DirectoryReader.OpenIfChanged((DirectoryReader)r); if (newReader == null) { @@ -152,7 +152,7 @@ public bool IsSearcherCurrent() try { IndexReader r = searcher.IndexReader; - Debug.Assert(r is DirectoryReader, "searcher's IndexReader should be a DirectoryReader, but got " + r); + Debugging.Assert(() => r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); return ((DirectoryReader)r).IsCurrent(); } finally diff --git a/src/Lucene.Net/Search/Similarities/SimilarityBase.cs b/src/Lucene.Net/Search/Similarities/SimilarityBase.cs index d89640905a..4640dc6937 100644 --- a/src/Lucene.Net/Search/Similarities/SimilarityBase.cs +++ b/src/Lucene.Net/Search/Similarities/SimilarityBase.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; using BytesRef = 
Lucene.Net.Util.BytesRef; using FieldInvertState = Lucene.Net.Index.FieldInvertState; @@ -101,7 +101,7 @@ protected internal virtual BasicStats NewStats(string field, float queryBoost) protected internal virtual void FillBasicStats(BasicStats stats, CollectionStatistics collectionStats, TermStatistics termStats) { // #positions(field) must be >= #positions(term) - Debug.Assert(collectionStats.SumTotalTermFreq == -1 || collectionStats.SumTotalTermFreq >= termStats.TotalTermFreq); + Debugging.Assert(() => collectionStats.SumTotalTermFreq == -1 || collectionStats.SumTotalTermFreq >= termStats.TotalTermFreq); long numberOfDocuments = collectionStats.MaxDoc; long docFreq = termStats.DocFreq; diff --git a/src/Lucene.Net/Search/SloppyPhraseScorer.cs b/src/Lucene.Net/Search/SloppyPhraseScorer.cs index c78836d5f1..6e2d33019b 100644 --- a/src/Lucene.Net/Search/SloppyPhraseScorer.cs +++ b/src/Lucene.Net/Search/SloppyPhraseScorer.cs @@ -1,7 +1,7 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Search @@ -506,7 +506,7 @@ private IList> GatherRptGroups(JCG.LinkedDictionary pp.rptGroup == -1 || pp.rptGroup == g); pp.rptGroup = g; } } @@ -682,7 +682,7 @@ public override float GetScore() public override int Advance(int target) { - Debug.Assert(target > DocID); + Debugging.Assert(() => target > DocID); do { if (!AdvanceMin(target)) diff --git a/src/Lucene.Net/Search/SortField.cs b/src/Lucene.Net/Search/SortField.cs index 6a13e3618a..6d2ffce6d9 100644 --- a/src/Lucene.Net/Search/SortField.cs +++ b/src/Lucene.Net/Search/SortField.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; @@ -456,7 +456,7 @@ public virtual FieldComparer GetComparer(int numHits, int sortPos) #pragma warning restore 612, 618 case SortFieldType.CUSTOM: - Debug.Assert(comparerSource != null); + Debugging.Assert(() => comparerSource != null); return comparerSource.NewComparer(field, numHits, sortPos, reverse); case SortFieldType.STRING: diff --git a/src/Lucene.Net/Search/SortRescorer.cs b/src/Lucene.Net/Search/SortRescorer.cs index f7937300eb..d373e982bd 100644 --- a/src/Lucene.Net/Search/SortRescorer.cs +++ b/src/Lucene.Net/Search/SortRescorer.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -92,7 +92,7 @@ public override Explanation Explain(IndexSearcher searcher, Explanation firstPas { TopDocs oneHit = new TopDocs(1, new ScoreDoc[] { new ScoreDoc(docID, firstPassExplanation.Value) }); TopDocs hits = Rescore(searcher, oneHit, 1); - Debug.Assert(hits.TotalHits == 1); + Debugging.Assert(() => hits.TotalHits == 1); // TODO: if we could ask the Sort to explain itself then // we wouldn't need the separate ExpressionRescorer... 
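Every hunk in this patch applies the same mechanical transformation: Debug.Assert(condition, message) becomes Debugging.Assert(() => condition, () => message), with both arguments wrapped in delegates. The point of the wrapping is that a delegate body is not evaluated until it is invoked, so when assertions are switched off neither the condition nor the message string is computed on the hot path; the string concatenations in particular (for example the DocFreq message in the ScoringRewrite hunk above) would otherwise allocate on every call. A minimal sketch of such a helper, with assumed names and a stand-in exception type (the shipped Debugging class may gate and throw differently):

    using System;

    internal static class LazyAssert
    {
        // Assumed runtime toggle; how the real helper is enabled is not shown in these hunks.
        public static bool Enabled { get; set; }

        public static void Assert(Func<bool> condition, Func<string> messageFactory)
        {
            // The delegates are only invoked past this point, so when Enabled is
            // false the condition and the message string are never computed.
            if (Enabled && !condition())
                throw new Exception(messageFactory()); // stand-in exception type
        }
    }

    // Call-site shape, mirroring the conversions in this patch:
    //   Debug.Assert(x > 0, "x=" + x);                    // message built on every call
    //   Debugging.Assert(() => x > 0, () => "x=" + x);    // message built only on failure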
diff --git a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs index b4976f05e4..9424fc8a3a 100644 --- a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs +++ b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using JCG = J2N.Collections.Generic; @@ -285,7 +285,7 @@ private bool ToSameDoc() } for (int i = 0; i < subSpansByDoc.Length; i++) { - Debug.Assert((subSpansByDoc[i].Doc == maxDoc), " NearSpansOrdered.toSameDoc() spans " + subSpansByDoc[0] + "\n at doc " + subSpansByDoc[i].Doc + ", but should be at " + maxDoc); + Debugging.Assert(() => subSpansByDoc[i].Doc == maxDoc, () => " NearSpansOrdered.toSameDoc() spans " + subSpansByDoc[0] + "\n at doc " + subSpansByDoc[i].Doc + ", but should be at " + maxDoc); } inSameDoc = true; return true; @@ -298,7 +298,7 @@ private bool ToSameDoc() /// and ends before . internal static bool DocSpansOrdered(Spans spans1, Spans spans2) { - Debug.Assert(spans1.Doc == spans2.Doc, "doc1 " + spans1.Doc + " != doc2 " + spans2.Doc); + Debugging.Assert(() => spans1.Doc == spans2.Doc, () => "doc1 " + spans1.Doc + " != doc2 " + spans2.Doc); int start1 = spans1.Start; int start2 = spans2.Start; /* Do not call docSpansOrdered(int,int,int,int) to avoid invoking .end() : */ @@ -409,7 +409,7 @@ private bool ShrinkToAfterShortestMatch() possibleMatchPayloads.UnionWith(possiblePayload); } - Debug.Assert(prevStart <= matchStart); + Debugging.Assert(() => prevStart <= matchStart); if (matchStart > prevEnd) // Only non overlapping spans add to slop. { matchSlop += (matchStart - prevEnd); diff --git a/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs b/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs index d986792a8a..abc997b0e5 100644 --- a/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Text; namespace Lucene.Net.Search.Spans @@ -41,7 +41,7 @@ public SpanFirstQuery(SpanQuery match, int end) protected override AcceptStatus AcceptPosition(Spans spans) { - Debug.Assert(spans.Start != spans.End, "start equals end: " + spans.Start); + Debugging.Assert(() => spans.Start != spans.End, () => "start equals end: " + spans.Start); if (spans.Start >= m_end) { return AcceptStatus.NO_AND_ADVANCE; diff --git a/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs b/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs index d8571de6b6..2eeb036360 100644 --- a/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Text; namespace Lucene.Net.Search.Spans @@ -40,7 +40,7 @@ public SpanPositionRangeQuery(SpanQuery match, int start, int end) protected override AcceptStatus AcceptPosition(Spans spans) { - Debug.Assert(spans.Start != spans.End); + Debugging.Assert(() => spans.Start != spans.End); if (spans.Start >= m_end) { return AcceptStatus.NO_AND_ADVANCE; diff --git a/src/Lucene.Net/Search/Spans/TermSpans.cs b/src/Lucene.Net/Search/Spans/TermSpans.cs index e4edf6901a..3f610e098e 100644 --- a/src/Lucene.Net/Search/Spans/TermSpans.cs +++ b/src/Lucene.Net/Search/Spans/TermSpans.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using 
System.Diagnostics; namespace Lucene.Net.Search.Spans { @@ -76,7 +76,7 @@ public override bool Next() public override bool SkipTo(int target) { - Debug.Assert(target > m_doc); + Debugging.Assert(() => target > m_doc); m_doc = m_postings.Advance(target); if (m_doc == DocIdSetIterator.NO_MORE_DOCS) { diff --git a/src/Lucene.Net/Search/TermCollectingRewrite.cs b/src/Lucene.Net/Search/TermCollectingRewrite.cs index 45eede7453..286ab3aa7a 100644 --- a/src/Lucene.Net/Search/TermCollectingRewrite.cs +++ b/src/Lucene.Net/Search/TermCollectingRewrite.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -68,7 +68,7 @@ internal void CollectTerms(IndexReader reader, MultiTermQuery query, TermCollect } TermsEnum termsEnum = GetTermsEnum(query, terms, collector.Attributes); - Debug.Assert(termsEnum != null); + Debugging.Assert(() => termsEnum != null); if (termsEnum == TermsEnum.EMPTY) { diff --git a/src/Lucene.Net/Search/TermQuery.cs b/src/Lucene.Net/Search/TermQuery.cs index b48b87946e..8278d5a997 100644 --- a/src/Lucene.Net/Search/TermQuery.cs +++ b/src/Lucene.Net/Search/TermQuery.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Text; namespace Lucene.Net.Search @@ -24,8 +24,8 @@ namespace Lucene.Net.Search using AtomicReader = Lucene.Net.Index.AtomicReader; using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; - using IBits = Lucene.Net.Util.IBits; using DocsEnum = Lucene.Net.Index.DocsEnum; + using IBits = Lucene.Net.Util.IBits; using IndexReaderContext = Lucene.Net.Index.IndexReaderContext; using ReaderUtil = Lucene.Net.Index.ReaderUtil; using Similarity = Lucene.Net.Search.Similarities.Similarity; @@ -60,7 +60,7 @@ internal sealed class TermWeight : Weight public TermWeight(TermQuery outerInstance, IndexSearcher searcher, TermContext termStates) { this.outerInstance = outerInstance; - Debug.Assert(termStates != null, "TermContext must not be null"); + Debugging.Assert(() => termStates != null, () => "TermContext must not be null"); this.termStates = termStates; this.similarity = searcher.Similarity; this.stats = similarity.ComputeWeight(outerInstance.Boost, searcher.CollectionStatistics(outerInstance.term.Field), searcher.TermStatistics(outerInstance.term, termStates)); @@ -85,14 +85,14 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - Debug.Assert(termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context), "The top-reader used to create Weight (" + termStates.TopReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.GetTopLevelContext(context)); + Debugging.Assert(() => termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context), () => "The top-reader used to create Weight (" + termStates.TopReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.GetTopLevelContext(context)); TermsEnum termsEnum = GetTermsEnum(context); if (termsEnum == null) { return null; } DocsEnum docs = termsEnum.Docs(acceptDocs, null); - Debug.Assert(docs != null); + Debugging.Assert(() => docs != null); return new TermScorer(this, docs, similarity.GetSimScorer(stats, context)); } @@ -105,7 +105,7 @@ private TermsEnum GetTermsEnum(AtomicReaderContext context) TermState state = termStates.Get(context.Ord); if (state == null) // term is 
not present in that reader { - Debug.Assert(TermNotInReader(context.AtomicReader, outerInstance.term), "no termstate found but term exists in reader term=" + outerInstance.term); + Debugging.Assert(() => TermNotInReader(context.AtomicReader, outerInstance.term), () => "no termstate found but term exists in reader term=" + outerInstance.term); return null; } //System.out.println("LD=" + reader.getLiveDocs() + " set?=" + (reader.getLiveDocs() != null ? reader.getLiveDocs().get(0) : "null")); @@ -170,7 +170,7 @@ public TermQuery(Term t, int docFreq) /// public TermQuery(Term t, TermContext states) { - Debug.Assert(states != null); + Debugging.Assert(() => states != null); term = t; docFreq = states.DocFreq; perReaderTermState = states; diff --git a/src/Lucene.Net/Search/TermScorer.cs b/src/Lucene.Net/Search/TermScorer.cs index c52a6b65b4..14cb6154e4 100644 --- a/src/Lucene.Net/Search/TermScorer.cs +++ b/src/Lucene.Net/Search/TermScorer.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Search { @@ -62,7 +62,7 @@ public override int NextDoc() public override float GetScore() { - Debug.Assert(DocID != NO_MORE_DOCS); + Debugging.Assert(() => DocID != NO_MORE_DOCS); return docScorer.Score(docsEnum.DocID, docsEnum.Freq); } diff --git a/src/Lucene.Net/Search/TermStatistics.cs b/src/Lucene.Net/Search/TermStatistics.cs index 9ee976cba3..7b082ef0d2 100644 --- a/src/Lucene.Net/Search/TermStatistics.cs +++ b/src/Lucene.Net/Search/TermStatistics.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Search { @@ -37,8 +37,8 @@ public class TermStatistics /// public TermStatistics(BytesRef term, long docFreq, long totalTermFreq) { - Debug.Assert(docFreq >= 0); - Debug.Assert(totalTermFreq == -1 || totalTermFreq >= docFreq); // #positions must be >= #postings + Debugging.Assert(() => docFreq >= 0); + Debugging.Assert(() => totalTermFreq == -1 || totalTermFreq >= docFreq); // #positions must be >= #postings this.term = term; this.docFreq = docFreq; this.totalTermFreq = totalTermFreq; diff --git a/src/Lucene.Net/Search/TopDocs.cs b/src/Lucene.Net/Search/TopDocs.cs index eac80c822c..64b9131de8 100644 --- a/src/Lucene.Net/Search/TopDocs.cs +++ b/src/Lucene.Net/Search/TopDocs.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Runtime.CompilerServices; @@ -107,7 +107,7 @@ public ScoreMergeSortQueue(TopDocs[] shardHits) // Returns true if first is < second protected internal override bool LessThan(ShardRef first, ShardRef second) { - Debug.Assert(first != second); + Debugging.Assert(() => first != second); float firstScore = shardHits[first.ShardIndex][first.HitIndex].Score; float secondScore = shardHits[second.ShardIndex][second.HitIndex].Score; @@ -134,7 +134,7 @@ protected internal override bool LessThan(ShardRef first, ShardRef second) { // Tie break in same shard: resolve however the // shard had resolved it: - Debug.Assert(first.HitIndex != second.HitIndex); + Debugging.Assert(() => first.HitIndex != second.HitIndex); return first.HitIndex < second.HitIndex; } } @@ -191,7 +191,7 @@ public MergeSortQueue(Sort sort, TopDocs[] shardHits) // Returns true if first is < second protected internal override bool LessThan(ShardRef first, ShardRef second) { - Debug.Assert(first != second); + Debugging.Assert(() => first != second); FieldDoc firstFD = (FieldDoc)shardHits[first.ShardIndex][first.HitIndex]; FieldDoc 
secondFD = (FieldDoc)shardHits[second.ShardIndex][second.HitIndex]; //System.out.println(" lessThan:\n first=" + first + " doc=" + firstFD.doc + " score=" + firstFD.score + "\n second=" + second + " doc=" + secondFD.doc + " score=" + secondFD.score); @@ -226,7 +226,7 @@ protected internal override bool LessThan(ShardRef first, ShardRef second) // Tie break in same shard: resolve however the // shard had resolved it: //System.out.println(" return tb " + (first.hitIndex < second.hitIndex)); - Debug.Assert(first.HitIndex != second.HitIndex); + Debugging.Assert(() => first.HitIndex != second.HitIndex); return first.HitIndex < second.HitIndex; } } @@ -305,7 +305,7 @@ public static TopDocs Merge(Sort sort, int start, int size, TopDocs[] shardHits) int hitUpto = 0; while (hitUpto < numIterOnHits) { - Debug.Assert(queue.Count > 0); + Debugging.Assert(() => queue.Count > 0); ShardRef @ref = queue.Pop(); ScoreDoc hit = shardHits[@ref.ShardIndex].ScoreDocs[@ref.HitIndex++]; hit.ShardIndex = @ref.ShardIndex; diff --git a/src/Lucene.Net/Search/TopScoreDocCollector.cs b/src/Lucene.Net/Search/TopScoreDocCollector.cs index cd9c1820d9..3e8c330446 100644 --- a/src/Lucene.Net/Search/TopScoreDocCollector.cs +++ b/src/Lucene.Net/Search/TopScoreDocCollector.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; namespace Lucene.Net.Search { @@ -51,8 +51,8 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle these scores: - Debug.Assert(score != float.NegativeInfinity); - Debug.Assert(!float.IsNaN(score)); + Debugging.Assert(() => !float.IsNegativeInfinity(score)); + Debugging.Assert(() => !float.IsNaN(score)); m_totalHits++; if (score <= pqTop.Score) @@ -91,8 +91,8 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle these scores: - Debug.Assert(score != float.NegativeInfinity); - Debug.Assert(!float.IsNaN(score)); + Debugging.Assert(() => !float.IsNegativeInfinity(score)); + Debugging.Assert(() => !float.IsNaN(score)); m_totalHits++; @@ -145,7 +145,7 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle NaN - Debug.Assert(!float.IsNaN(score)); + Debugging.Assert(() => !float.IsNaN(score)); m_totalHits++; if (score < pqTop.Score) @@ -188,7 +188,7 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle NaN - Debug.Assert(!float.IsNaN(score)); + Debugging.Assert(() => !float.IsNaN(score)); m_totalHits++; if (score > after.Score || (score == after.Score && doc <= afterDoc)) diff --git a/src/Lucene.Net/Search/TopTermsRewrite.cs b/src/Lucene.Net/Search/TopTermsRewrite.cs index f4b9b5e37d..55c585f3cf 100644 --- a/src/Lucene.Net/Search/TopTermsRewrite.cs +++ b/src/Lucene.Net/Search/TopTermsRewrite.cs @@ -1,7 +1,7 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Search @@ -84,7 +84,7 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) foreach (ScoreTerm st in scoreTerms) { Term term = new Term(query.m_field, st.Bytes); - Debug.Assert(reader.DocFreq(term) == st.TermState.DocFreq, "reader DF is " + reader.DocFreq(term) + " vs " + st.TermState.DocFreq + " term=" + term); + Debugging.Assert(() => reader.DocFreq(term) == st.TermState.DocFreq, () => "reader DF is " + 
reader.DocFreq(term) + " vs " + st.TermState.DocFreq + " term=" + term); AddClause(q, term, st.TermState.DocFreq, query.Boost * st.Boost, st.TermState); // add to query } return q; @@ -120,7 +120,7 @@ public override void SetNextEnum(TermsEnum termsEnum) this.termsEnum = termsEnum; this.termComp = termsEnum.Comparer; - Debug.Assert(CompareToLastTerm(null)); + Debugging.Assert(() => CompareToLastTerm(null)); // lazy init the initial ScoreTerm because comparer is not known on ctor: if (st == null) @@ -145,7 +145,7 @@ private bool CompareToLastTerm(BytesRef t) } else { - Debug.Assert(termsEnum.Comparer.Compare(lastTerm, t) < 0, "lastTerm=" + lastTerm + " t=" + t); + Debugging.Assert(() => termsEnum.Comparer.Compare(lastTerm, t) < 0, () => "lastTerm=" + lastTerm + " t=" + t); lastTerm.CopyBytes(t); } return true; @@ -157,7 +157,7 @@ public override bool Collect(BytesRef bytes) // make sure within a single seg we always collect // terms in order - Debug.Assert(CompareToLastTerm(bytes)); + Debugging.Assert(() => CompareToLastTerm(bytes)); //System.out.println("TTR.collect term=" + bytes.utf8ToString() + " boost=" + boost + " ord=" + readerContext.ord); // ignore uncompetitive hits @@ -174,11 +174,11 @@ public override bool Collect(BytesRef bytes) } } TermState state = termsEnum.GetTermState(); - Debug.Assert(state != null); + Debugging.Assert(() => state != null); if (visitedTerms.TryGetValue(bytes, out ScoreTerm t2)) { // if the term is already in the PQ, only update docFreq of term in PQ - Debug.Assert(t2.Boost == boost, "boost should be equal in all segment TermsEnums"); + Debugging.Assert(() => t2.Boost == boost, () => "boost should be equal in all segment TermsEnums"); t2.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq); } else @@ -187,7 +187,7 @@ public override bool Collect(BytesRef bytes) st.Bytes.CopyBytes(bytes); st.Boost = boost; visitedTerms[st.Bytes] = st; - Debug.Assert(st.TermState.DocFreq == 0); + Debugging.Assert(() => st.TermState.DocFreq == 0); st.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq); stQueue.Add(st); // possibly drop entries from queue @@ -201,7 +201,7 @@ public override bool Collect(BytesRef bytes) { st = new ScoreTerm(termComp, new TermContext(m_topReaderContext)); } - Debug.Assert(stQueue.Count <= maxSize, "the PQ size must be limited to maxSize"); + Debugging.Assert(() => stQueue.Count <= maxSize, () => "the PQ size must be limited to maxSize"); // set maxBoostAtt with values to help FuzzyTermsEnum to optimize if (stQueue.Count == maxSize) { @@ -247,7 +247,7 @@ public override bool Equals(object obj) private static readonly IComparer scoreTermSortByTermComp = Comparer.Create((st1, st2) => { - Debug.Assert(st1.TermComp == st2.TermComp, "term comparer should not change between segments"); + Debugging.Assert(() => st1.TermComp == st2.TermComp, () => "term comparer should not change between segments"); return st1.TermComp.Compare(st1.Bytes, st2.Bytes); }); diff --git a/src/Lucene.Net/Store/BaseDirectory.cs b/src/Lucene.Net/Store/BaseDirectory.cs index 19552fc507..63e0d6a3f5 100644 --- a/src/Lucene.Net/Store/BaseDirectory.cs +++ b/src/Lucene.Net/Store/BaseDirectory.cs @@ -1,6 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; -using System.Reflection; namespace Lucene.Net.Store { @@ -67,7 +66,7 @@ public override void ClearLock(string name) public override void SetLockFactory(LockFactory lockFactory) { - Debug.Assert(lockFactory != null); + 
Debugging.Assert(() => lockFactory != null); this.m_lockFactory = lockFactory; lockFactory.LockPrefix = this.GetLockID(); } diff --git a/src/Lucene.Net/Store/BufferedIndexInput.cs b/src/Lucene.Net/Store/BufferedIndexInput.cs index fc0311d8c6..40e32ed03f 100644 --- a/src/Lucene.Net/Store/BufferedIndexInput.cs +++ b/src/Lucene.Net/Store/BufferedIndexInput.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Store @@ -80,7 +80,7 @@ public BufferedIndexInput(string resourceDesc, int bufferSize) /// Change the buffer size used by this public void SetBufferSize(int newSize) { - Debug.Assert(m_buffer == null || bufferSize == m_buffer.Length, "buffer=" + m_buffer + " bufferSize=" + bufferSize + " buffer.length=" + (m_buffer != null ? m_buffer.Length : 0)); + Debugging.Assert(() => m_buffer == null || bufferSize == m_buffer.Length, () => "buffer=" + m_buffer + " bufferSize=" + bufferSize + " buffer.length=" + (m_buffer != null ? m_buffer.Length : 0)); if (newSize != bufferSize) { CheckBufferSize(newSize); diff --git a/src/Lucene.Net/Store/ByteArrayDataOutput.cs b/src/Lucene.Net/Store/ByteArrayDataOutput.cs index 3a46256e53..c23564f40a 100644 --- a/src/Lucene.Net/Store/ByteArrayDataOutput.cs +++ b/src/Lucene.Net/Store/ByteArrayDataOutput.cs @@ -1,5 +1,4 @@ -using System; -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Store { @@ -67,13 +66,13 @@ public virtual void Reset(byte[] bytes, int offset, int len) public override void WriteByte(byte b) { - Debug.Assert(pos < limit); + Debugging.Assert(() => pos < limit); bytes[pos++] = b; } public override void WriteBytes(byte[] b, int offset, int length) { - Debug.Assert(pos + length <= limit); + Debugging.Assert(() => pos + length <= limit); System.Buffer.BlockCopy(b, offset, bytes, pos, length); pos += length; } diff --git a/src/Lucene.Net/Store/ByteBufferIndexInput.cs b/src/Lucene.Net/Store/ByteBufferIndexInput.cs index dc1a7fa73f..821ce705a7 100644 --- a/src/Lucene.Net/Store/ByteBufferIndexInput.cs +++ b/src/Lucene.Net/Store/ByteBufferIndexInput.cs @@ -1,9 +1,7 @@ using J2N.IO; -using Lucene.Net.Util; +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.IO; -using System.Reflection; using System.Runtime.CompilerServices; namespace Lucene.Net.Store @@ -87,8 +85,8 @@ internal ByteBufferIndexInput(string resourceDescription, ByteBuffer[] buffers, // uses RuntimeHelpers.GetHashCode() to find the item, so technically, it IS an identity collection. this.clones = trackClones ? 
new ConditionalWeakTable() : null; - Debug.Assert(chunkSizePower >= 0 && chunkSizePower <= 30); - Debug.Assert(((long)((ulong)length >> chunkSizePower)) < int.MaxValue); + Debugging.Assert(() => chunkSizePower >= 0 && chunkSizePower <= 30); + Debugging.Assert(() => ((long)((ulong)length >> chunkSizePower)) < int.MaxValue); // LUCENENET specific: MMapIndexInput calls SetBuffers() to populate // the buffers, so we need to skip that call if it is null here, and @@ -302,7 +300,7 @@ private ByteBufferIndexInput BuildSlice(long offset, long length) ByteBufferIndexInput clone = (ByteBufferIndexInput)base.Clone(); clone.isClone = true; // we keep clone.clones, so it shares the same map with original and we have no additional cost on clones - Debug.Assert(clone.clones == this.clones); + Debugging.Assert(() => clone.clones == this.clones); clone.buffers = BuildSlice(buffers, offset, length); clone.offset = (int)(offset & chunkSizeMask); clone.length = length; diff --git a/src/Lucene.Net/Store/CompoundFileDirectory.cs b/src/Lucene.Net/Store/CompoundFileDirectory.cs index d3d7e7c6bf..92db1d2f3c 100644 --- a/src/Lucene.Net/Store/CompoundFileDirectory.cs +++ b/src/Lucene.Net/Store/CompoundFileDirectory.cs @@ -1,9 +1,9 @@ using J2N.Collections.Generic.Extensions; using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Store @@ -120,7 +120,7 @@ public CompoundFileDirectory(Directory directory, string fileName, IOContext con } else { - Debug.Assert(!(directory is CompoundFileDirectory), "compound file inside of compound file: " + fileName); + Debugging.Assert(() => !(directory is CompoundFileDirectory), () => "compound file inside of compound file: " + fileName); this.entries = SENTINEL; this.IsOpen = true; writer = new CompoundFileWriter(directory, fileName); @@ -295,7 +295,7 @@ protected override void Dispose(bool disposing) IsOpen = false; if (writer != null) { - Debug.Assert(openForWrite); + Debugging.Assert(() => openForWrite); writer.Dispose(); } else @@ -311,7 +311,7 @@ public override IndexInput OpenInput(string name, IOContext context) lock (this) { EnsureOpen(); - Debug.Assert(!openForWrite); + Debugging.Assert(() => !openForWrite); string id = IndexFileNames.StripSegmentName(name); if (!entries.TryGetValue(id, out FileEntry entry) || entry == null) { @@ -415,7 +415,7 @@ public override Lock MakeLock(string name) public override IndexInputSlicer CreateSlicer(string name, IOContext context) { EnsureOpen(); - Debug.Assert(!openForWrite); + Debugging.Assert(() => !openForWrite); string id = IndexFileNames.StripSegmentName(name); if (!entries.TryGetValue(id, out FileEntry entry) || entry == null) { diff --git a/src/Lucene.Net/Store/CompoundFileWriter.cs b/src/Lucene.Net/Store/CompoundFileWriter.cs index 885426a6ce..3c1953e6ee 100644 --- a/src/Lucene.Net/Store/CompoundFileWriter.cs +++ b/src/Lucene.Net/Store/CompoundFileWriter.cs @@ -1,8 +1,8 @@ using J2N.Collections.Generic.Extensions; using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; @@ -160,7 +160,7 @@ public void Dispose() closed = true; // open the compound stream GetOutput(); - Debug.Assert(dataOut != null); + Debugging.Assert(() => dataOut != null); CodecUtil.WriteFooter(dataOut); } catch (IOException e) @@ -253,7 +253,7 @@ internal 
IndexOutput CreateOutput(string name, IOContext context) bool outputLocked = false; try { - Debug.Assert(name != null, "name must not be null"); + Debugging.Assert(() => name != null, () => "name must not be null"); if (entries.ContainsKey(name)) { throw new ArgumentException("File " + name + " already exists"); @@ -262,7 +262,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) entry.File = name; entries[name] = entry; string id = IndexFileNames.StripSegmentName(name); - Debug.Assert(!seenIDs.Contains(id), "file=\"" + name + "\" maps to id=\"" + id + "\", which was already written"); + Debugging.Assert(() => !seenIDs.Contains(id), () => "file=\"" + name + "\" maps to id=\"" + id + "\", which was already written"); seenIDs.Add(id); DirectCFSIndexOutput @out; @@ -285,7 +285,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) entries.Remove(name); if (outputLocked) // release the output lock if not successful { - Debug.Assert(outputTaken); + Debugging.Assert(() => outputTaken); ReleaseOutputLock(); } } @@ -315,7 +315,7 @@ private void PrunePendingEntries() finally { bool compareAndSet = outputTaken.CompareAndSet(true, false); - Debug.Assert(compareAndSet); + Debugging.Assert(() => compareAndSet); } } } @@ -397,7 +397,7 @@ public override long GetFilePointer() [Obsolete("(4.1) this method will be removed in Lucene 5.0")] public override void Seek(long pos) { - Debug.Assert(!closed); + Debugging.Assert(() => !closed); @delegate.Seek(offset + pos); } @@ -405,21 +405,21 @@ public override long Length { get { - Debug.Assert(!closed); + Debugging.Assert(() => !closed); return @delegate.Length - offset; } } public override void WriteByte(byte b) { - Debug.Assert(!closed); + Debugging.Assert(() => !closed); writtenBytes++; @delegate.WriteByte(b); } public override void WriteBytes(byte[] b, int offset, int length) { - Debug.Assert(!closed); + Debugging.Assert(() => !closed); writtenBytes += length; @delegate.WriteBytes(b, offset, length); } diff --git a/src/Lucene.Net/Store/DataInput.cs b/src/Lucene.Net/Store/DataInput.cs index 512103d7f4..0979d2daae 100644 --- a/src/Lucene.Net/Store/DataInput.cs +++ b/src/Lucene.Net/Store/DataInput.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; using JCG = J2N.Collections.Generic; @@ -324,7 +324,7 @@ public virtual void SkipBytes(long numBytes) { skipBuffer = new byte[SKIP_BUFFER_SIZE]; } - Debug.Assert(skipBuffer.Length == SKIP_BUFFER_SIZE); + Debugging.Assert(() => skipBuffer.Length == SKIP_BUFFER_SIZE); for (long skipped = 0; skipped < numBytes; ) { var step = (int)Math.Min(SKIP_BUFFER_SIZE, numBytes - skipped); diff --git a/src/Lucene.Net/Store/DataOutput.cs b/src/Lucene.Net/Store/DataOutput.cs index 46552c51c2..b7e1937e93 100644 --- a/src/Lucene.Net/Store/DataOutput.cs +++ b/src/Lucene.Net/Store/DataOutput.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Store @@ -231,7 +231,7 @@ public virtual void WriteInt64(long i) /// public void WriteVInt64(long i) { - Debug.Assert(i >= 0L); + Debugging.Assert(() => i >= 0L); while ((i & ~0x7FL) != 0L) { WriteByte((byte)unchecked((sbyte)((i & 0x7FL) | 0x80L))); @@ -262,7 +262,7 @@ public virtual void WriteString(string s) /// Copy numBytes bytes from input to ourself. 
public virtual void CopyBytes(DataInput input, long numBytes) { - Debug.Assert(numBytes >= 0, "numBytes=" + numBytes); + Debugging.Assert(() => numBytes >= 0, () => "numBytes=" + numBytes); long left = numBytes; if (copyBuffer == null) { diff --git a/src/Lucene.Net/Store/IOContext.cs b/src/Lucene.Net/Store/IOContext.cs index 9732248dde..bd791448a4 100644 --- a/src/Lucene.Net/Store/IOContext.cs +++ b/src/Lucene.Net/Store/IOContext.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; -using System.Diagnostics; namespace Lucene.Net.Store { @@ -67,7 +67,7 @@ public IOContext() public IOContext(FlushInfo flushInfo) { - Debug.Assert(flushInfo != null); + Debugging.Assert(() => flushInfo != null); this.Context = UsageContext.FLUSH; this.MergeInfo = null; this.ReadOnce = false; @@ -94,8 +94,8 @@ public IOContext(MergeInfo mergeInfo) private IOContext(UsageContext context, MergeInfo mergeInfo) { - Debug.Assert(context != UsageContext.MERGE || mergeInfo != null, "MergeInfo must not be null if context is MERGE"); - Debug.Assert(context != UsageContext.FLUSH, "Use IOContext(FlushInfo) to create a FLUSH IOContext"); + Debugging.Assert(() => context != UsageContext.MERGE || mergeInfo != null, () => "MergeInfo must not be null if context is MERGE"); + Debugging.Assert(() => context != UsageContext.FLUSH, () => "Use IOContext(FlushInfo) to create a FLUSH IOContext"); this.Context = context; this.ReadOnce = false; this.MergeInfo = mergeInfo; diff --git a/src/Lucene.Net/Store/IndexInput.cs b/src/Lucene.Net/Store/IndexInput.cs index 938772152e..6d8bf5759d 100644 --- a/src/Lucene.Net/Store/IndexInput.cs +++ b/src/Lucene.Net/Store/IndexInput.cs @@ -72,7 +72,7 @@ public void Dispose() /// Returns the current position in this file, where the next read will /// occur. /// - public abstract long GetFilePointer(); + public abstract long GetFilePointer(); // LUCENENET TODO: API - make into property /// /// Sets current position in this file, where the next read will occur. diff --git a/src/Lucene.Net/Store/IndexOutput.cs b/src/Lucene.Net/Store/IndexOutput.cs index ef9549fd0c..5774b4f728 100644 --- a/src/Lucene.Net/Store/IndexOutput.cs +++ b/src/Lucene.Net/Store/IndexOutput.cs @@ -53,7 +53,7 @@ public void Dispose() /// Returns the current position in this file, where the next write will /// occur. /// - public abstract long GetFilePointer(); + public abstract long GetFilePointer(); // LUCENENET TODO: API - Make into property /// /// Sets current position in this file, where the next write will occur. 
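The DataOutput hunk above guards WriteVInt64 with an assertion that its argument is non-negative. The method emits the value seven bits per byte, low-order group first, with the high bit of each byte acting as a continuation flag, so the compact encoding only makes sense for non-negative input. A self-contained sketch of that scheme, assuming the usual unsigned right shift between iterations (the shift line falls outside the hunk shown):

    using System;
    using System.IO;

    internal static class VarInt
    {
        // Sketch of the 7-bits-per-byte variable-length encoding used by WriteVInt64.
        public static void WriteVInt64(Stream output, long i)
        {
            if (i < 0) throw new ArgumentOutOfRangeException(nameof(i)); // assert stand-in
            while ((i & ~0x7FL) != 0L)
            {
                // Low seven bits, plus a set high bit meaning "more bytes follow".
                output.WriteByte((byte)((i & 0x7FL) | 0x80L));
                i = (long)((ulong)i >> 7); // assumed unsigned shift
            }
            output.WriteByte((byte)i); // final byte, continuation flag clear
        }
    }

    // Example: 300 encodes as 0xAC 0x02 (low seven bits 44 | 0x80, then 300 >> 7 == 2).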
diff --git a/src/Lucene.Net/Store/MMapDirectory.cs b/src/Lucene.Net/Store/MMapDirectory.cs index 02bc823a87..a0ccb0bcdc 100644 --- a/src/Lucene.Net/Store/MMapDirectory.cs +++ b/src/Lucene.Net/Store/MMapDirectory.cs @@ -1,9 +1,8 @@ using J2N.IO; using J2N.IO.MemoryMappedFiles; using J2N.Numerics; -using Lucene.Net.Support; +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.IO; using System.IO.MemoryMappedFiles; @@ -112,7 +111,7 @@ public MMapDirectory(DirectoryInfo path, LockFactory lockFactory, int maxChunkSi throw new ArgumentException("Maximum chunk size for mmap must be >0"); } this.chunkSizePower = 31 - maxChunkSize.LeadingZeroCount(); - Debug.Assert(this.chunkSizePower >= 0 && this.chunkSizePower <= 30); + Debugging.Assert(() => this.chunkSizePower >= 0 && this.chunkSizePower <= 30); } /// diff --git a/src/Lucene.Net/Store/NIOFSDirectory.cs b/src/Lucene.Net/Store/NIOFSDirectory.cs index d41c2b34d2..bf26d21140 100644 --- a/src/Lucene.Net/Store/NIOFSDirectory.cs +++ b/src/Lucene.Net/Store/NIOFSDirectory.cs @@ -1,7 +1,7 @@ using J2N.IO; +using Lucene.Net.Diagnostics; using Lucene.Net.Support.IO; using System; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Store @@ -234,7 +234,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) if (b == m_buffer && 0 == offset) { // Use our own pre-wrapped byteBuf: - Debug.Assert(byteBuf != null); + Debugging.Assert(() => byteBuf != null); byteBuf.Clear(); byteBuf.Limit = len; bb = byteBuf; @@ -276,7 +276,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) readOffset += i; readLength -= i; } - Debug.Assert(readLength == 0); + Debugging.Assert(() => readLength == 0); } catch (IOException ioe) { diff --git a/src/Lucene.Net/Store/RAMOutputStream.cs b/src/Lucene.Net/Store/RAMOutputStream.cs index abbcc55fce..d1e431513a 100644 --- a/src/Lucene.Net/Store/RAMOutputStream.cs +++ b/src/Lucene.Net/Store/RAMOutputStream.cs @@ -1,6 +1,6 @@ using Lucene.Net.Support; using System; -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Store @@ -162,7 +162,7 @@ public override void WriteByte(byte b) public override void WriteBytes(byte[] b, int offset, int len) { - Debug.Assert(b != null); + Debugging.Assert(() => b != null); crc.Update(b, offset, len); while (len > 0) { diff --git a/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs b/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs index 08bc05ab48..1d352f7ca0 100644 --- a/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs +++ b/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs @@ -67,7 +67,7 @@ public override void Copy(Directory to, string src, string dest, IOContext conte private RateLimiter GetRateLimiter(IOContext.UsageContext context) { - //Debug.Assert(context != null); // LUCENENET NOTE: In .NET, enum can never be null + //Debugging.Assert(context != null); // LUCENENET NOTE: In .NET, enum can never be null RateLimiter ret; return _contextRateLimiters.TryGetValue(context, out ret) ? 
ret : null; } diff --git a/src/Lucene.Net/Store/SimpleFSDirectory.cs b/src/Lucene.Net/Store/SimpleFSDirectory.cs index 66801a9122..8006f06434 100644 --- a/src/Lucene.Net/Store/SimpleFSDirectory.cs +++ b/src/Lucene.Net/Store/SimpleFSDirectory.cs @@ -226,7 +226,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) // { // throw new EndOfStreamException("read past EOF: " + this + " off: " + offset + " len: " + len + " total: " + total + " chunkLen: " + toRead + " end: " + m_end); // } - // Debug.Assert(i > 0, "RandomAccessFile.read with non zero-length toRead must always read at least one byte"); + // Debugging.Assert(i > 0, "RandomAccessFile.read with non zero-length toRead must always read at least one byte"); // total += i; //} @@ -235,7 +235,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) // all we need to do is Read(). total = m_file.Read(b, offset, len); - //Debug.Assert(total == len); + //Debugging.Assert(total == len); } catch (IOException ioe) { diff --git a/src/Lucene.Net/Support/BitArrayExtensions.cs b/src/Lucene.Net/Support/BitArrayExtensions.cs index 2639bfa7bb..ce15d315ae 100644 --- a/src/Lucene.Net/Support/BitArrayExtensions.cs +++ b/src/Lucene.Net/Support/BitArrayExtensions.cs @@ -278,7 +278,7 @@ public static void SafeSet(this BitArray a, int loc, bool value) // Clears all bits in this BitArray that correspond to a set bit in the parameter BitArray public static void AndNot(this BitArray bitsA, BitArray bitsB) { - //Debug.Assert(bitsA.Length == bitsB.Length, "BitArray lengths are not the same"); + //Debugging.Assert(bitsA.Length == bitsB.Length, "BitArray lengths are not the same"); for (int i = 0; i < bitsA.Length; i++) { //bitsA was longer than bitsB diff --git a/src/Lucene.Net/Support/Collections.cs b/src/Lucene.Net/Support/Collections.cs index c207eb092e..ff632a5b47 100644 --- a/src/Lucene.Net/Support/Collections.cs +++ b/src/Lucene.Net/Support/Collections.cs @@ -2,10 +2,10 @@ using J2N.Collections.Generic.Extensions; using J2N.Collections.ObjectModel; using J2N.Globalization; +using Lucene.Net.Diagnostics; using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.Text; using JCG = J2N.Collections.Generic; @@ -270,7 +270,7 @@ private class ReverseComparer2 : IComparer public ReverseComparer2(IComparer cmp) { - Debug.Assert(cmp != null); + Debugging.Assert(() => cmp != null); this.cmp = cmp; } diff --git a/src/Lucene.Net/Util/ArrayUtil.cs b/src/Lucene.Net/Util/ArrayUtil.cs index 3462fc6d14..8c19f4f38d 100644 --- a/src/Lucene.Net/Util/ArrayUtil.cs +++ b/src/Lucene.Net/Util/ArrayUtil.cs @@ -1,7 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; -using System.Reflection; namespace Lucene.Net.Util { @@ -271,7 +270,7 @@ public static int GetShrinkSize(int currentSize, int targetSize, int bytesPerEle public static short[] Grow(short[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { short[] newArray = new short[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT16)]; @@ -291,7 +290,7 @@ public static short[] Grow(short[] array) public static float[] Grow(float[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer 
overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { float[] newArray = new float[Oversize(minSize, RamUsageEstimator.NUM_BYTES_SINGLE)]; @@ -311,7 +310,7 @@ public static float[] Grow(float[] array) public static double[] Grow(double[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { double[] newArray = new double[Oversize(minSize, RamUsageEstimator.NUM_BYTES_DOUBLE)]; @@ -331,7 +330,7 @@ public static double[] Grow(double[] array) public static short[] Shrink(short[] array, int targetSize) { - Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?"); + Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT16); if (newSize != array.Length) { @@ -347,7 +346,7 @@ public static short[] Shrink(short[] array, int targetSize) public static int[] Grow(int[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { int[] newArray = new int[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT32)]; @@ -367,7 +366,7 @@ public static int[] Grow(int[] array) public static int[] Shrink(int[] array, int targetSize) { - Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?"); + Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT32); if (newSize != array.Length) { @@ -383,7 +382,7 @@ public static int[] Shrink(int[] array, int targetSize) public static long[] Grow(long[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { long[] newArray = new long[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT64)]; @@ -403,7 +402,7 @@ public static long[] Grow(long[] array) public static long[] Shrink(long[] array, int targetSize) { - Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?"); + Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT64); if (newSize != array.Length) { @@ -420,7 +419,7 @@ public static long[] Shrink(long[] array, int targetSize) [CLSCompliant(false)] public static sbyte[] Grow(sbyte[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { var newArray = new sbyte[Oversize(minSize, 1)]; @@ -435,7 +434,7 @@ public static 
sbyte[] Grow(sbyte[] array, int minSize) public static byte[] Grow(byte[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { byte[] newArray = new byte[Oversize(minSize, 1)]; @@ -455,7 +454,7 @@ public static byte[] Grow(byte[] array) public static byte[] Shrink(byte[] array, int targetSize) { - Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?"); + Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, 1); if (newSize != array.Length) { @@ -471,7 +470,7 @@ public static byte[] Shrink(byte[] array, int targetSize) public static bool[] Grow(bool[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { bool[] newArray = new bool[Oversize(minSize, 1)]; @@ -491,7 +490,7 @@ public static bool[] Grow(bool[] array) public static bool[] Shrink(bool[] array, int targetSize) { - Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?"); + Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, 1); if (newSize != array.Length) { @@ -507,7 +506,7 @@ public static bool[] Shrink(bool[] array, int targetSize) public static char[] Grow(char[] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { char[] newArray = new char[Oversize(minSize, RamUsageEstimator.NUM_BYTES_CHAR)]; @@ -527,7 +526,7 @@ public static char[] Grow(char[] array) public static char[] Shrink(char[] array, int targetSize) { - Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?"); + Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_CHAR); if (newSize != array.Length) { @@ -544,7 +543,7 @@ public static char[] Shrink(char[] array, int targetSize) [CLSCompliant(false)] public static int[][] Grow(int[][] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { var newArray = new int[Oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][]; @@ -566,7 +565,7 @@ public static int[][] Grow(int[][] array) [CLSCompliant(false)] public static int[][] Shrink(int[][] array, int targetSize) { - Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?"); + Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = 
GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF); if (newSize != array.Length) { @@ -583,7 +582,7 @@ public static int[][] Shrink(int[][] array, int targetSize) [CLSCompliant(false)] public static float[][] Grow(float[][] array, int minSize) { - Debug.Assert(minSize >= 0, "size must be positive (got " + minSize + "): likely integer overflow?"); + Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { float[][] newArray = new float[Oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][]; @@ -605,7 +604,7 @@ public static float[][] Grow(float[][] array) [CLSCompliant(false)] public static float[][] Shrink(float[][] array, int targetSize) { - Debug.Assert(targetSize >= 0, "size must be positive (got " + targetSize + "): likely integer overflow?"); + Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF); if (newSize != array.Length) { @@ -780,7 +779,7 @@ public static int[] ToInt32Array(ICollection ints) } // paranoia: - Debug.Assert(upto == result.Length); + Debugging.Assert(() => upto == result.Length); return result; } diff --git a/src/Lucene.Net/Util/AttributeSource.cs b/src/Lucene.Net/Util/AttributeSource.cs index 7a5ad5a3f6..3b7927cff3 100644 --- a/src/Lucene.Net/Util/AttributeSource.cs +++ b/src/Lucene.Net/Util/AttributeSource.cs @@ -1,10 +1,9 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; -using System.Reflection; using System.Runtime.CompilerServices; using System.Text; using FlagsAttribute = Lucene.Net.Analysis.TokenAttributes.FlagsAttribute; @@ -371,7 +370,7 @@ public void AddAttributeImpl(Attribute att) foreach (var curInterfaceRef in foundInterfaces) { curInterfaceRef.TryGetTarget(out Type curInterface); - Debug.Assert(curInterface != null, "We have a strong reference on the class holding the interfaces, so they should never get evicted"); + Debugging.Assert(() => curInterface != null, () => "We have a strong reference on the class holding the interfaces, so they should never get evicted"); // Attribute is a superclass of this interface if (!attributes.ContainsKey(curInterface)) { diff --git a/src/Lucene.Net/Util/Automaton/Automaton.cs b/src/Lucene.Net/Util/Automaton/Automaton.cs index 41406a5fcf..b7ae9136c3 100644 --- a/src/Lucene.Net/Util/Automaton/Automaton.cs +++ b/src/Lucene.Net/Util/Automaton/Automaton.cs @@ -1,8 +1,8 @@ using J2N; using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Text; using JCG = J2N.Collections.Generic; @@ -299,7 +299,7 @@ public virtual void SetNumberedStates(State[] states) public virtual void SetNumberedStates(State[] states, int count) { - Debug.Assert(count <= states.Length); + Debugging.Assert(() => count <= states.Length); // TODO: maybe we can eventually allow for oversizing here... 
if (count < states.Length) { @@ -550,7 +550,7 @@ public virtual Transition[][] GetSortedTransitions() s.SortTransitions(Transition.COMPARE_BY_MIN_MAX_THEN_DEST); s.TrimTransitionsArray(); transitions[s.number] = s.TransitionsArray; - Debug.Assert(s.TransitionsArray != null); + Debugging.Assert(() => s.TransitionsArray != null); } return transitions; } diff --git a/src/Lucene.Net/Util/Automaton/BasicOperations.cs b/src/Lucene.Net/Util/Automaton/BasicOperations.cs index 540eb071a5..9a0793c215 100644 --- a/src/Lucene.Net/Util/Automaton/BasicOperations.cs +++ b/src/Lucene.Net/Util/Automaton/BasicOperations.cs @@ -1,7 +1,7 @@ using J2N; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Linq; using System.Text; using JCG = J2N.Collections.Generic; @@ -721,7 +721,7 @@ private PointTransitions Find(int point) if (count == HASHMAP_CUTOVER) { // switch to HashMap on the fly - Debug.Assert(map.Count == 0); + Debugging.Assert(() => map.Count == 0); for (int i = 0; i < count; i++) { map[points[i].point] = points[i]; @@ -845,7 +845,7 @@ public static void Determinize(Automaton a) if (statesSet.upto > 0) { - Debug.Assert(lastPoint != -1); + Debugging.Assert(() => lastPoint != -1); statesSet.ComputeHash(); @@ -868,7 +868,7 @@ public static void Determinize(Automaton a) } else { - Debug.Assert((accCount > 0) == q.accept, "accCount=" + accCount + " vs existing accept=" + q.accept + " states=" + statesSet); + Debugging.Assert(() => (accCount > 0) == q.accept, () => "accCount=" + accCount + " vs existing accept=" + q.accept + " states=" + statesSet); } r.AddTransition(new Transition(lastPoint, point - 1, q)); @@ -902,7 +902,7 @@ public static void Determinize(Automaton a) points.points[i].starts.count = 0; } points.Reset(); - Debug.Assert(statesSet.upto == 0, "upto=" + statesSet.upto); + Debugging.Assert(() => statesSet.upto == 0, () => "upto=" + statesSet.upto); } a.deterministic = true; a.SetNumberedStates(newStatesArray, newStateUpto); diff --git a/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs b/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs index f9eb4b9e38..fd8de06584 100644 --- a/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs +++ b/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; @@ -228,7 +228,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) } } - Debug.Assert(maxTransition != null); + Debugging.Assert(() => maxTransition != null); // Append floorLabel int floorLabel; @@ -256,7 +256,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) Transition[] transitions = sortedTransitions[state]; if (transitions.Length == 0) { - Debug.Assert(RunAutomaton.IsAccept(state)); + Debugging.Assert(() => RunAutomaton.IsAccept(state)); term.Length = idx; //if (DEBUG) System.out.println(" return " + term.utf8ToString()); return term; @@ -265,7 +265,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) { // We are pushing "top" -- so get last label of // last transition: - Debug.Assert(transitions.Length != 0); + Debugging.Assert(() => transitions.Length != 0); Transition lastTransition = transitions[transitions.Length - 1]; if (idx >= term.Bytes.Length) { @@ -364,7 +364,7 @@ public virtual BytesRef Floor(BytesRef input, BytesRef output) Transition[] 
transitions = sortedTransitions[state]; if (transitions.Length == 0) { - Debug.Assert(RunAutomaton.IsAccept(state)); + Debugging.Assert(() => RunAutomaton.IsAccept(state)); output.Length = idx; //if (DEBUG) System.out.println(" return " + output.utf8ToString()); return output; diff --git a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs index 0484d08a52..f1e6f079f5 100644 --- a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs +++ b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs @@ -1,11 +1,11 @@ using J2N; -using J2N.Text; using J2N.Runtime.CompilerServices; +using J2N.Text; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; -using JCG = J2N.Collections.Generic; using Arrays = Lucene.Net.Support.Arrays; +using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util.Automaton { @@ -131,7 +131,7 @@ public override int GetHashCode() /// internal State NewState(int label) { - Debug.Assert(Array.BinarySearch(labels, label) < 0, "State already has transition labeled: " + label); + Debugging.Assert(() => Array.BinarySearch(labels, label) < 0, () => "State already has transition labeled: " + label); labels = Arrays.CopyOf(labels, labels.Length + 1); states = Arrays.CopyOf(states, states.Length + 1); @@ -145,7 +145,7 @@ internal State NewState(int label) /// internal State LastChild() // LUCENENET NOTE: Kept this a method because there is another overload { - Debug.Assert(HasChildren, "No outgoing transitions."); + Debugging.Assert(() => HasChildren, () => "No outgoing transitions."); return states[states.Length - 1]; } @@ -161,7 +161,7 @@ internal State LastChild(int label) { s = states[index]; } - Debug.Assert(s == GetState(label)); + Debugging.Assert(() => s == GetState(label)); return s; } @@ -171,7 +171,7 @@ internal State LastChild(int label) /// internal void ReplaceLastChild(State state) { - Debug.Assert(HasChildren, "No outgoing transitions."); + Debugging.Assert(() => HasChildren, () => "No outgoing transitions."); states[states.Length - 1] = state; } @@ -227,9 +227,9 @@ private static bool ReferenceEquals(object[] a1, object[] a2) /// public void Add(CharsRef current) { - Debug.Assert(stateRegistry != null, "Automaton already built."); - Debug.Assert(previous == null || comparer.Compare(previous, current) <= 0, "Input must be in sorted UTF-8 order: " + previous + " >= " + current); - Debug.Assert(SetPrevious(current)); + Debugging.Assert(() => stateRegistry != null, () => "Automaton already built."); + Debugging.Assert(() => previous == null || comparer.Compare(previous, current) <= 0, () => "Input must be in sorted UTF-8 order: " + previous + " >= " + current); + Debugging.Assert(() => SetPrevious(current)); // Descend in the automaton (find matching prefix). 
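One conversion just above deserves a comment: Debugging.Assert(() => SetPrevious(current)) wraps a call whose only job is bookkeeping. SetPrevious records the current input so the preceding sort-order assert can compare against it on the next Add, and it always returns true, so placing it inside the assert means the copying happens only when asserts are enabled. A self-contained sketch of the idiom, with hypothetical names standing in for the builder's internals:

using System;
using System.Collections.Generic;
using System.Diagnostics;

// Sketch (hypothetical names) of the side-effecting-assert idiom: the asserted
// call always returns true and exists only so its bookkeeping is skipped when
// asserts are compiled out.
internal sealed class SortedInputGuard
{
    private readonly IComparer<string> comparer = StringComparer.Ordinal;
    private string previous;

    // Checked by one assert...
    public bool IsSorted(string current)
        => previous == null || comparer.Compare(previous, current) <= 0;

    // ...recorded by the next. Always true by design. (The real builder copies
    // the input's contents, since callers may reuse the underlying buffer.)
    public bool SetPrevious(string current)
    {
        previous = current;
        return true;
    }

    internal static void Main()
    {
        var guard = new SortedInputGuard();
        foreach (string s in new[] { "ant", "bee", "cat" })
        {
            Debug.Assert(guard.IsSorted(s), "input out of order: " + s);
            Debug.Assert(guard.SetPrevious(s)); // side effect runs only in Debug builds
        }
        Console.WriteLine("sorted input accepted");
    }
}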
int pos = 0, max = current.Length; diff --git a/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs index 590ec8d155..ad11e5488f 100644 --- a/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util.Automaton { @@ -32,12 +32,12 @@ internal class Lev1ParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - Debug.Assert(absState != -1); + Debugging.Assert(() => absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debug.Assert(offset >= 0); + Debugging.Assert(() => offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs index 610c4f88f4..f3e6e362a0 100644 --- a/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util.Automaton { @@ -34,12 +34,12 @@ internal class Lev1TParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - Debug.Assert(absState != -1); + Debugging.Assert(() => absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debug.Assert(offset >= 0); + Debugging.Assert(() => offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs index 924cc679e2..f87f7a048a 100644 --- a/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util.Automaton { @@ -32,12 +32,12 @@ internal class Lev2ParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - Debug.Assert(absState != -1); + Debugging.Assert(() => absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debug.Assert(offset >= 0); + Debugging.Assert(() => offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs index 69e81271c6..1b08abc8fe 100644 --- a/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util.Automaton { @@ -34,12 +34,12 @@ internal class Lev2TParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - Debug.Assert(absState != -1); + Debugging.Assert(() => absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debug.Assert(offset >= 0); + Debugging.Assert(() => offset >= 
0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs b/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs index c1d3db15db..8048eab29c 100644 --- a/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs +++ b/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs @@ -1,7 +1,7 @@ using J2N; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util.Automaton @@ -276,7 +276,7 @@ internal virtual bool IsAccept(int absState) // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debug.Assert(offset >= 0); + Debugging.Assert(() => offset >= 0); return m_w - offset + minErrors[state] <= m_n; } diff --git a/src/Lucene.Net/Util/Automaton/SortedIntSet.cs b/src/Lucene.Net/Util/Automaton/SortedIntSet.cs index 994c964108..92192fd46b 100644 --- a/src/Lucene.Net/Util/Automaton/SortedIntSet.cs +++ b/src/Lucene.Net/Util/Automaton/SortedIntSet.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Text; using JCG = J2N.Collections.Generic; @@ -157,7 +157,7 @@ public void Decr(int num) return; } } - Debug.Assert(false); + Debugging.Assert(() => false); } public void ComputeHash() diff --git a/src/Lucene.Net/Util/Automaton/State.cs b/src/Lucene.Net/Util/Automaton/State.cs index 82bf57dfcb..2e5912ca46 100644 --- a/src/Lucene.Net/Util/Automaton/State.cs +++ b/src/Lucene.Net/Util/Automaton/State.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; @@ -184,7 +184,7 @@ public virtual bool Accept /// public virtual State Step(int c) { - Debug.Assert(c >= 0); + Debugging.Assert(() => c >= 0); for (int i = 0; i < numTransitions; i++) { Transition t = transitionsArray[i]; diff --git a/src/Lucene.Net/Util/Automaton/Transition.cs b/src/Lucene.Net/Util/Automaton/Transition.cs index 452bb41cef..fb4e54fce0 100644 --- a/src/Lucene.Net/Util/Automaton/Transition.cs +++ b/src/Lucene.Net/Util/Automaton/Transition.cs @@ -1,7 +1,6 @@ using J2N.Text; -using System; +using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.Text; @@ -64,7 +63,7 @@ public class Transition /// Destination state. public Transition(int c, State to) { - Debug.Assert(c >= 0); + Debugging.Assert(() => c >= 0); min = max = c; this.to = to; } @@ -77,8 +76,8 @@ public Transition(int c, State to) /// Destination state. 
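The paired asserts repeated through the Lev*ParametricDescription conversions above fall out of how a parametric Levenshtein automaton packs its state: an absolute state encodes a (state, offset) pair as absState = state * (w + 1) + offset, where w is the query word length, so division recovers the state, the remainder recovers the offset, and a valid absState can never yield a negative offset. A worked example with hypothetical values:

using System;

// Worked example (hypothetical numbers) of the packed-state arithmetic behind
// the asserts in the parametric-description Transition methods.
internal static class PackedStateDemo
{
    internal static void Main()
    {
        int w = 5;                                 // query word length
        int state = 3, offset = 4;                 // offset is always in [0, w]
        int absState = state * (w + 1) + offset;   // encode: 22
        Console.WriteLine(absState / (w + 1));     // 3 -> state
        Console.WriteLine(absState % (w + 1));     // 4 -> offset
    }
}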
public Transition(int min, int max, State to) { - Debug.Assert(min >= 0); - Debug.Assert(max >= 0); + Debugging.Assert(() => min >= 0); + Debugging.Assert(() => max >= 0); if (max < min) { int t = max; diff --git a/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs b/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs index 107686af3d..313786d441 100644 --- a/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs +++ b/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs @@ -1,7 +1,7 @@ using J2N; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Text; namespace Lucene.Net.Util.Automaton @@ -177,8 +177,8 @@ private void Build(State start, State end, UTF8Sequence startUTF8, UTF8Sequence } else { - Debug.Assert(startUTF8.len > upto + 1); - Debug.Assert(endUTF8.len > upto + 1); + Debugging.Assert(() => startUTF8.len > upto + 1); + Debugging.Assert(() => endUTF8.len > upto + 1); State n = NewUTF8State(); // Single value leading edge diff --git a/src/Lucene.Net/Util/BroadWord.cs b/src/Lucene.Net/Util/BroadWord.cs index 6a66cff554..c70029ce5c 100644 --- a/src/Lucene.Net/Util/BroadWord.cs +++ b/src/Lucene.Net/Util/BroadWord.cs @@ -1,5 +1,5 @@ using J2N.Numerics; -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util { @@ -70,7 +70,7 @@ public static int Select(long x, int r) long b = (long)((ulong)(((long)((ulong)SmallerUpTo7_8(s, (r * L8_L)) >> 7)) * L8_L) >> 53); // & (~7L); // Step 3, side ways addition for byte number times 8 long l = r - (((long)((ulong)(s << 8) >> (int)b)) & 0xFFL); // Step 4, byte wise rank, subtract the rank with byte at b-8, or zero for b=0; - Debug.Assert(0L <= 1); + Debugging.Assert(() => 0L <= 1); //assert l < 8 : l; //fails when bit r is not available. // Select bit l from byte (x >>> b): @@ -149,7 +149,7 @@ public static long SmallerUpto15_16(long x, long y) /// The index of the r-th 1 bit in x, or if no such bit exists, 72. 
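The Select contract documented above is easiest to pin down with a small reference check: the rank r is 1-based, and 72 serves as the out-of-range sentinel when x has fewer than r set bits. A simplified bit-by-bit version with worked values (slower than the broadword code being patched here, but the same contract):

using System;

// Naive reference (hypothetical, for illustration) of the documented contract:
// return the index of the r-th set bit of x (r is 1-based), or 72 if none.
internal static class SelectDemo
{
    internal static int SelectNaive(long x, int r)
    {
        int s = -1;
        while (x != 0L && r > 0)
        {
            s++;
            if ((x & 1L) != 0L) r--;
            x = (long)((ulong)x >> 1);   // unsigned shift, like Java's >>>
        }
        return r > 0 ? 72 : s;
    }

    internal static void Main()
    {
        long x = 0b10110;                      // bits 1, 2 and 4 are set
        Console.WriteLine(SelectNaive(x, 1));  // 1
        Console.WriteLine(SelectNaive(x, 3));  // 4
        Console.WriteLine(SelectNaive(x, 4));  // 72 (only three bits are set)
    }
}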
public static int SelectNaive(long x, int r) { - Debug.Assert(r >= 1); + Debugging.Assert(() => r >= 1); int s = -1; while ((x != 0L) && (r > 0)) { diff --git a/src/Lucene.Net/Util/ByteBlockPool.cs b/src/Lucene.Net/Util/ByteBlockPool.cs index bfb7ed1c0b..88d0731191 100644 --- a/src/Lucene.Net/Util/ByteBlockPool.cs +++ b/src/Lucene.Net/Util/ByteBlockPool.cs @@ -1,9 +1,9 @@ using J2N.Collections.Generic.Extensions; using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Util @@ -354,7 +354,7 @@ public void SetBytesRef(BytesRef term, int textStart) term.Length = (bytes[pos] & 0x7f) + ((bytes[pos + 1] & 0xff) << 7); term.Offset = pos + 2; } - Debug.Assert(term.Length >= 0); + Debugging.Assert(() => term.Length >= 0); } /// diff --git a/src/Lucene.Net/Util/BytesRef.cs b/src/Lucene.Net/Util/BytesRef.cs index 88ff684c0c..7e293b6af1 100644 --- a/src/Lucene.Net/Util/BytesRef.cs +++ b/src/Lucene.Net/Util/BytesRef.cs @@ -1,8 +1,8 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; using WritableArrayAttribute = Lucene.Net.Support.WritableArrayAttribute; @@ -88,7 +88,7 @@ public BytesRef(byte[] bytes, int offset, int length) this.bytes = bytes; this.Offset = offset; this.Length = length; - Debug.Assert(IsValid()); + Debugging.Assert(IsValid); } /// @@ -140,7 +140,7 @@ public BytesRef(string text) /// unpaired surrogates or invalid UTF16 code units. public void CopyChars(ICharSequence text) { - Debug.Assert(Offset == 0); // TODO broken if offset != 0 + Debugging.Assert(() => Offset == 0); // TODO broken if offset != 0 UnicodeUtil.UTF16toUTF8(text, 0, text.Length, this); } @@ -151,7 +151,7 @@ public void CopyChars(ICharSequence text) /// unpaired surrogates or invalid UTF16 code units. public void CopyChars(string text) { - Debug.Assert(Offset == 0); // TODO broken if offset != 0 + Debugging.Assert(() => Offset == 0); // TODO broken if offset != 0 UnicodeUtil.UTF16toUTF8(text, 0, text.Length, this); } @@ -164,7 +164,7 @@ public void CopyChars(string text) /// Another , should not be null. 
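The arithmetic decoding term.Length in SetBytesRef above reads ByteBlockPool's variable-width length header: entries shorter than 128 bytes spend one header byte, longer ones two, with 0x80 flagging the two-byte form; the matching writer appears below in BytesRefHash.Add. A worked round-trip of the two-byte case, where the encode side is a sketch consistent with that decode expression rather than the library's exact code:

using System;

// Round-trip of the 2-byte length header decoded by SetBytesRef above
// (hypothetical length value; encode side reconstructed from the decode).
internal static class LengthHeaderDemo
{
    internal static void Main()
    {
        int length = 300;                                 // >= 128, needs two bytes
        byte b0 = (byte)(0x80 | (length & 0x7f));         // low 7 bits + continuation flag
        byte b1 = (byte)(length >> 7);                    // remaining high bits
        int decoded = (b0 & 0x7f) + ((b1 & 0xff) << 7);   // same expression as SetBytesRef
        Console.WriteLine(decoded);                       // 300
    }
}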
public bool BytesEquals(BytesRef other) { - Debug.Assert(other != null); + Debugging.Assert(() => other != null); if (Length == other.Length) { var otherUpto = other.Offset; @@ -298,7 +298,7 @@ public void Append(BytesRef other) /// public void Grow(int newLength) { - Debug.Assert(Offset == 0); // NOTE: senseless if offset != 0 + Debugging.Assert(() => Offset == 0); // NOTE: senseless if offset != 0 bytes = ArrayUtil.Grow(bytes, newLength); } @@ -307,7 +307,7 @@ public void Grow(int newLength) public int CompareTo(object other) // LUCENENET specific: Implemented IComparable for FieldComparer { BytesRef br = other as BytesRef; - Debug.Assert(br != null); + Debugging.Assert(() => br != null); return utf8SortedAsUnicodeSortOrder.Compare(this, br); } diff --git a/src/Lucene.Net/Util/BytesRefArray.cs b/src/Lucene.Net/Util/BytesRefArray.cs index ac798afa23..4257dd3cce 100644 --- a/src/Lucene.Net/Util/BytesRefArray.cs +++ b/src/Lucene.Net/Util/BytesRefArray.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -97,7 +97,7 @@ public BytesRef Get(BytesRef spare, int index) { int offset = offsets[index]; int length = index == lastElement - 1 ? currentOffset - offset : offsets[index + 1] - offset; - Debug.Assert(spare.Offset == 0); + Debugging.Assert(() => spare.Offset == 0); spare.Grow(length); spare.Length = length; pool.ReadBytes(offset, spare.Bytes, spare.Offset, spare.Length); diff --git a/src/Lucene.Net/Util/BytesRefHash.cs b/src/Lucene.Net/Util/BytesRefHash.cs index 89c2504fa1..450483ff31 100644 --- a/src/Lucene.Net/Util/BytesRefHash.cs +++ b/src/Lucene.Net/Util/BytesRefHash.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; #if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; #endif @@ -121,8 +121,8 @@ public BytesRefHash(ByteBlockPool pool, int capacity, BytesStartArray bytesStart /// bytesID public BytesRef Get(int bytesID, BytesRef @ref) { - Debug.Assert(bytesStart != null, "bytesStart is null - not initialized"); - Debug.Assert(bytesID < bytesStart.Length, "bytesID exceeds byteStart len: " + bytesStart.Length); + Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); + Debugging.Assert(() => bytesID < bytesStart.Length, () => "bytesID exceeds byteStart len: " + bytesStart.Length); pool.SetBytesRef(@ref, bytesStart[bytesID]); return @ref; } @@ -137,7 +137,7 @@ public BytesRef Get(int bytesID, BytesRef @ref) /// public int[] Compact() { - Debug.Assert(bytesStart != null, "bytesStart is null - not initialized"); + Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); int upto = 0; for (int i = 0; i < hashSize; i++) { @@ -152,7 +152,7 @@ public int[] Compact() } } - Debug.Assert(upto == count); + Debugging.Assert(() => upto == count); lastCount = count; return ids; } @@ -198,7 +198,7 @@ protected override void Swap(int i, int j) protected override int Compare(int i, int j) { int id1 = compact[i], id2 = compact[j]; - Debug.Assert(outerInstance.bytesStart.Length > id1 && outerInstance.bytesStart.Length > id2); + Debugging.Assert(() => outerInstance.bytesStart.Length > id1 && outerInstance.bytesStart.Length > id2); outerInstance.pool.SetBytesRef(outerInstance.scratch1, outerInstance.bytesStart[id1]); outerInstance.pool.SetBytesRef(scratch2, outerInstance.bytesStart[id2]); return 
comp.Compare(outerInstance.scratch1, scratch2); @@ -207,14 +207,14 @@ protected override int Compare(int i, int j) protected override void SetPivot(int i) { int id = compact[i]; - Debug.Assert(outerInstance.bytesStart.Length > id); + Debugging.Assert(() => outerInstance.bytesStart.Length > id); outerInstance.pool.SetBytesRef(pivot, outerInstance.bytesStart[id]); } protected override int ComparePivot(int j) { int id = compact[j]; - Debug.Assert(outerInstance.bytesStart.Length > id); + Debugging.Assert(() => outerInstance.bytesStart.Length > id); outerInstance.pool.SetBytesRef(scratch2, outerInstance.bytesStart[id]); return comp.Compare(pivot, scratch2); } @@ -301,7 +301,7 @@ public void Dispose() /// public int Add(BytesRef bytes) { - Debug.Assert(bytesStart != null, "Bytesstart is null - not initialized"); + Debugging.Assert(() => bytesStart != null, () => "Bytesstart is null - not initialized"); int length = bytes.Length; // final position int hashPos = FindHash(bytes); @@ -324,7 +324,7 @@ public int Add(BytesRef bytes) if (count >= bytesStart.Length) { bytesStart = bytesStartArray.Grow(); - Debug.Assert(count < bytesStart.Length + 1, "count: " + count + " len: " + bytesStart.Length); + Debugging.Assert(() => count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); } e = count++; @@ -339,7 +339,7 @@ public int Add(BytesRef bytes) // 1 byte to store length buffer[bufferUpto] = (byte)length; pool.ByteUpto += length + 1; - Debug.Assert(length >= 0, "Length must be positive: " + length); + Debugging.Assert(() => length >= 0, () => "Length must be positive: " + length); System.Buffer.BlockCopy(bytes.Bytes, bytes.Offset, buffer, bufferUpto + 1, length); } else @@ -350,7 +350,7 @@ public int Add(BytesRef bytes) pool.ByteUpto += length + 2; System.Buffer.BlockCopy(bytes.Bytes, bytes.Offset, buffer, bufferUpto + 2, length); } - Debug.Assert(ids[hashPos] == -1); + Debugging.Assert(() => ids[hashPos] == -1); ids[hashPos] = e; if (count == hashHalfSize) @@ -377,7 +377,7 @@ public int Find(BytesRef bytes) private int FindHash(BytesRef bytes) { - Debug.Assert(bytesStart != null, "bytesStart is null - not initialized"); + Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); int code = DoHash(bytes.Bytes, bytes.Offset, bytes.Length); @@ -409,7 +409,7 @@ private int FindHash(BytesRef bytes) /// public int AddByPoolOffset(int offset) { - Debug.Assert(bytesStart != null, "Bytesstart is null - not initialized"); + Debugging.Assert(() => bytesStart != null, () => "Bytesstart is null - not initialized"); // final position int code = offset; int hashPos = offset & hashMask; @@ -431,11 +431,11 @@ public int AddByPoolOffset(int offset) if (count >= bytesStart.Length) { bytesStart = bytesStartArray.Grow(); - Debug.Assert(count < bytesStart.Length + 1, "count: " + count + " len: " + bytesStart.Length); + Debugging.Assert(() => count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); } e = count++; bytesStart[e] = offset; - Debug.Assert(ids[hashPos] == -1); + Debugging.Assert(() => ids[hashPos] == -1); ids[hashPos] = e; if (count == hashHalfSize) @@ -489,7 +489,7 @@ private void Rehash(int newSize, bool hashOnData) } int hashPos = code & newMask; - Debug.Assert(hashPos >= 0); + Debugging.Assert(() => hashPos >= 0); if (newHash[hashPos] != -1) { // Conflict; use linear probe to find an open slot @@ -546,8 +546,8 @@ public void Reinit() /// for the given id public int ByteStart(int bytesID) { - Debug.Assert(bytesStart != 
null, "bytesStart is null - not initialized"); - Debug.Assert(bytesID >= 0 && bytesID < count, bytesID.ToString()); + Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); + Debugging.Assert(() => bytesID >= 0 && bytesID < count, () => bytesID.ToString()); return bytesStart[bytesID]; } @@ -645,7 +645,7 @@ public override int[] Clear() public override int[] Grow() { - Debug.Assert(bytesStart != null); + Debugging.Assert(() => bytesStart != null); return bytesStart = ArrayUtil.Grow(bytesStart, bytesStart.Length + 1); } diff --git a/src/Lucene.Net/Util/CharsRef.cs b/src/Lucene.Net/Util/CharsRef.cs index d0120a62ee..2116fe2aef 100644 --- a/src/Lucene.Net/Util/CharsRef.cs +++ b/src/Lucene.Net/Util/CharsRef.cs @@ -1,8 +1,8 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using WritableArrayAttribute = Lucene.Net.Support.WritableArrayAttribute; @@ -99,7 +99,7 @@ public CharsRef(char[] chars, int offset, int length) this.chars = chars; this.Offset = offset; this.Length = length; - Debug.Assert(IsValid()); + Debugging.Assert(IsValid); } /// @@ -228,7 +228,7 @@ public void CopyChars(CharsRef other) /// public void Grow(int newLength) { - Debug.Assert(Offset == 0); + Debugging.Assert(() => Offset == 0); if (chars.Length < newLength) { chars = ArrayUtil.Grow(chars, newLength); diff --git a/src/Lucene.Net/Util/FilterIterator.cs b/src/Lucene.Net/Util/FilterIterator.cs index 0c11c931c0..9cd8fce613 100644 --- a/src/Lucene.Net/Util/FilterIterator.cs +++ b/src/Lucene.Net/Util/FilterIterator.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -47,7 +47,7 @@ public bool MoveNext() return false; } - Debug.Assert(nextIsSet); + Debugging.Assert(() => nextIsSet); try { current = next; diff --git a/src/Lucene.Net/Util/FixedBitSet.cs b/src/Lucene.Net/Util/FixedBitSet.cs index 915d58479c..9d12d67f1a 100644 --- a/src/Lucene.Net/Util/FixedBitSet.cs +++ b/src/Lucene.Net/Util/FixedBitSet.cs @@ -1,7 +1,7 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -259,7 +259,7 @@ public int Cardinality() public bool Get(int index) { - Debug.Assert(index >= 0 && index < numBits, "index=" + index + ", numBits=" + numBits); + Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int i = index >> 6; // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. 
@@ -270,7 +270,7 @@ public bool Get(int index) public void Set(int index) { - Debug.Assert(index >= 0 && index < numBits, "index=" + index + ", numBits=" + numBits); + Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -279,7 +279,7 @@ public void Set(int index) public bool GetAndSet(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -290,7 +290,7 @@ public bool GetAndSet(int index) public void Clear(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; int bit = index & 0x03f; long bitmask = 1L << bit; @@ -299,7 +299,7 @@ public void Clear(int index) public bool GetAndClear(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -314,7 +314,7 @@ public bool GetAndClear(int index) /// public int NextSetBit(int index) { - Debug.Assert(index >= 0 && index < numBits, "index=" + index + ", numBits=" + numBits); + Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int i = index >> 6; int subIndex = index & 0x3f; // index within the word long word = bits[i] >> subIndex; // skip all the bits to the right of index @@ -342,7 +342,7 @@ public int NextSetBit(int index) /// public int PrevSetBit(int index) { - Debug.Assert(index >= 0 && index < numBits, "index=" + index + " numBits=" + numBits); + Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int i = index >> 6; int subIndex = index & 0x3f; // index within the word long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index @@ -405,7 +405,7 @@ public void Or(FixedBitSet other) private void Or(long[] otherArr, int otherNumWords) { - Debug.Assert(otherNumWords <= numWords, "numWords=" + numWords + ", otherNumWords=" + otherNumWords); + Debugging.Assert(() => otherNumWords <= numWords, () => "numWords=" + numWords + ", otherNumWords=" + otherNumWords); long[] thisArr = this.bits; int pos = Math.Min(numWords, otherNumWords); while (--pos >= 0) @@ -418,7 +418,7 @@ private void Or(long[] otherArr, int otherNumWords) /// this = this XOR other public void Xor(FixedBitSet other) { - Debug.Assert(other.numWords <= numWords, "numWords=" + numWords + ", other.numWords=" + other.numWords); + Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); long[] thisBits = this.bits; long[] otherBits = other.bits; int pos = Math.Min(numWords, other.numWords); @@ -577,8 +577,8 @@ private void AndNot(long[] otherArr, int otherNumWords) /// One-past the last bit to flip public void Flip(int startIndex, int endIndex) { - Debug.Assert(startIndex >= 0 && startIndex < numBits); - Debug.Assert(endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); if (endIndex <= startIndex) { return; @@ -623,8 +623,8 @@ public void Flip(int startIndex, int endIndex) /// One-past the last bit to set public void Set(int startIndex, int 
endIndex) { - Debug.Assert(startIndex >= 0 && startIndex < numBits); - Debug.Assert(endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); if (endIndex <= startIndex) { return; @@ -655,8 +655,8 @@ public void Set(int startIndex, int endIndex) /// One-past the last bit to clear public void Clear(int startIndex, int endIndex) { - Debug.Assert(startIndex >= 0 && startIndex < numBits, "startIndex=" + startIndex + ", numBits=" + numBits); - Debug.Assert(endIndex >= 0 && endIndex <= numBits, "endIndex=" + endIndex + ", numBits=" + numBits); + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits, () => "startIndex=" + startIndex + ", numBits=" + numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits, () => "endIndex=" + endIndex + ", numBits=" + numBits); if (endIndex <= startIndex) { return; diff --git a/src/Lucene.Net/Util/Fst/Builder.cs b/src/Lucene.Net/Util/Fst/Builder.cs index bdd27153e0..c48c50c09d 100644 --- a/src/Lucene.Net/Util/Fst/Builder.cs +++ b/src/Lucene.Net/Util/Fst/Builder.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Util.Fst @@ -206,7 +206,7 @@ private CompiledNode CompileNode(UnCompiledNode nodeIn, int tailLength) { node = fst.AddNode(nodeIn); } - Debug.Assert(node != -2); + Debugging.Assert(() => node != -2); nodeIn.Clear(); @@ -370,8 +370,8 @@ public virtual void Add(Int32sRef input, T output) output = NO_OUTPUT; } - Debug.Assert(lastInput.Length == 0 || input.CompareTo(lastInput) >= 0, "inputs are added out of order lastInput=" + lastInput + " vs input=" + input); - Debug.Assert(ValidOutput(output)); + Debugging.Assert(() => lastInput.Length == 0 || input.CompareTo(lastInput) >= 0, () => "inputs are added out of order lastInput=" + lastInput + " vs input=" + input); + Debugging.Assert(() => ValidOutput(output)); //System.out.println("\nadd: " + input); if (input.Length == 0) @@ -441,7 +441,7 @@ public virtual void Add(Int32sRef input, T output) UnCompiledNode parentNode = frontier[idx - 1]; T lastOutput = parentNode.GetLastOutput(input.Int32s[input.Offset + idx - 1]); - Debug.Assert(ValidOutput(lastOutput)); + Debugging.Assert(() => ValidOutput(lastOutput)); T commonOutputPrefix; T wordSuffix; @@ -449,9 +449,9 @@ public virtual void Add(Int32sRef input, T output) if (!lastOutput.Equals(NO_OUTPUT)) { commonOutputPrefix = fst.Outputs.Common(output, lastOutput); - Debug.Assert(ValidOutput(commonOutputPrefix)); + Debugging.Assert(() => ValidOutput(commonOutputPrefix)); wordSuffix = fst.Outputs.Subtract(lastOutput, commonOutputPrefix); - Debug.Assert(ValidOutput(wordSuffix)); + Debugging.Assert(() => ValidOutput(wordSuffix)); parentNode.SetLastOutput(input.Int32s[input.Offset + idx - 1], commonOutputPrefix); node.PrependOutput(wordSuffix); } @@ -461,7 +461,7 @@ public virtual void Add(Int32sRef input, T output) } output = fst.Outputs.Subtract(output, commonOutputPrefix); - Debug.Assert(ValidOutput(output)); + Debugging.Assert(() => ValidOutput(output)); } if (lastInput.Length == input.Length && prefixLenPlus1 == 1 + input.Length) @@ -657,17 +657,17 @@ public void Clear() public S GetLastOutput(int labelToMatch) { - Debug.Assert(NumArcs > 0); - Debug.Assert(Arcs[NumArcs - 1].Label == labelToMatch); + Debugging.Assert(() => NumArcs > 0); + Debugging.Assert(() => Arcs[NumArcs - 1].Label == labelToMatch); return 
Arcs[NumArcs - 1].Output; } public void AddArc(int label, INode target) { - Debug.Assert(label >= 0); + Debugging.Assert(() => label >= 0); if (NumArcs != 0) { - Debug.Assert(label > Arcs[NumArcs - 1].Label, "arc[-1].Label=" + Arcs[NumArcs - 1].Label + " new label=" + label + " numArcs=" + NumArcs); + Debugging.Assert(() => label > Arcs[NumArcs - 1].Label, () => "arc[-1].Label=" + Arcs[NumArcs - 1].Label + " new label=" + label + " numArcs=" + NumArcs); } if (NumArcs == Arcs.Length) { @@ -688,9 +688,9 @@ public void AddArc(int label, INode target) public void ReplaceLast(int labelToMatch, INode target, S nextFinalOutput, bool isFinal) { - Debug.Assert(NumArcs > 0); + Debugging.Assert(() => NumArcs > 0); Arc arc = Arcs[NumArcs - 1]; - Debug.Assert(arc.Label == labelToMatch, "arc.Label=" + arc.Label + " vs " + labelToMatch); + Debugging.Assert(() => arc.Label == labelToMatch, () => "arc.Label=" + arc.Label + " vs " + labelToMatch); arc.Target = target; //assert target.Node != -2; arc.NextFinalOutput = nextFinalOutput; @@ -699,36 +699,36 @@ public void ReplaceLast(int labelToMatch, INode target, S nextFinalOutput, bool public void DeleteLast(int label, INode target) { - Debug.Assert(NumArcs > 0); - Debug.Assert(label == Arcs[NumArcs - 1].Label); - Debug.Assert(target == Arcs[NumArcs - 1].Target); + Debugging.Assert(() => NumArcs > 0); + Debugging.Assert(() => label == Arcs[NumArcs - 1].Label); + Debugging.Assert(() => target == Arcs[NumArcs - 1].Target); NumArcs--; } public void SetLastOutput(int labelToMatch, S newOutput) { - Debug.Assert(Owner.ValidOutput(newOutput)); - Debug.Assert(NumArcs > 0); + Debugging.Assert(() => Owner.ValidOutput(newOutput)); + Debugging.Assert(() => NumArcs > 0); Arc arc = Arcs[NumArcs - 1]; - Debug.Assert(arc.Label == labelToMatch); + Debugging.Assert(() => arc.Label == labelToMatch); arc.Output = newOutput; } // pushes an output prefix forward onto all arcs public void PrependOutput(S outputPrefix) { - Debug.Assert(Owner.ValidOutput(outputPrefix)); + Debugging.Assert(() => Owner.ValidOutput(outputPrefix)); for (int arcIdx = 0; arcIdx < NumArcs; arcIdx++) { Arcs[arcIdx].Output = Owner.Fst.Outputs.Add(outputPrefix, Arcs[arcIdx].Output); - Debug.Assert(Owner.ValidOutput(Arcs[arcIdx].Output)); + Debugging.Assert(() => Owner.ValidOutput(Arcs[arcIdx].Output)); } if (IsFinal) { Output = Owner.Fst.Outputs.Add(outputPrefix, Output); - Debug.Assert(Owner.ValidOutput(Output)); + Debugging.Assert(() => Owner.ValidOutput(Output)); } } } diff --git a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs index 78aa93ad0f..f92702821d 100644 --- a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Fst { @@ -42,8 +42,8 @@ private ByteSequenceOutputs() public override BytesRef Common(BytesRef output1, BytesRef output2) { - Debug.Assert(output1 != null); - Debug.Assert(output2 != null); + Debugging.Assert(() => output1 != null); + Debugging.Assert(() => output2 != null); int pos1 = output1.Offset; int pos2 = output2.Offset; @@ -81,8 +81,8 @@ public override BytesRef Common(BytesRef output1, BytesRef output2) public override BytesRef Subtract(BytesRef output, BytesRef inc) { - Debug.Assert(output != null); - Debug.Assert(inc != null); + Debugging.Assert(() => output != null); + Debugging.Assert(() => inc != null); if (inc == NO_OUTPUT) { // no prefix removed @@ -95,16 +95,16 @@ 
public override BytesRef Subtract(BytesRef output, BytesRef inc) } else { - Debug.Assert(inc.Length < output.Length, "inc.length=" + inc.Length + " vs output.length=" + output.Length); - Debug.Assert(inc.Length > 0); + Debugging.Assert(() => inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); + Debugging.Assert(() => inc.Length > 0); return new BytesRef(output.Bytes, output.Offset + inc.Length, output.Length - inc.Length); } } public override BytesRef Add(BytesRef prefix, BytesRef output) { - Debug.Assert(prefix != null); - Debug.Assert(output != null); + Debugging.Assert(() => prefix != null); + Debugging.Assert(() => output != null); if (prefix == NO_OUTPUT) { return output; @@ -115,8 +115,8 @@ public override BytesRef Add(BytesRef prefix, BytesRef output) } else { - Debug.Assert(prefix.Length > 0); - Debug.Assert(output.Length > 0); + Debugging.Assert(() => prefix.Length > 0); + Debugging.Assert(() => output.Length > 0); BytesRef result = new BytesRef(prefix.Length + output.Length); Array.Copy(prefix.Bytes, prefix.Offset, result.Bytes, 0, prefix.Length); Array.Copy(output.Bytes, output.Offset, result.Bytes, prefix.Length, output.Length); @@ -127,7 +127,7 @@ public override BytesRef Add(BytesRef prefix, BytesRef output) public override void Write(BytesRef prefix, DataOutput @out) { - Debug.Assert(prefix != null); + Debugging.Assert(() => prefix != null); @out.WriteVInt32(prefix.Length); @out.WriteBytes(prefix.Bytes, prefix.Offset, prefix.Length); } diff --git a/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs b/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs index 8a4f6f5a8c..9e2074893c 100644 --- a/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util.Fst { @@ -86,7 +86,7 @@ public BytesRefFSTEnum.InputOutput SeekExact(BytesRef target) m_targetLength = target.Length; if (base.DoSeekExact()) { - Debug.Assert(m_upto == 1 + target.Length); + Debugging.Assert(() => m_upto == 1 + target.Length); return SetResult(); } else diff --git a/src/Lucene.Net/Util/Fst/BytesStore.cs b/src/Lucene.Net/Util/Fst/BytesStore.cs index b05850c144..ee857d099a 100644 --- a/src/Lucene.Net/Util/Fst/BytesStore.cs +++ b/src/Lucene.Net/Util/Fst/BytesStore.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util.Fst @@ -131,7 +131,7 @@ public override void WriteBytes(byte[] b, int offset, int len) internal virtual void WriteBytes(long dest, byte[] b, int offset, int len) { //System.out.println(" BS.writeBytes dest=" + dest + " offset=" + offset + " len=" + len); - Debug.Assert(dest + len <= Position, "dest=" + dest + " pos=" + Position + " len=" + len); + Debugging.Assert(() => dest + len <= Position, () => "dest=" + dest + " pos=" + Position + " len=" + len); // Note: weird: must go "backwards" because copyBytes // calls us with overlapping src/dest. If we @@ -198,7 +198,7 @@ internal virtual void WriteBytes(long dest, byte[] b, int offset, int len) public virtual void CopyBytes(long src, long dest, int len) { //System.out.println("BS.copyBytes src=" + src + " dest=" + dest + " len=" + len); - Debug.Assert(src < dest); + Debugging.Assert(() => src < dest); // Note: weird: must go "backwards" because copyBytes // calls us with overlapping src/dest. 
If we @@ -287,8 +287,8 @@ public virtual void WriteInt32(long pos, int value) /// Reverse from , inclusive, to , inclusive. public virtual void Reverse(long srcPos, long destPos) { - Debug.Assert(srcPos < destPos); - Debug.Assert(destPos < Position); + Debugging.Assert(() => srcPos < destPos); + Debugging.Assert(() => destPos < Position); //System.out.println("reverse src=" + srcPos + " dest=" + destPos); int srcBlockIndex = (int)(srcPos >> blockBits); @@ -355,8 +355,8 @@ public virtual void SkipBytes(int len) /// public virtual void Truncate(long newLen) { - Debug.Assert(newLen <= Position); - Debug.Assert(newLen >= 0); + Debugging.Assert(() => newLen <= Position); + Debugging.Assert(() => newLen >= 0); int blockIndex = (int)(newLen >> blockBits); nextWrite = (int)(newLen & blockMask); if (nextWrite == 0) @@ -373,7 +373,7 @@ public virtual void Truncate(long newLen) { current = blocks[blockIndex]; } - Debug.Assert(newLen == Position); + Debugging.Assert(() => newLen == Position); } public virtual void Finish() @@ -469,7 +469,7 @@ public override long Position nextBuffer = bufferIndex + 1; current = outerInstance.blocks[bufferIndex]; nextRead = (int)(value & outerInstance.blockMask); - Debug.Assert(this.Position == value, "pos=" + value + " Position=" + this.Position); + Debugging.Assert(() => this.Position == value, () => "pos=" + value + " Position=" + this.Position); } } @@ -542,7 +542,7 @@ public override long Position nextBuffer = bufferIndex - 1; current = outerInstance.blocks[bufferIndex]; nextRead = (int)(value & outerInstance.blockMask); - Debug.Assert(this.Position == value, "value=" + value + " this.Position=" + this.Position); + Debugging.Assert(() => this.Position == value, () => "value=" + value + " this.Position=" + this.Position); } } diff --git a/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs index 52c9e0ad12..c38c7da21b 100644 --- a/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Fst { @@ -42,8 +42,8 @@ private CharSequenceOutputs() public override CharsRef Common(CharsRef output1, CharsRef output2) { - Debug.Assert(output1 != null); - Debug.Assert(output2 != null); + Debugging.Assert(() => output1 != null); + Debugging.Assert(() => output2 != null); int pos1 = output1.Offset; int pos2 = output2.Offset; @@ -81,8 +81,8 @@ public override CharsRef Common(CharsRef output1, CharsRef output2) public override CharsRef Subtract(CharsRef output, CharsRef inc) { - Debug.Assert(output != null); - Debug.Assert(inc != null); + Debugging.Assert(() => output != null); + Debugging.Assert(() => inc != null); if (inc == NO_OUTPUT) { // no prefix removed @@ -95,16 +95,16 @@ public override CharsRef Subtract(CharsRef output, CharsRef inc) } else { - Debug.Assert(inc.Length < output.Length, "inc.Length=" + inc.Length + " vs output.Length=" + output.Length); - Debug.Assert(inc.Length > 0); + Debugging.Assert(() => inc.Length < output.Length, () => "inc.Length=" + inc.Length + " vs output.Length=" + output.Length); + Debugging.Assert(() => inc.Length > 0); return new CharsRef(output.Chars, output.Offset + inc.Length, output.Length - inc.Length); } } public override CharsRef Add(CharsRef prefix, CharsRef output) { - Debug.Assert(prefix != null); - Debug.Assert(output != null); + Debugging.Assert(() => prefix != null); + Debugging.Assert(() => output != null); if (prefix == NO_OUTPUT) 
{ return output; @@ -115,8 +115,8 @@ public override CharsRef Add(CharsRef prefix, CharsRef output) } else { - Debug.Assert(prefix.Length > 0); - Debug.Assert(output.Length > 0); + Debugging.Assert(() => prefix.Length > 0); + Debugging.Assert(() => output.Length > 0); var result = new CharsRef(prefix.Length + output.Length); Array.Copy(prefix.Chars, prefix.Offset, result.Chars, 0, prefix.Length); Array.Copy(output.Chars, output.Offset, result.Chars, prefix.Length, output.Length); @@ -127,7 +127,7 @@ public override CharsRef Add(CharsRef prefix, CharsRef output) public override void Write(CharsRef prefix, DataOutput @out) { - Debug.Assert(prefix != null); + Debugging.Assert(() => prefix != null); @out.WriteVInt32(prefix.Length); // TODO: maybe UTF8? for (int idx = 0; idx < prefix.Length; idx++) diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs index a8ef46e37c..2eec5f70b0 100644 --- a/src/Lucene.Net/Util/Fst/FST.cs +++ b/src/Lucene.Net/Util/Fst/FST.cs @@ -1,11 +1,9 @@ using J2N.Collections; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Collections; using System.Collections.Generic; -using System.Diagnostics; using System.IO; -using System.Reflection; using System.Text; using JCG = J2N.Collections.Generic; @@ -379,8 +377,8 @@ private void CacheRootArcs() ReadRootArcs(cachedRootArcs); bool set = SetAssertingRootArcs(cachedRootArcs); - Debug.Assert(set); - Debug.Assert(AssertRootArcs()); + Debugging.Assert(() => set); + Debugging.Assert(AssertRootArcs); } public void ReadRootArcs(FST.Arc[] arcs) @@ -393,7 +391,7 @@ public void ReadRootArcs(FST.Arc[] arcs) ReadFirstRealTargetArc(arc.Target, arc, @in); while (true) { - Debug.Assert(arc.Label != FST.END_LABEL); + Debugging.Assert(() => arc.Label != FST.END_LABEL); if (arc.Label < cachedRootArcs.Length) { arcs[arc.Label] = (new FST.Arc()).CopyFrom(arc); @@ -420,37 +418,37 @@ private bool SetAssertingRootArcs(FST.Arc[] arcs) private bool AssertRootArcs() { - Debug.Assert(cachedRootArcs != null); - Debug.Assert(assertingCachedRootArcs != null); + Debugging.Assert(() => cachedRootArcs != null); + Debugging.Assert(() => assertingCachedRootArcs != null); for (int i = 0; i < cachedRootArcs.Length; i++) { FST.Arc root = cachedRootArcs[i]; FST.Arc asserting = assertingCachedRootArcs[i]; if (root != null) { - Debug.Assert(root.ArcIdx == asserting.ArcIdx); - Debug.Assert(root.BytesPerArc == asserting.BytesPerArc); - Debug.Assert(root.Flags == asserting.Flags); - Debug.Assert(root.Label == asserting.Label); - Debug.Assert(root.NextArc == asserting.NextArc); + Debugging.Assert(() => root.ArcIdx == asserting.ArcIdx); + Debugging.Assert(() => root.BytesPerArc == asserting.BytesPerArc); + Debugging.Assert(() => root.Flags == asserting.Flags); + Debugging.Assert(() => root.Label == asserting.Label); + Debugging.Assert(() => root.NextArc == asserting.NextArc); // LUCENENET NOTE: In .NET, IEnumerable will not equal another identical IEnumerable // because it checks for reference equality, not that the list contents // are the same. StructuralEqualityComparer.Default.Equals() will make that check. - Debug.Assert(typeof(T).IsValueType + Debugging.Assert(() => typeof(T).IsValueType ? 
JCG.EqualityComparer.Default.Equals(root.NextFinalOutput, asserting.NextFinalOutput) : StructuralEqualityComparer.Default.Equals(root.NextFinalOutput, asserting.NextFinalOutput)); - Debug.Assert(root.Node == asserting.Node); - Debug.Assert(root.NumArcs == asserting.NumArcs); - Debug.Assert(typeof(T).IsValueType + Debugging.Assert(() => root.Node == asserting.Node); + Debugging.Assert(() => root.NumArcs == asserting.NumArcs); + Debugging.Assert(() => typeof(T).IsValueType ? JCG.EqualityComparer.Default.Equals(root.Output, asserting.Output) : StructuralEqualityComparer.Default.Equals(root.Output, asserting.Output)); - Debug.Assert(root.PosArcsStart == asserting.PosArcsStart); - Debug.Assert(root.Target == asserting.Target); + Debugging.Assert(() => root.PosArcsStart == asserting.PosArcsStart); + Debugging.Assert(() => root.Target == asserting.Target); } else { - Debug.Assert(root == null && asserting == null); + Debugging.Assert(() => root == null && asserting == null); } } return true; @@ -584,15 +582,15 @@ public void Save(FileInfo file) // LUCENENET NOTE: static Read() was moved into the FST class private void WriteLabel(DataOutput @out, int v) { - Debug.Assert(v >= 0, "v=" + v); + Debugging.Assert(() => v >= 0, () => "v=" + v); if (inputType == FST.INPUT_TYPE.BYTE1) { - Debug.Assert(v <= 255, "v=" + v); + Debugging.Assert(() => v <= 255, () => "v=" + v); @out.WriteByte((byte)(sbyte)v); } else if (inputType == FST.INPUT_TYPE.BYTE2) { - Debug.Assert(v <= 65535, "v=" + v); + Debugging.Assert(() => v <= 65535, () => "v=" + v); @out.WriteInt16((short)v); } else @@ -696,7 +694,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) } else { - Debug.Assert(arc.NextFinalOutput.Equals(NO_OUTPUT)); + Debugging.Assert(() => arc.NextFinalOutput.Equals(NO_OUTPUT)); } bool targetHasArcs = target.Node > 0; @@ -735,7 +733,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (targetHasArcs && (flags & FST.BIT_TARGET_NEXT) == 0) { - Debug.Assert(target.Node > 0); + Debugging.Assert(() => target.Node > 0); //System.out.println(" write target"); bytes.WriteVInt64(target.Node); } @@ -775,7 +773,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (doFixedArray) { const int MAX_HEADER_SIZE = 11; // header(byte) + numArcs(vint) + numBytes(vint) - Debug.Assert(maxBytesPerArc > 0); + Debugging.Assert(() => maxBytesPerArc > 0); // 2nd pass just "expands" all arcs to take up a fixed // byte size @@ -795,7 +793,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) // expand the arcs in place, backwards long srcPos = bytes.Position; long destPos = fixedArrayStart + nodeIn.NumArcs * maxBytesPerArc; - Debug.Assert(destPos >= srcPos); + Debugging.Assert(() => destPos >= srcPos); if (destPos > srcPos) { bytes.SkipBytes((int)(destPos - srcPos)); @@ -807,7 +805,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (srcPos != destPos) { //System.out.println(" copy len=" + bytesPerArc[arcIdx]); - Debug.Assert(destPos > srcPos, "destPos=" + destPos + " srcPos=" + srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " bytesPerArc[arcIdx]=" + bytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.NumArcs); + Debugging.Assert(() => destPos > srcPos, () => "destPos=" + destPos + " srcPos=" + srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " bytesPerArc[arcIdx]=" + bytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.NumArcs); bytes.CopyBytes(srcPos, destPos, bytesPerArc[arcIdx]); } } @@ -893,7 +891,7 @@ public FST.Arc ReadLastTargetArc(FST.Arc follow, FST.Arc arc, 
FST.Bytes if (!TargetHasArcs(follow)) { //System.out.println(" end node"); - Debug.Assert(follow.IsFinal); + Debugging.Assert(() => follow.IsFinal); arc.Label = FST.END_LABEL; arc.Target = FST.FINAL_END_NODE; arc.Output = follow.NextFinalOutput; @@ -960,7 +958,7 @@ public FST.Arc ReadLastTargetArc(FST.Arc follow, FST.Arc arc, FST.Bytes arc.NextArc = @in.Position; } ReadNextRealArc(arc, @in); - Debug.Assert(arc.IsLast); + Debugging.Assert(() => arc.IsLast); return arc; } } @@ -1094,7 +1092,7 @@ public FST.Arc ReadNextArc(FST.Arc arc, FST.BytesReader @in) /// public int ReadNextArcLabel(FST.Arc arc, FST.BytesReader @in) { - Debug.Assert(!arc.IsLast); + Debugging.Assert(() => !arc.IsLast); if (arc.Label == FST.END_LABEL) { @@ -1159,7 +1157,7 @@ public FST.Arc ReadNextRealArc(FST.Arc arc, FST.BytesReader @in) { // arcs are at fixed entries arc.ArcIdx++; - Debug.Assert(arc.ArcIdx < arc.NumArcs); + Debugging.Assert(() => arc.ArcIdx < arc.NumArcs); @in.Position = arc.PosArcsStart; @in.SkipBytes(arc.ArcIdx * arc.BytesPerArc); } @@ -1226,7 +1224,7 @@ public FST.Arc ReadNextRealArc(FST.Arc arc, FST.BytesReader @in) else { arc.Target = arc.Node - 1; - Debug.Assert(arc.Target > 0); + Debugging.Assert(() => arc.Target > 0); } } else @@ -1302,7 +1300,7 @@ public FST.Arc FindTargetArc(int labelToMatch, FST.Arc follow, FST.Arc { // LUCENE-5152: detect tricky cases where caller // modified previously returned cached root-arcs: - Debug.Assert(AssertRootArcs()); + Debugging.Assert(AssertRootArcs); FST.Arc result = cachedRootArcs[labelToMatch]; if (result == null) { @@ -1829,7 +1827,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve } else { - Debug.Assert(arc.NextFinalOutput.Equals(NO_OUTPUT)); + Debugging.Assert(() => arc.NextFinalOutput.Equals(NO_OUTPUT)); } if (!TargetHasArcs(arc)) { @@ -1872,7 +1870,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve absPtr = 0; } - Debug.Assert(flags != FST.ARCS_AS_FIXED_ARRAY); + Debugging.Assert(() => flags != FST.ARCS_AS_FIXED_ARRAY); writer.WriteByte((byte)(sbyte)flags); fst.WriteLabel(writer, arc.Label); @@ -1994,7 +1992,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve // order) so nodes should only point forward to // other nodes because we only produce acyclic FSTs // w/ nodes only pointing "forwards": - Debug.Assert(!negDelta); + Debugging.Assert(() => !negDelta); //System.out.println("TOT wasted=" + totWasted); // Converged! 
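Several asserts above guard the fixed-array arc encoding: once AddNode's second pass pads every arc of a node out to maxBytesPerArc, arc i sits exactly i * bytesPerArc bytes from posArcsStart, which is what lets ReadNextRealArc position the reader directly instead of scanning, and lets the FSTEnum seek methods further down binary-search a node's arcs. A sketch of the addressing with hypothetical numbers:

using System;

// Sketch (hypothetical numbers) of fixed-stride arc addressing: once arcs are
// padded to a common size, arc i is a constant offset from the block start.
internal static class FixedArcLayoutDemo
{
    internal static void Main()
    {
        long posArcsStart = 1024;    // where this node's arc block begins
        int bytesPerArc = 7;         // maxBytesPerArc chosen in AddNode's 2nd pass
        for (int arcIdx = 0; arcIdx < 3; arcIdx++)
            Console.WriteLine(posArcsStart + arcIdx * (long)bytesPerArc); // 1024, 1031, 1038
    }
}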
break; @@ -2023,9 +2021,9 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve fst.EmptyOutput = emptyOutput; } - Debug.Assert(fst.nodeCount == nodeCount, "fst.nodeCount=" + fst.nodeCount + " nodeCount=" + nodeCount); - Debug.Assert(fst.arcCount == arcCount); - Debug.Assert(fst.arcWithOutputCount == arcWithOutputCount, "fst.arcWithOutputCount=" + fst.arcWithOutputCount + " arcWithOutputCount=" + arcWithOutputCount); + Debugging.Assert(() => fst.nodeCount == nodeCount, () => "fst.nodeCount=" + fst.nodeCount + " nodeCount=" + nodeCount); + Debugging.Assert(() => fst.arcCount == arcCount); + Debugging.Assert(() => fst.arcWithOutputCount == arcWithOutputCount, () => "fst.arcWithOutputCount=" + fst.arcWithOutputCount + " arcWithOutputCount=" + arcWithOutputCount); fst.bytes.Finish(); fst.CacheRootArcs(); @@ -2340,7 +2338,7 @@ public NodeQueue(int topN) protected internal override bool LessThan(NodeAndInCount a, NodeAndInCount b) { int cmp = a.CompareTo(b); - Debug.Assert(cmp != 0); + Debugging.Assert(() => cmp != 0); return cmp < 0; } } diff --git a/src/Lucene.Net/Util/Fst/FSTEnum.cs b/src/Lucene.Net/Util/Fst/FSTEnum.cs index 8298fdf2d7..9dd3bc5516 100644 --- a/src/Lucene.Net/Util/Fst/FSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/FSTEnum.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Fst { @@ -199,8 +199,8 @@ protected virtual void DoSeekCeil() // Match arc.ArcIdx = mid - 1; m_fst.ReadNextRealArc(arc, @in); - Debug.Assert(arc.ArcIdx == mid); - Debug.Assert(arc.Label == targetLabel, "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); + Debugging.Assert(() => arc.ArcIdx == mid); + Debugging.Assert(() => arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); m_output[m_upto] = m_fst.Outputs.Add(m_output[m_upto - 1], arc.Output); if (targetLabel == FST.END_LABEL) { @@ -217,7 +217,7 @@ protected virtual void DoSeekCeil() // Dead end arc.ArcIdx = arc.NumArcs - 2; m_fst.ReadNextRealArc(arc, @in); - Debug.Assert(arc.IsLast); + Debugging.Assert(() => arc.IsLast); // Dead end (target is after the last arc); // rollback to last fork then push m_upto--; @@ -242,7 +242,7 @@ protected virtual void DoSeekCeil() { arc.ArcIdx = (low > high ? low : high) - 1; m_fst.ReadNextRealArc(arc, @in); - Debug.Assert(arc.Label > targetLabel); + Debugging.Assert(() => arc.Label > targetLabel); PushFirst(); return; } @@ -370,8 +370,8 @@ protected virtual void DoSeekFloor() //System.out.println(" match! arcIdx=" + mid); arc.ArcIdx = mid - 1; m_fst.ReadNextRealArc(arc, @in); - Debug.Assert(arc.ArcIdx == mid); - Debug.Assert(arc.Label == targetLabel, "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); + Debugging.Assert(() => arc.ArcIdx == mid); + Debugging.Assert(() => arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); m_output[m_upto] = m_fst.Outputs.Add(m_output[m_upto - 1], arc.Output); if (targetLabel == FST.END_LABEL) { @@ -426,8 +426,8 @@ protected virtual void DoSeekFloor() // LUCNENET specific: We don't want the ReadNextArcLabel call to be // excluded when Debug.Assert is stripped out by the compiler. 
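The LUCENENET note above marks the inverse of the lambda pattern used everywhere else in this patch: when the checked expression involves a call whose evaluation must happen regardless of build configuration, the call is hoisted into a local and only the captured bool goes into the assert, since anything left inside Debug.Assert's argument list is stripped along with the call in Release builds. A self-contained sketch with hypothetical names:

using System;
using System.Diagnostics;

// Sketch (hypothetical names): evaluate the side-effecting check eagerly, then
// assert on the captured result, so the side effect survives assert-stripping.
internal static class EagerCheckDemo
{
    private static int cursor;

    private static bool AdvanceAndCheck()   // stand-in for a call like ReadNextArcLabel
    {
        cursor++;                           // side effect that must always happen
        return cursor > 0;
    }

    internal static void Main()
    {
        bool check = AdvanceAndCheck();     // runs in Debug and Release alike
        Debug.Assert(check);                // stripped in Release; cursor still advanced
        Console.WriteLine(cursor);          // 1
    }
}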
bool check = arc.IsLast || m_fst.ReadNextArcLabel(arc, @in) > targetLabel; - Debug.Assert(check); - Debug.Assert(arc.Label < targetLabel, "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel); + Debugging.Assert(() => check); + Debugging.Assert(() => arc.Label < targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel); PushLast(); return; } @@ -574,7 +574,7 @@ private void Incr() private void PushFirst() { FST.Arc arc = m_arcs[m_upto]; - Debug.Assert(arc != null); + Debugging.Assert(() => arc != null); while (true) { @@ -601,7 +601,7 @@ private void PushFirst() private void PushLast() { FST.Arc arc = m_arcs[m_upto]; - Debug.Assert(arc != null); + Debugging.Assert(() => arc != null); while (true) { diff --git a/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs index 935d2b18aa..7bfed37fb4 100644 --- a/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Fst { @@ -44,8 +44,8 @@ private Int32SequenceOutputs() public override Int32sRef Common(Int32sRef output1, Int32sRef output2) { - Debug.Assert(output1 != null); - Debug.Assert(output2 != null); + Debugging.Assert(() => output1 != null); + Debugging.Assert(() => output2 != null); int pos1 = output1.Offset; int pos2 = output2.Offset; @@ -83,8 +83,8 @@ public override Int32sRef Common(Int32sRef output1, Int32sRef output2) public override Int32sRef Subtract(Int32sRef output, Int32sRef inc) { - Debug.Assert(output != null); - Debug.Assert(inc != null); + Debugging.Assert(() => output != null); + Debugging.Assert(() => inc != null); if (inc == NO_OUTPUT) { // no prefix removed @@ -97,16 +97,16 @@ public override Int32sRef Subtract(Int32sRef output, Int32sRef inc) } else { - Debug.Assert(inc.Length < output.Length, "inc.length=" + inc.Length + " vs output.length=" + output.Length); - Debug.Assert(inc.Length > 0); + Debugging.Assert(() => inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); + Debugging.Assert(() => inc.Length > 0); return new Int32sRef(output.Int32s, output.Offset + inc.Length, output.Length - inc.Length); } } public override Int32sRef Add(Int32sRef prefix, Int32sRef output) { - Debug.Assert(prefix != null); - Debug.Assert(output != null); + Debugging.Assert(() => prefix != null); + Debugging.Assert(() => output != null); if (prefix == NO_OUTPUT) { return output; @@ -117,8 +117,8 @@ public override Int32sRef Add(Int32sRef prefix, Int32sRef output) } else { - Debug.Assert(prefix.Length > 0); - Debug.Assert(output.Length > 0); + Debugging.Assert(() => prefix.Length > 0); + Debugging.Assert(() => output.Length > 0); Int32sRef result = new Int32sRef(prefix.Length + output.Length); Array.Copy(prefix.Int32s, prefix.Offset, result.Int32s, 0, prefix.Length); Array.Copy(output.Int32s, output.Offset, result.Int32s, prefix.Length, output.Length); @@ -129,7 +129,7 @@ public override Int32sRef Add(Int32sRef prefix, Int32sRef output) public override void Write(Int32sRef prefix, DataOutput @out) { - Debug.Assert(prefix != null); + Debugging.Assert(() => prefix != null); @out.WriteVInt32(prefix.Length); for (int idx = 0; idx < prefix.Length; idx++) { diff --git a/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs b/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs index 03c83b738a..8d57075daa 100644 --- a/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs +++ 
b/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util.Fst { @@ -88,7 +88,7 @@ public Int32sRefFSTEnum.InputOutput SeekExact(Int32sRef target) m_targetLength = target.Length; if (base.DoSeekExact()) { - Debug.Assert(m_upto == 1 + target.Length); + Debugging.Assert(() => m_upto == 1 + target.Length); return SetResult(); } else diff --git a/src/Lucene.Net/Util/Fst/NoOutputs.cs b/src/Lucene.Net/Util/Fst/NoOutputs.cs index 9a36d82a80..33bb13754c 100644 --- a/src/Lucene.Net/Util/Fst/NoOutputs.cs +++ b/src/Lucene.Net/Util/Fst/NoOutputs.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst @@ -64,30 +64,30 @@ private NoOutputs() public override object Common(object output1, object output2) { - Debug.Assert(output1 == NO_OUTPUT); - Debug.Assert(output2 == NO_OUTPUT); + Debugging.Assert(() => output1 == NO_OUTPUT); + Debugging.Assert(() => output2 == NO_OUTPUT); return NO_OUTPUT; } public override object Subtract(object output, object inc) { - Debug.Assert(output == NO_OUTPUT); - Debug.Assert(inc == NO_OUTPUT); + Debugging.Assert(() => output == NO_OUTPUT); + Debugging.Assert(() => inc == NO_OUTPUT); return NO_OUTPUT; } public override object Add(object prefix, object output) { - Debug.Assert(prefix == NO_OUTPUT, "got " + prefix); - Debug.Assert(output == NO_OUTPUT); + Debugging.Assert(() => prefix == NO_OUTPUT, () => "got " + prefix); + Debugging.Assert(() => output == NO_OUTPUT); return NO_OUTPUT; } [MethodImpl(MethodImplOptions.NoInlining)] public override object Merge(object first, object second) { - Debug.Assert(first == NO_OUTPUT); - Debug.Assert(second == NO_OUTPUT); + Debugging.Assert(() => first == NO_OUTPUT); + Debugging.Assert(() => second == NO_OUTPUT); return NO_OUTPUT; } diff --git a/src/Lucene.Net/Util/Fst/NodeHash.cs b/src/Lucene.Net/Util/Fst/NodeHash.cs index 996835575b..ad402d0ff0 100644 --- a/src/Lucene.Net/Util/Fst/NodeHash.cs +++ b/src/Lucene.Net/Util/Fst/NodeHash.cs @@ -1,6 +1,5 @@ using J2N.Collections; -using System.Diagnostics; -using System.Reflection; +using Lucene.Net.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util.Fst @@ -164,7 +163,7 @@ public long Add(Builder.UnCompiledNode nodeIn) long node = fst.AddNode(nodeIn); //System.out.println(" now freeze node=" + node); long hashNode = Hash(node); - Debug.Assert(hashNode == h, "frozenHash=" + hashNode + " vs h=" + h); + Debugging.Assert(() => hashNode == h, () => "frozenHash=" + hashNode + " vs h=" + h); count++; table.Set(pos, node); // Rehash at 2/3 occupancy: diff --git a/src/Lucene.Net/Util/Fst/PairOutputs.cs b/src/Lucene.Net/Util/Fst/PairOutputs.cs index e2796af516..c9eb877259 100644 --- a/src/Lucene.Net/Util/Fst/PairOutputs.cs +++ b/src/Lucene.Net/Util/Fst/PairOutputs.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util.Fst { @@ -86,7 +86,7 @@ public virtual Pair NewPair(A a, B b) else { var p = new Pair(a, b); - Debug.Assert(Valid(p)); + Debugging.Assert(() => Valid(p)); return p; } } @@ -126,28 +126,28 @@ private bool Valid(Pair pair) public override Pair Common(Pair pair1, Pair pair2) { - Debug.Assert(Valid(pair1)); - Debug.Assert(Valid(pair2)); + Debugging.Assert(() => Valid(pair1)); + Debugging.Assert(() => Valid(pair2)); return NewPair(outputs1.Common(pair1.Output1, pair2.Output1), outputs2.Common(pair1.Output2, pair2.Output2)); } public override Pair 
Subtract(Pair output, Pair inc) { - Debug.Assert(Valid(output)); - Debug.Assert(Valid(inc)); + Debugging.Assert(() => Valid(output)); + Debugging.Assert(() => Valid(inc)); return NewPair(outputs1.Subtract(output.Output1, inc.Output1), outputs2.Subtract(output.Output2, inc.Output2)); } public override Pair Add(Pair prefix, Pair output) { - Debug.Assert(Valid(prefix)); - Debug.Assert(Valid(output)); + Debugging.Assert(() => Valid(prefix)); + Debugging.Assert(() => Valid(output)); return NewPair(outputs1.Add(prefix.Output1, output.Output1), outputs2.Add(prefix.Output2, output.Output2)); } public override void Write(Pair output, DataOutput writer) { - Debug.Assert(Valid(output)); + Debugging.Assert(() => Valid(output)); outputs1.Write(output.Output1, writer); outputs2.Write(output.Output2, writer); } @@ -163,7 +163,7 @@ public override Pair Read(DataInput @in) public override string OutputToString(Pair output) { - Debug.Assert(Valid(output)); + Debugging.Assert(() => Valid(output)); return "<pair:" + outputs1.OutputToString(output.Output1) + "," + outputs2.OutputToString(output.Output2) + ">"; } diff --git a/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs b/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs index eb804a9e02..ea2cb101e5 100644 --- a/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs +++ b/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Fst { @@ -45,25 +45,25 @@ private PositiveInt32Outputs() public override long? Common(long? output1, long? output2) { - Debug.Assert(Valid(output1)); - Debug.Assert(Valid(output2)); + Debugging.Assert(() => Valid(output1)); + Debugging.Assert(() => Valid(output2)); if (output1 == NO_OUTPUT || output2 == NO_OUTPUT) { return NO_OUTPUT; } else { - Debug.Assert(output1 > 0); - Debug.Assert(output2 > 0); + Debugging.Assert(() => output1 > 0); + Debugging.Assert(() => output2 > 0); return Math.Min(output1.Value, output2.Value); } } public override long? Subtract(long? output, long? inc) { - Debug.Assert(Valid(output)); - Debug.Assert(Valid(inc)); - Debug.Assert(output >= inc); + Debugging.Assert(() => Valid(output)); + Debugging.Assert(() => Valid(inc)); + Debugging.Assert(() => output >= inc); if (inc == NO_OUTPUT) { @@ -81,8 +81,8 @@ private PositiveInt32Outputs() public override long? Add(long? prefix, long? output) { - Debug.Assert(Valid(prefix)); - Debug.Assert(Valid(output)); + Debugging.Assert(() => Valid(prefix)); + Debugging.Assert(() => Valid(output)); if (prefix == NO_OUTPUT) { return output; @@ -99,7 +99,7 @@ private PositiveInt32Outputs() public override void Write(long? output, DataOutput @out) { - Debug.Assert(Valid(output)); + Debugging.Assert(() => Valid(output)); @out.WriteVInt64(output.Value); } @@ -118,8 +118,8 @@ public override void Write(long? output, DataOutput @out) private bool Valid(long? 
o) { - Debug.Assert(o != null, "PositiveIntOutput precondition fail"); - Debug.Assert(o == NO_OUTPUT || o > 0, "o=" + o); + Debugging.Assert(() => o != null, () => "PositiveIntOutput precondition fail"); + Debugging.Assert(() => o == NO_OUTPUT || o > 0, () => "o=" + o); return true; } diff --git a/src/Lucene.Net/Util/Fst/Util.cs b/src/Lucene.Net/Util/Fst/Util.cs index 2f3920bc5d..a924b55924 100644 --- a/src/Lucene.Net/Util/Fst/Util.cs +++ b/src/Lucene.Net/Util/Fst/Util.cs @@ -1,8 +1,8 @@ using J2N; using J2N.Text; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using BitSet = Lucene.Net.Util.OpenBitSet; @@ -74,7 +74,7 @@ public static T Get(FST fst, Int32sRef input) /// public static T Get(FST fst, BytesRef input) { - Debug.Assert(fst.InputType == FST.INPUT_TYPE.BYTE1); + Debugging.Assert(() => fst.InputType == FST.INPUT_TYPE.BYTE1); var fstReader = fst.GetBytesReader(); @@ -385,7 +385,7 @@ public TopNSearcher(FST fst, int topN, int maxQueueDepth, IComparer compar /// protected virtual void AddIfCompetitive(FSTPath path) { - Debug.Assert(queue != null); + Debugging.Assert(() => queue != null); T cost = fst.Outputs.Add(path.Cost, path.Arc.Output); //System.out.println(" addIfCompetitive queue.size()=" + queue.size() + " path=" + path + " + label=" + path.arc.label); @@ -408,7 +408,7 @@ protected virtual void AddIfCompetitive(FSTPath path) path.Input.Length--; // We should never see dups: - Debug.Assert(cmp != 0); + Debugging.Assert(() => cmp != 0); if (cmp < 0) { @@ -596,7 +596,7 @@ public virtual TopResults Search() fst.ReadNextArc(path.Arc, fstReader); } - Debug.Assert(foundZero); + Debugging.Assert(() => foundZero); if (queue != null) { @@ -917,7 +917,7 @@ public static void ToDot(FST fst, TextWriter @out, bool sameRank, bool lab arcColor = "black"; } - Debug.Assert(arc.Label != FST.END_LABEL); + Debugging.Assert(() => arc.Label != FST.END_LABEL); @out.Write(" " + node + " -> " + arc.Target + " [label=\"" + PrintableLabel(arc.Label) + outs + "\"" + (arc.IsFinal ? " style=\"bold\"" : "") + " color=\"" + arcColor + "\"]\n"); // Break the loop if we're on the last arc of this state. 
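// A minimal sketch of the Valid(...) idiom used by the outputs classes above
// (member names illustrative). The helper performs its detailed asserts and
// then returns true, so the outer assert wraps the whole validation and the
// entire check vanishes when assertions are disabled:
//
//     private bool Valid(long? o)
//     {
//         Debugging.Assert(() => o != null, () => "precondition fail");
//         return true; // always true once the asserts above pass
//     }
//
//     Debugging.Assert(() => Valid(output)); // skipped unless AssertsEnabled
//
// Note that the Func<bool> delegate is still allocated at each call site even
// when assertions are off; only its invocation is conditional.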
@@ -1068,7 +1068,7 @@ public static BytesRef ToBytesRef(Int32sRef input, BytesRef scratch) { int value = input.Int32s[i + input.Offset]; // NOTE: we allow -128 to 255 - Debug.Assert(value >= sbyte.MinValue && value <= 255, "value " + value + " doesn't fit into byte"); + Debugging.Assert(() => value >= sbyte.MinValue && value <= 255, () => "value " + value + " doesn't fit into byte"); scratch.Bytes[i] = (byte)value; } scratch.Length = input.Length; diff --git a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs index 209e4aade7..43f17972f8 100644 --- a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs +++ b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -137,7 +137,7 @@ public static void Encode(byte[] inputArray, int inputOffset, int inputLength, c [CLSCompliant(false)] public static void Encode(sbyte[] inputArray, int inputOffset, int inputLength, char[] outputArray, int outputOffset, int outputLength) { - Debug.Assert(outputLength == GetEncodedLength(inputArray, inputOffset, inputLength)); + Debugging.Assert(() => outputLength == GetEncodedLength(inputArray, inputOffset, inputLength)); if (inputLength > 0) { int inputByteNum = inputOffset; @@ -217,7 +217,7 @@ public static void Decode(char[] inputArray, int inputOffset, int inputLength, b [CLSCompliant(false)] public static void Decode(char[] inputArray, int inputOffset, int inputLength, sbyte[] outputArray, int outputOffset, int outputLength) { - Debug.Assert(outputLength == GetDecodedLength(inputArray, inputOffset, inputLength)); + Debugging.Assert(() => outputLength == GetDecodedLength(inputArray, inputOffset, inputLength)); int numInputChars = inputLength - 1; int numOutputBytes = outputLength; diff --git a/src/Lucene.Net/Util/InfoStream.cs b/src/Lucene.Net/Util/InfoStream.cs index 5d9beaa7b7..9724d44092 100644 --- a/src/Lucene.Net/Util/InfoStream.cs +++ b/src/Lucene.Net/Util/InfoStream.cs @@ -1,5 +1,5 @@ using System; -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util { @@ -42,7 +42,7 @@ private sealed class NoOutput : InfoStream { public override void Message(string component, string message) { - Debug.Assert(false, "message() should not be called when isEnabled returns false"); + Debugging.Assert(() => false, () => "message() should not be called when isEnabled returns false"); } public override bool IsEnabled(string component) diff --git a/src/Lucene.Net/Util/IntBlockPool.cs b/src/Lucene.Net/Util/IntBlockPool.cs index e621a64d07..54045808ec 100644 --- a/src/Lucene.Net/Util/IntBlockPool.cs +++ b/src/Lucene.Net/Util/IntBlockPool.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Util @@ -243,7 +243,7 @@ private int NewSlice(int size) if (Int32Upto > INT32_BLOCK_SIZE - size) { NextBuffer(); - Debug.Assert(AssertSliceBuffer(buffer)); + Debugging.Assert(() => AssertSliceBuffer(buffer)); } int upto = Int32Upto; @@ -292,7 +292,7 @@ private int AllocSlice(int[] slice, int sliceOffset) if (Int32Upto > INT32_BLOCK_SIZE - newSize) { NextBuffer(); - Debug.Assert(AssertSliceBuffer(buffer)); + Debugging.Assert(() => AssertSliceBuffer(buffer)); } int newUpto = Int32Upto; @@ -337,7 +337,7 @@ public virtual void Reset(int sliceOffset) public virtual void WriteInt32(int value) { int[] ints = pool.buffers[offset >> 
INT32_BLOCK_SHIFT]; - Debug.Assert(ints != null); + Debugging.Assert(() => ints != null); int relativeOffset = offset & INT32_BLOCK_MASK; if (ints[relativeOffset] != 0) { @@ -427,7 +427,7 @@ public bool IsEndOfSlice { get { - Debug.Assert(upto + bufferOffset <= end); + Debugging.Assert(() => upto + bufferOffset <= end); return upto + bufferOffset == end; } } @@ -440,8 +440,8 @@ public bool IsEndOfSlice /// public int ReadInt32() { - Debug.Assert(!IsEndOfSlice); - Debug.Assert(upto <= limit); + Debugging.Assert(() => !IsEndOfSlice); + Debugging.Assert(() => upto <= limit); if (upto == limit) { NextSlice(); @@ -465,7 +465,7 @@ private void NextSlice() if (nextIndex + newSize >= end) { // We are advancing to the final slice - Debug.Assert(end - nextIndex > 0); + Debugging.Assert(() => end - nextIndex > 0); limit = end - bufferOffset; } else diff --git a/src/Lucene.Net/Util/IntsRef.cs b/src/Lucene.Net/Util/IntsRef.cs index 2d1d5c56f6..3a3b6df5ec 100644 --- a/src/Lucene.Net/Util/IntsRef.cs +++ b/src/Lucene.Net/Util/IntsRef.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; @@ -101,7 +101,7 @@ public Int32sRef(int[] ints, int offset, int length) this.ints = ints; this.Offset = offset; this.Length = length; - Debug.Assert(IsValid()); + Debugging.Assert(IsValid); } /// @@ -222,7 +222,7 @@ public void CopyInt32s(Int32sRef other) /// public void Grow(int newLength) { - Debug.Assert(Offset == 0); + Debugging.Assert(() => Offset == 0); if (ints.Length < newLength) { ints = ArrayUtil.Grow(ints, newLength); diff --git a/src/Lucene.Net/Util/LongBitSet.cs b/src/Lucene.Net/Util/LongBitSet.cs index 1535b534f4..7c6d4131d6 100644 --- a/src/Lucene.Net/Util/LongBitSet.cs +++ b/src/Lucene.Net/Util/LongBitSet.cs @@ -1,7 +1,7 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -122,7 +122,7 @@ public long Cardinality() public bool Get(long index) { - Debug.Assert(index >= 0 && index < numBits, "index=" + index); + Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index); int i = (int)(index >> 6); // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. 
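// The word/bit arithmetic that these range asserts guard, spelled out. A
// long holds 64 bits, so an index splits into a word number and a bit offset
// (field names as in Int64BitSet):
//
//     int wordNum  = (int)(index >> 6);   // index / 64: which long in bits[]
//     int bit      = (int)(index & 0x3f); // index % 64: which bit of that word
//     long bitmask = 1L << bit;
//     bool isSet   = (bits[wordNum] & bitmask) != 0;
//
// The asserts (index >= 0 && index < numBits) are the only range guard; as
// the comment above notes, a negative index is left to fault on the array
// access instead of being checked explicitly.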
@@ -133,7 +133,7 @@ public bool Get(long index) public void Set(long index) { - Debug.Assert(index >= 0 && index < numBits, "index=" + index + " numBits=" + numBits); + Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -142,7 +142,7 @@ public void Set(long index) public bool GetAndSet(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -153,7 +153,7 @@ public bool GetAndSet(long index) public void Clear(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); int bit = (int)(index & 0x03f); long bitmask = 1L << bit; @@ -162,7 +162,7 @@ public void Clear(long index) public bool GetAndClear(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -177,7 +177,7 @@ public bool GetAndClear(long index) /// public long NextSetBit(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int i = (int)(index >> 6); int subIndex = (int)(index & 0x3f); // index within the word long word = bits[i] >> subIndex; // skip all the bits to the right of index @@ -205,7 +205,7 @@ public long NextSetBit(long index) /// public long PrevSetBit(long index) { - Debug.Assert(index >= 0 && index < numBits, "index=" + index + " numBits=" + numBits); + Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int i = (int)(index >> 6); int subIndex = (int)(index & 0x3f); // index within the word long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index @@ -231,7 +231,7 @@ public long PrevSetBit(long index) /// this = this OR other public void Or(Int64BitSet other) { - Debug.Assert(other.numWords <= numWords, "numWords=" + numWords + ", other.numWords=" + other.numWords); + Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); int pos = Math.Min(numWords, other.numWords); while (--pos >= 0) { @@ -243,7 +243,7 @@ public void Or(Int64BitSet other) /// this = this XOR other public void Xor(Int64BitSet other) { - Debug.Assert(other.numWords <= numWords, "numWords=" + numWords + ", other.numWords=" + other.numWords); + Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); int pos = Math.Min(numWords, other.numWords); while (--pos >= 0) { @@ -303,8 +303,8 @@ public void AndNot(Int64BitSet other) /// One-past the last bit to flip public void Flip(long startIndex, long endIndex) { - Debug.Assert(startIndex >= 0 && startIndex < numBits); - Debug.Assert(endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); if (endIndex <= startIndex) { return; @@ -348,8 +348,8 @@ public void Flip(long startIndex, long endIndex) /// One-past the last bit to set public void Set(long startIndex, long endIndex) { - Debug.Assert(startIndex >= 0 && startIndex < numBits); - 
Debug.Assert(endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); if (endIndex <= startIndex) { return; @@ -379,8 +379,8 @@ public void Set(long startIndex, long endIndex) /// One-past the last bit to clear public void Clear(long startIndex, long endIndex) { - Debug.Assert(startIndex >= 0 && startIndex < numBits); - Debug.Assert(endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); if (endIndex <= startIndex) { return; diff --git a/src/Lucene.Net/Util/LongsRef.cs b/src/Lucene.Net/Util/LongsRef.cs index 65c885ce6a..13946c8637 100644 --- a/src/Lucene.Net/Util/LongsRef.cs +++ b/src/Lucene.Net/Util/LongsRef.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; @@ -101,7 +101,7 @@ public Int64sRef(long[] longs, int offset, int length) this.longs = longs; this.Offset = offset; this.Length = length; - Debug.Assert(IsValid()); + Debugging.Assert(IsValid); } /// @@ -222,7 +222,7 @@ public void CopyInt64s(Int64sRef other) /// public void Grow(int newLength) { - Debug.Assert(Offset == 0); + Debugging.Assert(() => Offset == 0); if (longs.Length < newLength) { longs = ArrayUtil.Grow(longs, newLength); diff --git a/src/Lucene.Net/Util/MergedIterator.cs b/src/Lucene.Net/Util/MergedIterator.cs index 6165b50270..87696a743a 100644 --- a/src/Lucene.Net/Util/MergedIterator.cs +++ b/src/Lucene.Net/Util/MergedIterator.cs @@ -1,7 +1,7 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -113,7 +113,7 @@ public void Dispose() private void PullTop() { - Debug.Assert(numTop == 0); + Debugging.Assert(() => numTop == 0); top[numTop++] = queue.Pop(); if (removeDuplicates) { diff --git a/src/Lucene.Net/Util/OfflineSorter.cs b/src/Lucene.Net/Util/OfflineSorter.cs index ce15231f4b..f2bd12ecb2 100644 --- a/src/Lucene.Net/Util/OfflineSorter.cs +++ b/src/Lucene.Net/Util/OfflineSorter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Support.IO; using System; @@ -368,7 +369,7 @@ private FileInfo SortPartition(/*int len*/) // LUCENENET NOTE: made private, sin IBytesRefIterator iter = buffer.GetIterator(comparer); while ((spare = iter.Next()) != null) { - Debug.Assert(spare.Length <= ushort.MaxValue); + Debugging.Assert(() => spare.Length <= ushort.MaxValue); @out.Write(spare); } } @@ -533,7 +534,7 @@ private static BinaryWriterDataOutput NewBinaryWriterDataOutput(FileInfo file) /// public virtual void Write(BytesRef @ref) { - Debug.Assert(@ref != null); + Debugging.Assert(() => @ref != null); Write(@ref.Bytes, @ref.Offset, @ref.Length); } @@ -553,9 +554,9 @@ public virtual void Write(byte[] bytes) /// public virtual void Write(byte[] bytes, int off, int len) { - Debug.Assert(bytes != null); - Debug.Assert(off >= 0 && off + len <= bytes.Length); - Debug.Assert(len >= 0); + Debugging.Assert(() => bytes != null); + Debugging.Assert(() => off >= 0 && off + len <= bytes.Length); + Debugging.Assert(() => len >= 0); os.WriteInt16((short)len); os.WriteBytes(bytes, off, len); // LUCENENET NOTE: We call WriteBytes, since there is no Write() on Lucene's version of DataOutput } @@ -650,7 +651,7 @@ public virtual byte[] Read() } #pragma warning 
restore CA1031 // Do not catch general exception types - Debug.Assert(length >= 0, "Sanity: sequence length < 0: " + length); + Debugging.Assert(() => length >= 0, () => "Sanity: sequence length < 0: " + length); byte[] result = new byte[length]; inputStream.ReadBytes(result, 0, length); return result; diff --git a/src/Lucene.Net/Util/OpenBitSet.cs b/src/Lucene.Net/Util/OpenBitSet.cs index 0b5a58cbc1..4891078755 100644 --- a/src/Lucene.Net/Util/OpenBitSet.cs +++ b/src/Lucene.Net/Util/OpenBitSet.cs @@ -1,7 +1,7 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -200,7 +200,7 @@ public virtual bool Get(int index) /// public virtual bool FastGet(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int i = index >> 6; // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. @@ -230,7 +230,7 @@ public virtual bool Get(long index) /// public virtual bool FastGet(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int i = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -255,7 +255,7 @@ public boolean get1(int index) { /// public virtual int GetBit(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int i = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 return ((int)((long)((ulong)m_bits[i] >> bit))) & 0x01; @@ -286,7 +286,7 @@ public virtual void Set(long index) /// public virtual void FastSet(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -299,7 +299,7 @@ public virtual void FastSet(int index) /// public virtual void FastSet(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); int bit = (int)index & 0x3f; long bitmask = 1L << bit; @@ -354,7 +354,7 @@ protected virtual int ExpandingWordNum(long index) /// public virtual void FastClear(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; int bit = index & 0x03f; long bitmask = 1L << bit; @@ -374,7 +374,7 @@ public virtual void FastClear(int index) /// public virtual void FastClear(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -493,7 +493,7 @@ public virtual void Clear(long startIndex, long endIndex) /// public virtual bool GetAndSet(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -508,7 +508,7 @@ public virtual bool GetAndSet(int index) /// public virtual bool GetAndSet(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -523,7 +523,7 @@ public 
virtual bool GetAndSet(long index) /// public virtual void FastFlip(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -536,7 +536,7 @@ public virtual void FastFlip(int index) /// public virtual void FastFlip(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -559,7 +559,7 @@ public virtual void Flip(long index) /// public virtual bool FlipAndGet(int index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -573,7 +573,7 @@ public virtual bool FlipAndGet(int index) /// public virtual bool FlipAndGet(long index) { - Debug.Assert(index >= 0 && index < numBits); + Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -913,7 +913,7 @@ public virtual void Union(OpenBitSet other) // https://github.com/apache/lucenenet/pull/154 int oldLen = m_wlen; EnsureCapacityWords(newLen); - Debug.Assert((numBits = Math.Max(other.numBits, numBits)) >= 0); + Debugging.Assert(() => (numBits = Math.Max(other.numBits, numBits)) >= 0); long[] thisArr = this.m_bits; long[] otherArr = other.m_bits; @@ -953,7 +953,7 @@ public virtual void Xor(OpenBitSet other) // https://github.com/apache/lucenenet/pull/154 int oldLen = m_wlen; EnsureCapacityWords(newLen); - Debug.Assert((numBits = Math.Max(other.numBits, numBits)) >= 0); + Debugging.Assert(() => (numBits = Math.Max(other.numBits, numBits)) >= 0); long[] thisArr = this.m_bits; long[] otherArr = other.m_bits; @@ -1011,7 +1011,7 @@ public virtual void EnsureCapacityWords(int numWords) { m_bits = ArrayUtil.Grow(m_bits, numWords); m_wlen = numWords; - Debug.Assert((this.numBits = Math.Max(this.numBits, numWords << 6)) >= 0); + Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numWords << 6)) >= 0); } /// @@ -1023,7 +1023,7 @@ public virtual void EnsureCapacity(long numBits) EnsureCapacityWords(Bits2words(numBits)); // ensureCapacityWords sets numBits to a multiple of 64, but we want to set // it to exactly what the app asked. 
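// The change below keeps the assignment-inside-assert idiom: the ">= 0"
// comparison mainly hosts the "this.numBits = Math.Max(...)" side effect.
// A minimal sketch of how the two forms behave (surrounding names as in
// OpenBitSet):
//
//     // old form: Debug.Assert is [Conditional("DEBUG")], so the whole
//     // call, assignment included, is omitted from non-DEBUG builds
//     Debug.Assert((this.numBits = Math.Max(this.numBits, numBits)) >= 0);
//
//     // new form: the lambda body, assignment included, executes only
//     // while Debugging.AssertsEnabled is true at runtime
//     Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numBits)) >= 0);
//
// Either way the assignment is tied to whether assertions are active, which
// mirrors the upstream Java idiom of placing it inside an assert statement.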
- Debug.Assert((this.numBits = Math.Max(this.numBits, numBits)) >= 0); + Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numBits)) >= 0); } /// diff --git a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs index af07284293..2c5c5f19db 100644 --- a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs +++ b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs @@ -1,7 +1,7 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -63,7 +63,7 @@ static PForDeltaDocIdSet() for (int i = 1; i < ITERATIONS.Length; ++i) { DECODERS[i] = PackedInt32s.GetDecoder(PackedInt32s.Format.PACKED, PackedInt32s.VERSION_CURRENT, i); - Debug.Assert(BLOCK_SIZE % DECODERS[i].ByteValueCount == 0); + Debugging.Assert(() => BLOCK_SIZE % DECODERS[i].ByteValueCount == 0); ITERATIONS[i] = BLOCK_SIZE / DECODERS[i].ByteValueCount; BYTE_BLOCK_COUNTS[i] = ITERATIONS[i] * DECODERS[i].ByteBlockCount; maxByteBLockCount = Math.Max(maxByteBLockCount, DECODERS[i].ByteBlockCount); @@ -212,7 +212,7 @@ internal virtual int ComputeOptimalNumberOfBits() } } this.bitsPerException = actualBitsPerValue - bitsPerValue; - Debug.Assert(bufferSize < BLOCK_SIZE || numExceptions < bufferSize); + Debugging.Assert(() => bufferSize < BLOCK_SIZE || numExceptions < bufferSize); return blockSize; } @@ -231,7 +231,7 @@ internal virtual void PforEncode() buffer[i] &= mask; } } - Debug.Assert(ex == numExceptions); + Debugging.Assert(() => ex == numExceptions); Arrays.Fill(exceptions, numExceptions, BLOCK_SIZE, 0); } @@ -245,7 +245,7 @@ internal virtual void PforEncode() if (numExceptions > 0) { - Debug.Assert(bitsPerException > 0); + Debugging.Assert(() => bitsPerException > 0); data.WriteByte((byte)(sbyte)numExceptions); data.WriteByte((byte)(sbyte)bitsPerException); PackedInt32s.IEncoder encoder = PackedInt32s.GetEncoder(PackedInt32s.Format.PACKED, PackedInt32s.VERSION_CURRENT, bitsPerException); @@ -316,18 +316,18 @@ internal virtual void EncodeBlock() ++numBlocks; - Debug.Assert(data.Length - originalLength == blockSize, (data.Length - originalLength) + " <> " + blockSize); + Debugging.Assert(() => data.Length - originalLength == blockSize, () => (data.Length - originalLength) + " <> " + blockSize); } /// /// Build the instance. 
public virtual PForDeltaDocIdSet Build() { - Debug.Assert(bufferSize < BLOCK_SIZE); + Debugging.Assert(() => bufferSize < BLOCK_SIZE); if (cardinality == 0) { - Debug.Assert(previousDoc == -1); + Debugging.Assert(() => previousDoc == -1); return EMPTY; } @@ -469,7 +469,7 @@ internal virtual void PforDecompress(byte token) internal virtual void UnaryDecompress(byte token) { - Debug.Assert((token & HAS_EXCEPTIONS) == 0); + Debugging.Assert(() => (token & HAS_EXCEPTIONS) == 0); int docID = this.docID; for (int i = 0; i < BLOCK_SIZE; ) { @@ -505,7 +505,7 @@ internal virtual void DecompressBlock() internal virtual void SkipBlock() { - Debug.Assert(i == BLOCK_SIZE); + Debugging.Assert(() => i == BLOCK_SIZE); DecompressBlock(); docID = nextDocs[BLOCK_SIZE - 1]; } @@ -525,8 +525,8 @@ internal virtual int ForwardBinarySearch(int target) // advance forward and double the window at each step int indexSize = (int)docIDs.Count; int lo = Math.Max(blockIdx / indexInterval, 0), hi = lo + 1; - Debug.Assert(blockIdx == -1 || docIDs.Get(lo) <= docID); - Debug.Assert(lo + 1 == docIDs.Count || docIDs.Get(lo + 1) > docID); + Debugging.Assert(() => blockIdx == -1 || docIDs.Get(lo) <= docID); + Debugging.Assert(() => lo + 1 == docIDs.Count || docIDs.Get(lo + 1) > docID); while (true) { if (hi >= indexSize) @@ -557,14 +557,14 @@ internal virtual int ForwardBinarySearch(int target) hi = mid - 1; } } - Debug.Assert(docIDs.Get(hi) <= target); - Debug.Assert(hi + 1 == docIDs.Count || docIDs.Get(hi + 1) > target); + Debugging.Assert(() => docIDs.Get(hi) <= target); + Debugging.Assert(() => hi + 1 == docIDs.Count || docIDs.Get(hi + 1) > target); return hi; } public override int Advance(int target) { - Debug.Assert(target > docID); + Debugging.Assert(() => target > docID); if (nextDocs[BLOCK_SIZE - 1] < target) { // not in the next block, now use the index diff --git a/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs index 874c7e8488..cbab9fd6fe 100644 --- a/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs +++ b/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -110,7 +110,7 @@ internal virtual void Grow(int newBlockCount) public override sealed long Get(long index) { - Debug.Assert(index >= 0 && index < Count); + Debugging.Assert(() => index >= 0 && index < Count); int block = (int)(index >> pageShift); int element = (int)(index & pageMask); return Get(block, element); @@ -123,9 +123,9 @@ public override sealed long Get(long index) /// public int Get(long index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < Count); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < Count); + Debugging.Assert(() => off + len <= arr.Length); int block = (int)(index >> pageShift); int element = (int)(index & pageMask); @@ -193,7 +193,7 @@ internal void FillValues() /// Return the next long in the buffer. 
public long Next() { - Debug.Assert(HasNext); + Debugging.Assert(() => HasNext); long result = currentValues[pOff++]; if (pOff == currentCount) { diff --git a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs index b13dca239f..298c3e0248 100644 --- a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed @@ -72,7 +72,7 @@ protected AbstractBlockPackedWriter(DataOutput @out, int blockSize) // LUCENENET /// Reset this writer to wrap . The block size remains unchanged. public virtual void Reset(DataOutput @out) { - Debug.Assert(@out != null); + Debugging.Assert(() => @out != null); this.m_out = @out; m_off = 0; m_ord = 0L; diff --git a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs index 1e678a5825..ef91b66cb4 100644 --- a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs +++ b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -86,7 +86,7 @@ internal int IndexInPage(long index) public override sealed long Get(long index) { - Debug.Assert(index >= 0 && index < size); + Debugging.Assert(() => index >= 0 && index < size); int pageIndex = PageIndex(index); int indexInPage = IndexInPage(index); return subMutables[pageIndex].Get(indexInPage); @@ -96,7 +96,7 @@ public override sealed long Get(long index) /// Set value at . public void Set(long index, long value) { - Debug.Assert(index >= 0 && index < size); + Debugging.Assert(() => index >= 0 && index < size); int pageIndex = PageIndex(index); int indexInPage = IndexInPage(index); subMutables[pageIndex].Set(indexInPage, value); @@ -150,7 +150,7 @@ public T Resize(long newSize) /// Similar to . public T Grow(long minSize) { - Debug.Assert(minSize >= 0); + Debugging.Assert(() => minSize >= 0); if (minSize <= Count) { T result = (T)this; diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReader.cs b/src/Lucene.Net/Util/Packed/BlockPackedReader.cs index dfbd515f68..b2903c54fc 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedReader.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedReader.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -83,7 +83,7 @@ public BlockPackedReader(IndexInput @in, int packedIntsVersion, int blockSize, l public override long Get(long index) { - Debug.Assert(index >= 0 && index < valueCount); + Debugging.Assert(() => index >= 0 && index < valueCount); int block = (int)((long)((ulong)index >> blockShift)); int idx = (int)(index & blockMask); return (minValues == null ? 
0 : minValues[block]) + subReaders[block].Get(idx); diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs index 07473b6f03..e54dfcae98 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Util.Packed @@ -129,7 +129,7 @@ public BlockPackedReaderIterator(DataInput @in, int packedIntsVersion, int block public void Reset(DataInput @in, long valueCount) { this.@in = @in; - Debug.Assert(valueCount >= 0); + Debugging.Assert(() => valueCount >= 0); this.valueCount = valueCount; off = blockSize; ord = 0; @@ -139,7 +139,7 @@ public void Reset(DataInput @in, long valueCount) /// Skip exactly values. public void Skip(long count) { - Debug.Assert(count >= 0); + Debugging.Assert(() => count >= 0); if (ord + count > valueCount || ord + count < 0) { throw new EndOfStreamException(); @@ -156,7 +156,7 @@ public void Skip(long count) } // 2. skip as many blocks as necessary - Debug.Assert(off == blockSize); + Debugging.Assert(() => off == blockSize); while (count >= blockSize) { int token = @in.ReadByte() & 0xFF; @@ -180,7 +180,7 @@ public void Skip(long count) } // 3. skip last values - Debug.Assert(count < blockSize); + Debugging.Assert(() => count < blockSize); Refill(); ord += count; off += (int)count; @@ -229,7 +229,7 @@ public long Next() /// Read between 1 and values. public Int64sRef Next(int count) { - Debug.Assert(count > 0); + Debugging.Assert(() => count > 0); if (ord == valueCount) { throw new EndOfStreamException(); @@ -259,7 +259,7 @@ private void Refill() throw new IOException("Corrupted"); } long minValue = minEquals0 ? 
0L : ZigZagDecode(1L + ReadVInt64(@in)); - Debug.Assert(minEquals0 || minValue != 0); + Debugging.Assert(() => minEquals0 || minValue != 0); if (bitsPerValue == 0) { diff --git a/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs index e1666c1784..7dbb499f32 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed @@ -71,7 +71,7 @@ public BlockPackedWriter(DataOutput @out, int blockSize) [MethodImpl(MethodImplOptions.NoInlining)] protected override void Flush() { - Debug.Assert(m_off > 0); + Debugging.Assert(() => m_off > 0); long min = long.MaxValue, max = long.MinValue; for (int i = 0; i < m_off; ++i) { diff --git a/src/Lucene.Net/Util/Packed/BulkOperation.cs b/src/Lucene.Net/Util/Packed/BulkOperation.cs index df2ee00d32..eebd5a68e2 100644 --- a/src/Lucene.Net/Util/Packed/BulkOperation.cs +++ b/src/Lucene.Net/Util/Packed/BulkOperation.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; // this file has been automatically generated, DO NOT EDIT @@ -164,12 +164,12 @@ public static BulkOperation Of(PackedInt32s.Format format, int bitsPerValue) { if (format == PackedInt32s.Format.PACKED) { - Debug.Assert(packedBulkOps[bitsPerValue - 1] != null); + Debugging.Assert(() => packedBulkOps[bitsPerValue - 1] != null); return packedBulkOps[bitsPerValue - 1]; } else if (format == PackedInt32s.Format.PACKED_SINGLE_BLOCK) { - Debug.Assert(packedSingleBlockBulkOps[bitsPerValue - 1] != null); + Debugging.Assert(() => packedSingleBlockBulkOps[bitsPerValue - 1] != null); return packedSingleBlockBulkOps[bitsPerValue - 1]; } else diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs index b59e4fae5f..0ea2732093 100644 --- a/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs +++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -36,7 +36,7 @@ internal class BulkOperationPacked : BulkOperation public BulkOperationPacked(int bitsPerValue) { this.bitsPerValue = bitsPerValue; - Debug.Assert(bitsPerValue > 0 && bitsPerValue <= 64); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64); int blocks = bitsPerValue; while ((blocks & 1) == 0) { @@ -62,7 +62,7 @@ public BulkOperationPacked(int bitsPerValue) this.mask = (1L << bitsPerValue) - 1; } this.intMask = (int)mask; - Debug.Assert(longValueCount * bitsPerValue == 64 * longBlockCount); + Debugging.Assert(() => longValueCount * bitsPerValue == 64 * longBlockCount); } /// @@ -125,7 +125,7 @@ public override void Decode(byte[] blocks, int blocksOffset, long[] values, int nextValue = (bytes & ((1L << bits) - 1)) << bitsLeft; } } - Debug.Assert(bitsLeft == bitsPerValue); + Debugging.Assert(() => bitsLeft == bitsPerValue); } public override void Decode(long[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations) @@ -178,7 +178,7 @@ public override void Decode(byte[] blocks, int blocksOffset, int[] values, int v nextValue = (bytes & ((1 << bits) - 1)) << bitsLeft; } } - Debug.Assert(bitsLeft == bitsPerValue); + Debugging.Assert(() => bitsLeft == bitsPerValue); } public override void Encode(long[] values, int valuesOffset, long[] blocks, int blocksOffset, int 
iterations) @@ -244,7 +244,7 @@ public override void Encode(long[] values, int valuesOffset, byte[] blocks, int for (int i = 0; i < byteValueCount * iterations; ++i) { long v = values[valuesOffset++]; - Debug.Assert(bitsPerValue == 64 || PackedInt32s.BitsRequired(v) <= bitsPerValue); + Debugging.Assert(() => bitsPerValue == 64 || PackedInt32s.BitsRequired(v) <= bitsPerValue); if (bitsPerValue < bitsLeft) { // just buffer @@ -266,7 +266,7 @@ public override void Encode(long[] values, int valuesOffset, byte[] blocks, int nextBlock = (int)((v & ((1L << bits) - 1)) << bitsLeft); } } - Debug.Assert(bitsLeft == 8); + Debugging.Assert(() => bitsLeft == 8); } public override void Encode(int[] values, int valuesOffset, byte[] blocks, int blocksOffset, int iterations) @@ -276,7 +276,7 @@ public override void Encode(int[] values, int valuesOffset, byte[] blocks, int b for (int i = 0; i < byteValueCount * iterations; ++i) { int v = values[valuesOffset++]; - Debug.Assert(PackedInt32s.BitsRequired(v & 0xFFFFFFFFL) <= bitsPerValue); + Debugging.Assert(() => PackedInt32s.BitsRequired(v & 0xFFFFFFFFL) <= bitsPerValue); if (bitsPerValue < bitsLeft) { // just buffer @@ -298,7 +298,7 @@ public override void Encode(int[] values, int valuesOffset, byte[] blocks, int b nextBlock = (v & ((1 << bits) - 1)) << bitsLeft; } } - Debug.Assert(bitsLeft == 8); + Debugging.Assert(() => bitsLeft == 8); } } } \ No newline at end of file diff --git a/src/Lucene.Net/Util/Packed/Direct16.cs b/src/Lucene.Net/Util/Packed/Direct16.cs index 579183ca05..5101e9c9ec 100644 --- a/src/Lucene.Net/Util/Packed/Direct16.cs +++ b/src/Lucene.Net/Util/Packed/Direct16.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; // this file has been automatically generated, DO NOT EDIT @@ -88,9 +88,9 @@ public override object GetArray() public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int gets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + gets; i < end; ++i, ++o) @@ -102,9 +102,9 @@ public override int Get(int index, long[] arr, int off, int len) public override int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int sets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + sets; i < end; ++i, ++o) @@ -116,7 +116,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - Debug.Assert(val == (val & 0xFFFFL)); + Debugging.Assert(() => val == (val & 0xFFFFL)); Arrays.Fill(values, fromIndex, toIndex, (short)val); } } diff --git a/src/Lucene.Net/Util/Packed/Direct32.cs b/src/Lucene.Net/Util/Packed/Direct32.cs index 2ac817346a..af9741b761 100644 --- a/src/Lucene.Net/Util/Packed/Direct32.cs +++ b/src/Lucene.Net/Util/Packed/Direct32.cs @@ -1,6 +1,6 @@ +using 
Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; // this file has been automatically generated, DO NOT EDIT @@ -88,9 +88,9 @@ public override object GetArray() public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int gets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + gets; i < end; ++i, ++o) @@ -102,9 +102,9 @@ public override int Get(int index, long[] arr, int off, int len) public override int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int sets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + sets; i < end; ++i, ++o) @@ -116,7 +116,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - Debug.Assert(val == (val & 0xFFFFFFFFL)); + Debugging.Assert(() => val == (val & 0xFFFFFFFFL)); Arrays.Fill(values, fromIndex, toIndex, (int)val); } } diff --git a/src/Lucene.Net/Util/Packed/Direct64.cs b/src/Lucene.Net/Util/Packed/Direct64.cs index f4163e4201..31c61ffafe 100644 --- a/src/Lucene.Net/Util/Packed/Direct64.cs +++ b/src/Lucene.Net/Util/Packed/Direct64.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; // this file has been automatically generated, DO NOT EDIT @@ -82,9 +82,9 @@ public override object GetArray() public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int gets = Math.Min(m_valueCount - index, len); System.Array.Copy(values, index, arr, off, gets); @@ -93,9 +93,9 @@ public override int Get(int index, long[] arr, int off, int len) public override int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int sets = Math.Min(m_valueCount - index, len); System.Array.Copy(arr, off, values, index, sets); diff --git a/src/Lucene.Net/Util/Packed/Direct8.cs b/src/Lucene.Net/Util/Packed/Direct8.cs index 3afdd69c61..e0e6fe27d5 100644 --- a/src/Lucene.Net/Util/Packed/Direct8.cs +++ b/src/Lucene.Net/Util/Packed/Direct8.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; // this file has been 
automatically generated, DO NOT EDIT @@ -85,9 +85,9 @@ public override object GetArray() public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int gets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + gets; i < end; ++i, ++o) @@ -99,9 +99,9 @@ public override int Get(int index, long[] arr, int off, int len) public override int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int sets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + sets; i < end; ++i, ++o) @@ -113,7 +113,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - Debug.Assert(val == (val & 0xFFL)); + Debugging.Assert(() => val == (val & 0xFFL)); Arrays.Fill(values, fromIndex, toIndex, (byte)val); } } diff --git a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs index 2e89427dd2..6cb2e90e2b 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs @@ -1,6 +1,6 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.Globalization; namespace Lucene.Net.Util.Packed @@ -126,7 +126,7 @@ private static long UnPackValue(long[] longArray, int numBits, long packIndex, l /// The low value for the current decoding index. private long CurrentLowValue() { - Debug.Assert(((efIndex >= 0) && (efIndex < numEncoded)), $"efIndex {efIndex.ToString(CultureInfo.InvariantCulture)}"); + Debugging.Assert(() => ((efIndex >= 0) && (efIndex < numEncoded)), () => $"efIndex {efIndex.ToString(CultureInfo.InvariantCulture)}"); return UnPackValue(efEncoder.lowerLongs, efEncoder.numLowBits, efIndex, efEncoder.lowerBitsMask); } @@ -249,7 +249,7 @@ public virtual long NextValue() /// public virtual bool AdvanceToIndex(long index) { - Debug.Assert(index > efIndex); + Debugging.Assert(() => index > efIndex); if (index >= numEncoded) { efIndex = numEncoded; @@ -257,7 +257,7 @@ public virtual bool AdvanceToIndex(long index) } if (!ToAfterCurrentHighBit()) { - Debug.Assert(false); + Debugging.Assert(() => false); } /* CHECKME: Add a (binary) search in the upperZeroBitPositions here. 
*/ int curSetBits = curHighLong.PopCount(); @@ -275,7 +275,7 @@ public virtual bool AdvanceToIndex(long index) */ if (!ToAfterCurrentHighBit()) { - Debug.Assert(false); + Debugging.Assert(() => false); } ToNextHighValue(); } @@ -312,7 +312,7 @@ public virtual long AdvanceToValue(long target) indexEntryIndex = numIndexEntries - 1; // no further than last index entry } long indexHighValue = (indexEntryIndex + 1) * efEncoder.indexInterval; - Debug.Assert(indexHighValue <= highTarget); + Debugging.Assert(() => indexHighValue <= highTarget); if (indexHighValue > (setBitForIndex - efIndex)) // advance to just after zero bit position of index entry. { setBitForIndex = UnPackValue(efEncoder.upperZeroBitPositionIndex, efEncoder.nIndexEntryBits, indexEntryIndex, indexMask); @@ -321,7 +321,7 @@ public virtual long AdvanceToValue(long target) upperLong = efEncoder.upperLongs[highIndex]; curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1)))); // may contain the unary 1 bit for efIndex } - Debug.Assert(efIndex < numEncoded); // there is a high value to be found. + Debugging.Assert(() => efIndex < numEncoded); // there is a high value to be found. } int curSetBits = curHighLong.PopCount(); // shifted right. @@ -337,7 +337,7 @@ public virtual long AdvanceToValue(long target) } setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1)); // highIndex = (int)(setBitForIndex >>> LOG2_LONG_SIZE); - Debug.Assert((highIndex + 1) == (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE))); + Debugging.Assert(() => (highIndex + 1) == (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE))); highIndex += 1; upperLong = efEncoder.upperLongs[highIndex]; curHighLong = upperLong; @@ -348,7 +348,7 @@ public virtual long AdvanceToValue(long target) while (curHighLong == 0L) { setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1)); - Debug.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); + Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); highIndex += 1; upperLong = efEncoder.upperLongs[highIndex]; curHighLong = upperLong; @@ -356,12 +356,12 @@ public virtual long AdvanceToValue(long target) // curHighLong has enough clear bits to reach highTarget, has at least 1 set bit, and may not have enough set bits. int rank = (int)(highTarget - (setBitForIndex - efIndex)); // the rank of the zero bit for highValue. - Debug.Assert((rank <= (sizeof(long) * 8)), ("rank " + rank)); + Debugging.Assert(() => (rank <= (sizeof(long) * 8)), () => ("rank " + rank)); if (rank >= 1) { long invCurHighLong = ~curHighLong; int clearBitForValue = (rank <= 8) ? 
BroadWord.SelectNaive(invCurHighLong, rank) : BroadWord.Select(invCurHighLong, rank); - Debug.Assert(clearBitForValue <= ((sizeof(long) * 8) - 1)); + Debugging.Assert(() => clearBitForValue <= ((sizeof(long) * 8) - 1)); setBitForIndex += clearBitForValue + 1; // the high bit just before setBitForIndex is zero int oneBitsBeforeClearBit = clearBitForValue - rank + 1; efIndex += oneBitsBeforeClearBit; // the high bit at setBitForIndex and belongs to the unary code for efIndex @@ -372,14 +372,14 @@ public virtual long AdvanceToValue(long target) if ((setBitForIndex & ((sizeof(long) * 8) - 1)) == 0L) // exhausted curHighLong { - Debug.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); + Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); highIndex += 1; upperLong = efEncoder.upperLongs[highIndex]; curHighLong = upperLong; } else { - Debug.Assert(highIndex == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); + Debugging.Assert(() => highIndex == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1)))); } // curHighLong has enough clear bits to reach highTarget, and may not have enough set bits. @@ -387,14 +387,14 @@ public virtual long AdvanceToValue(long target) while (curHighLong == 0L) { setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1)); - Debug.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); + Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); highIndex += 1; upperLong = efEncoder.upperLongs[highIndex]; curHighLong = upperLong; } } setBitForIndex += curHighLong.TrailingZeroCount(); - Debug.Assert((setBitForIndex - efIndex) >= highTarget); // highTarget reached + Debugging.Assert(() => (setBitForIndex - efIndex) >= highTarget); // highTarget reached // Linear search also with low values long currentValue = CombineHighLowValues((setBitForIndex - efIndex), CurrentLowValue()); diff --git a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs index 339d9578fb..1fb241caae 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs @@ -1,7 +1,7 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; @@ -177,7 +177,7 @@ public EliasFanoEncoder(long numValues, long upperBound, long indexInterval) this.lowerLongs = new long[(int)numLongsForLowBits]; long numHighBitsClear = (long)((ulong)((this.upperBound > 0) ? 
this.upperBound : 0) >> this.numLowBits); - Debug.Assert(numHighBitsClear <= (2 * this.numValues)); + Debugging.Assert(() => numHighBitsClear <= (2 * this.numValues)); long numHighBitsSet = this.numValues; long numLongsForHighBits = NumInt64sForBits(numHighBitsClear + numHighBitsSet); @@ -219,7 +219,7 @@ public EliasFanoEncoder(long numValues, long upperBound) /// private static long NumInt64sForBits(long numBits) // Note: int version in FixedBitSet.bits2words() { - Debug.Assert(numBits >= 0, numBits.ToString()); + Debugging.Assert(() => numBits >= 0, numBits.ToString); return (long)((ulong)(numBits + (sizeof(long) * 8 - 1)) >> LOG2_INT64_SIZE); } diff --git a/src/Lucene.Net/Util/Packed/GrowableWriter.cs b/src/Lucene.Net/Util/Packed/GrowableWriter.cs index ac0b483d6a..818fa73d6c 100644 --- a/src/Lucene.Net/Util/Packed/GrowableWriter.cs +++ b/src/Lucene.Net/Util/Packed/GrowableWriter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -76,7 +76,7 @@ private void EnsureCapacity(long value) return; } int bitsRequired = value < 0 ? 64 : PackedInt32s.BitsRequired(value); - Debug.Assert(bitsRequired > current.BitsPerValue); + Debugging.Assert(() => bitsRequired > current.BitsPerValue); int valueCount = Count; PackedInt32s.Mutable next = PackedInt32s.GetMutable(valueCount, bitsRequired, acceptableOverheadRatio); PackedInt32s.Copy(current, 0, next, 0, valueCount, PackedInt32s.DEFAULT_BUFFER_SIZE); diff --git a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs index 7a7415c5f0..1fb52a9237 100644 --- a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs +++ b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -137,7 +137,7 @@ internal override void Grow(int newBlockCount) internal override void PackPendingValues() { - Debug.Assert(pendingOff > 0); + Debugging.Assert(() => pendingOff > 0); minValues[valuesOff] = pending[0]; averages[valuesOff] = pendingOff == 1 ? 
0 : (float)(pending[pendingOff - 1] - pending[0]) / (pendingOff - 1); diff --git a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs index 5c64cef9f1..fb9550a49a 100644 --- a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs +++ b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -78,7 +78,7 @@ public MonotonicBlockPackedReader(IndexInput @in, int packedIntsVersion, int blo public override long Get(long index) { - Debug.Assert(index >= 0 && index < valueCount); + Debugging.Assert(() => index >= 0 && index < valueCount); int block = (int)((long)((ulong)index >> blockShift)); int idx = (int)(index & blockMask); // LUCENENET NOTE: IMPORTANT: The cast to float is critical here for it to work in x86 diff --git a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs index ce0d0a3dbb..5ddc277eec 100644 --- a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs @@ -1,5 +1,5 @@ using System; -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed @@ -66,14 +66,14 @@ public MonotonicBlockPackedWriter(DataOutput @out, int blockSize) public override void Add(long l) { - Debug.Assert(l >= 0); + Debugging.Assert(() => l >= 0); base.Add(l); } [MethodImpl(MethodImplOptions.NoInlining)] protected override void Flush() { - Debug.Assert(m_off > 0); + Debugging.Assert(() => m_off > 0); // TODO: perform a true linear regression? long min = m_values[0]; diff --git a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs index 538a77729e..d8a96b7704 100644 --- a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs +++ b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; // this file has been automatically generated, DO NOT EDIT @@ -69,9 +69,9 @@ public override long Get(int index) public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int gets = Math.Min(m_valueCount - index, len); for (int i = index * 3, end = (index + gets) * 3; i < end; i += 3) @@ -91,9 +91,9 @@ public override void Set(int index, long value) public override int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int sets = Math.Min(m_valueCount - index, len); for (int i = off, o = index * 3, end = off + sets; i < end; ++i) diff --git a/src/Lucene.Net/Util/Packed/Packed64.cs b/src/Lucene.Net/Util/Packed/Packed64.cs index 77dec8f39b..16b94eb9ea 100644 --- 
a/src/Lucene.Net/Util/Packed/Packed64.cs +++ b/src/Lucene.Net/Util/Packed/Packed64.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Util.Packed @@ -79,11 +79,11 @@ public Packed64(int valueCount, int bitsPerValue) /*var a = ~0L << (int)((uint)(BLOCK_SIZE - bitsPerValue) >> (BLOCK_SIZE - bitsPerValue)); //original var b = (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue); //mod - Debug.Assert(a == b, "a: " + a, ", b: " + b);*/ + Debugging.Assert(a == b, "a: " + a, ", b: " + b);*/ maskRight = (long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)); //mod - //Debug.Assert((long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)) == (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)); + //Debugging.Assert((long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)) == (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)); bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE; } @@ -157,7 +157,7 @@ public override long Get(int index) { var mod = (long) ((ulong) (Blocks[elementPos]) >> (int) (-endBits)) & MaskRight; var og = ((long) ((ulong) Blocks[elementPos] >> (int) -endBits)) & MaskRight; - Debug.Assert(mod == og); + Debugging.Assert(() => mod == og); //return (long)((ulong)(Blocks[elementPos]) >> (int)(-endBits)) & MaskRight; return ((long)((ulong)Blocks[elementPos] >> (int)-endBits)) & MaskRight; @@ -166,7 +166,7 @@ public override long Get(int index) var a = (((Blocks[elementPos] << (int)endBits) | (long)(((ulong)(Blocks[elementPos + 1])) >> (int)(BLOCK_SIZE - endBits))) & MaskRight); var b = ((Blocks[elementPos] << (int)endBits) | ((long)((ulong)Blocks[elementPos + 1] >> (int)(BLOCK_SIZE - endBits)))) & MaskRight; - Debug.Assert(a == b); + Debugging.Assert(() => a == b); //return (((Blocks[elementPos] << (int)endBits) | (long)(((ulong)(Blocks[elementPos + 1])) >> (int)(BLOCK_SIZE - endBits))) & MaskRight); return ((Blocks[elementPos] << (int)endBits) | ((long)((ulong)Blocks[elementPos + 1] >> (int)(BLOCK_SIZE - endBits)))) & MaskRight; @@ -174,10 +174,10 @@ public override long Get(int index) public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); len = Math.Min(len, m_valueCount - index); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => off + len <= arr.Length); int originalIndex = index; PackedInt32s.IDecoder decoder = BulkOperation.Of(PackedInt32s.Format.PACKED, m_bitsPerValue); @@ -198,15 +198,15 @@ public override int Get(int index, long[] arr, int off, int len) } // bulk get - Debug.Assert(index % decoder.Int64ValueCount == 0); + Debugging.Assert(() => index % decoder.Int64ValueCount == 0); int blockIndex = (int)((ulong)((long)index * m_bitsPerValue) >> BLOCK_BITS); - Debug.Assert((((long)index * m_bitsPerValue) & MOD_MASK) == 0); + Debugging.Assert(() => (((long)index * m_bitsPerValue) & MOD_MASK) == 0); int iterations = len / decoder.Int64ValueCount; decoder.Decode(blocks, blockIndex, arr, off, iterations); int gotValues = iterations * decoder.Int64ValueCount; index += gotValues; len -= gotValues; - Debug.Assert(len >= 0); + Debugging.Assert(() => len >= 0); if (index >
originalIndex) { @@ -216,7 +216,7 @@ public override int Get(int index, long[] arr, int off, int len) else { // no progress so far => already at a block boundary but no full block to get - Debug.Assert(index == originalIndex); + Debugging.Assert(() => index == originalIndex); return base.Get(index, arr, off, len); } } @@ -242,10 +242,10 @@ public override void Set(int index, long value) public override int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); len = Math.Min(len, m_valueCount - index); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => off + len <= arr.Length); int originalIndex = index; PackedInt32s.IEncoder encoder = BulkOperation.Of(PackedInt32s.Format.PACKED, m_bitsPerValue); @@ -266,15 +266,15 @@ public override int Set(int index, long[] arr, int off, int len) } // bulk set - Debug.Assert(index % encoder.Int64ValueCount == 0); + Debugging.Assert(() => index % encoder.Int64ValueCount == 0); int blockIndex = (int)((ulong)((long)index * m_bitsPerValue) >> BLOCK_BITS); - Debug.Assert((((long)index * m_bitsPerValue) & MOD_MASK) == 0); + Debugging.Assert(() => (((long)index * m_bitsPerValue) & MOD_MASK) == 0); int iterations = len / encoder.Int64ValueCount; encoder.Encode(arr, off, blocks, blockIndex, iterations); int setValues = iterations * encoder.Int64ValueCount; index += setValues; len -= setValues; - Debug.Assert(len >= 0); + Debugging.Assert(() => len >= 0); if (index > originalIndex) { @@ -284,7 +284,7 @@ public override int Set(int index, long[] arr, int off, int len) else { // no progress so far => already at a block boundary but no full block to get - Debug.Assert(index == originalIndex); + Debugging.Assert(() => index == originalIndex); return base.Set(index, arr, off, len); } } @@ -306,8 +306,8 @@ public override long RamBytesUsed() public override void Fill(int fromIndex, int toIndex, long val) { - Debug.Assert(PackedInt32s.BitsRequired(val) <= BitsPerValue); - Debug.Assert(fromIndex <= toIndex); + Debugging.Assert(() => PackedInt32s.BitsRequired(val) <= BitsPerValue); + Debugging.Assert(() => fromIndex <= toIndex); // minimum number of values that use an exact number of full blocks int nAlignedValues = 64 / Gcd(64, m_bitsPerValue); @@ -329,7 +329,7 @@ public override void Fill(int fromIndex, int toIndex, long val) Set(fromIndex++, val); } } - Debug.Assert(fromIndex % nAlignedValues == 0); + Debugging.Assert(() => fromIndex % nAlignedValues == 0); // compute the long[] blocks for nAlignedValues consecutive values and // use them to set as many values as possible without applying any mask @@ -343,7 +343,7 @@ public override void Fill(int fromIndex, int toIndex, long val) values.Set(i, val); } nAlignedValuesBlocks = values.blocks; - Debug.Assert(nAlignedBlocks <= nAlignedValuesBlocks.Length); + Debugging.Assert(() => nAlignedBlocks <= nAlignedValuesBlocks.Length); } int startBlock = (int)((ulong)((long)fromIndex * m_bitsPerValue) >> 6); int endBlock = (int)((ulong)((long)toIndex * m_bitsPerValue) >> 6); diff --git a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs index 421790087b..39355a61bf 100644 --- a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs +++ b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs @@ -1,6 +1,6 @@ +using 
Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; // this file has been automatically generated, DO NOT EDIT @@ -50,7 +50,7 @@ private static int RequiredCapacity(int valueCount, int valuesPerBlock) internal Packed64SingleBlock(int valueCount, int bitsPerValue) : base(valueCount, bitsPerValue) { - Debug.Assert(IsSupported(bitsPerValue)); + Debugging.Assert(() => IsSupported(bitsPerValue)); int valuesPerBlock = 64 / bitsPerValue; blocks = new long[RequiredCapacity(valueCount, valuesPerBlock)]; } @@ -71,10 +71,10 @@ public override long RamBytesUsed() public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); len = Math.Min(len, m_valueCount - index); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => off + len <= arr.Length); int originalIndex = index; @@ -95,10 +95,10 @@ public override int Get(int index, long[] arr, int off, int len) } // bulk get - Debug.Assert(index % valuesPerBlock == 0); + Debugging.Assert(() => index % valuesPerBlock == 0); PackedInt32s.IDecoder decoder = BulkOperation.Of(PackedInt32s.Format.PACKED_SINGLE_BLOCK, m_bitsPerValue); - Debug.Assert(decoder.Int64BlockCount == 1); - Debug.Assert(decoder.Int64ValueCount == valuesPerBlock); + Debugging.Assert(() => decoder.Int64BlockCount == 1); + Debugging.Assert(() => decoder.Int64ValueCount == valuesPerBlock); int blockIndex = index / valuesPerBlock; int nblocks = (index + len) / valuesPerBlock - blockIndex; decoder.Decode(blocks, blockIndex, arr, off, nblocks); @@ -115,17 +115,17 @@ public override int Get(int index, long[] arr, int off, int len) { // no progress so far => already at a block boundary but no full block to // get - Debug.Assert(index == originalIndex); + Debugging.Assert(() => index == originalIndex); return base.Get(index, arr, off, len); } } public override int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); len = Math.Min(len, m_valueCount - index); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => off + len <= arr.Length); int originalIndex = index; @@ -146,10 +146,10 @@ public override int Set(int index, long[] arr, int off, int len) } // bulk set - Debug.Assert(index % valuesPerBlock == 0); + Debugging.Assert(() => index % valuesPerBlock == 0); BulkOperation op = BulkOperation.Of(PackedInt32s.Format.PACKED_SINGLE_BLOCK, m_bitsPerValue); - Debug.Assert(op.Int64BlockCount == 1); - Debug.Assert(op.Int64ValueCount == valuesPerBlock); + Debugging.Assert(() => op.Int64BlockCount == 1); + Debugging.Assert(() => op.Int64ValueCount == valuesPerBlock); int blockIndex = index / valuesPerBlock; int nblocks = (index + len) / valuesPerBlock - blockIndex; op.Encode(arr, off, blocks, blockIndex, nblocks); @@ -166,16 +166,16 @@ public override int Set(int index, long[] arr, int off, int len) { // no progress so far => already at a block boundary but no full block to // set - Debug.Assert(index == originalIndex); + Debugging.Assert(() => index == originalIndex); return base.Set(index, arr, off, len); } } public override void Fill(int 
fromIndex, int toIndex, long val) { - Debug.Assert(fromIndex >= 0); - Debug.Assert(fromIndex <= toIndex); - Debug.Assert(PackedInt32s.BitsRequired(val) <= m_bitsPerValue); + Debugging.Assert(() => fromIndex >= 0); + Debugging.Assert(() => fromIndex <= toIndex); + Debugging.Assert(() => PackedInt32s.BitsRequired(val) <= m_bitsPerValue); int valuesPerBlock = 64 / m_bitsPerValue; if (toIndex - fromIndex <= valuesPerBlock << 1) @@ -194,13 +194,13 @@ public override void Fill(int fromIndex, int toIndex, long val) { Set(fromIndex++, val); } - Debug.Assert(fromIndex % valuesPerBlock == 0); + Debugging.Assert(() => fromIndex % valuesPerBlock == 0); } // bulk set of the inner blocks int fromBlock = fromIndex / valuesPerBlock; int toBlock = toIndex / valuesPerBlock; - Debug.Assert(fromBlock * valuesPerBlock == fromIndex); + Debugging.Assert(() => fromBlock * valuesPerBlock == fromIndex); long blockValue = 0L; for (int i = 0; i < valuesPerBlock; ++i) diff --git a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs index e63da2dee8..45d690c01d 100644 --- a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs +++ b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; // this file has been automatically generated, DO NOT EDIT @@ -66,9 +66,9 @@ public override long Get(int index) public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int gets = Math.Min(m_valueCount - index, len); for (int i = index * 3, end = (index + gets) * 3; i < end; i += 3) @@ -88,9 +88,9 @@ public override void Set(int index, long value) public override int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < m_valueCount); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); int sets = Math.Min(m_valueCount - index, len); for (int i = off, o = index * 3, end = off + sets; i < end; ++i) diff --git a/src/Lucene.Net/Util/Packed/PackedDataInput.cs b/src/Lucene.Net/Util/Packed/PackedDataInput.cs index 0764ec700f..bf0921353f 100644 --- a/src/Lucene.Net/Util/Packed/PackedDataInput.cs +++ b/src/Lucene.Net/Util/Packed/PackedDataInput.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -52,7 +52,7 @@ public PackedDataInput(DataInput @in) /// public long ReadInt64(int bitsPerValue) { - Debug.Assert(bitsPerValue > 0 && bitsPerValue <= 64, bitsPerValue.ToString()); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, bitsPerValue.ToString); long r = 0; while (bitsPerValue > 0) { diff --git a/src/Lucene.Net/Util/Packed/PackedDataOutput.cs b/src/Lucene.Net/Util/Packed/PackedDataOutput.cs index d18ad90414..87f83e2710 100644 --- a/src/Lucene.Net/Util/Packed/PackedDataOutput.cs +++ b/src/Lucene.Net/Util/Packed/PackedDataOutput.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; 
-using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed @@ -53,7 +53,7 @@ public PackedDataOutput(DataOutput @out) /// public void WriteInt64(long value, int bitsPerValue) { - Debug.Assert(bitsPerValue == 64 || (value >= 0 && value <= PackedInt32s.MaxValue(bitsPerValue))); + Debugging.Assert(() => bitsPerValue == 64 || (value >= 0 && value <= PackedInt32s.MaxValue(bitsPerValue))); while (bitsPerValue > 0) { if (remainingBits == 0) diff --git a/src/Lucene.Net/Util/Packed/PackedInts.cs b/src/Lucene.Net/Util/Packed/PackedInts.cs index 54839b1fce..c9da5f8d8b 100644 --- a/src/Lucene.Net/Util/Packed/PackedInts.cs +++ b/src/Lucene.Net/Util/Packed/PackedInts.cs @@ -1,8 +1,8 @@ using J2N.Numerics; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Util.Packed @@ -242,7 +242,7 @@ public virtual bool IsSupported(int bitsPerValue) /// public virtual float OverheadPerValue(int bitsPerValue) { - Debug.Assert(IsSupported(bitsPerValue)); + Debugging.Assert(() => IsSupported(bitsPerValue)); return 0f; } @@ -251,7 +251,7 @@ public virtual float OverheadPerValue(int bitsPerValue) /// public virtual float OverheadRatio(int bitsPerValue) { - Debug.Assert(IsSupported(bitsPerValue)); + Debugging.Assert(() => IsSupported(bitsPerValue)); return OverheadPerValue(bitsPerValue) / bitsPerValue; } } @@ -532,9 +532,9 @@ public abstract class Reader : NumericDocValues /// public virtual int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < Count); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < Count); + Debugging.Assert(() => off + len <= arr.Length); int gets = Math.Min(Count - index, len); for (int i = index, o = off, end = index + gets; i < end; ++i, ++o) @@ -573,7 +573,7 @@ public virtual int Get(int index, long[] arr, int off, int len) /// public virtual object GetArray() { - Debug.Assert(!HasArray); + Debugging.Assert(() => !HasArray); return null; } @@ -633,7 +633,7 @@ protected ReaderIterator(int valueCount, int bitsPerValue, DataInput @in) public virtual long Next() { Int64sRef nextValues = Next(1); - Debug.Assert(nextValues.Length > 0); + Debugging.Assert(() => nextValues.Length > 0); long result = nextValues.Int64s[nextValues.Offset]; ++nextValues.Offset; --nextValues.Length; @@ -670,10 +670,10 @@ public abstract class Mutable : Reader /// public virtual int Set(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < Count); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < Count); len = Math.Min(len, Count - index); - Debug.Assert(off + len <= arr.Length); + Debugging.Assert(() => off + len <= arr.Length); for (int i = index, o = off, end = index + len; i < end; ++i, ++o) { @@ -688,8 +688,8 @@ public virtual int Set(int index, long[] arr, int off, int len) /// public virtual void Fill(int fromIndex, int toIndex, long val) { - Debug.Assert(val <= MaxValue(BitsPerValue)); - Debug.Assert(fromIndex <= toIndex); + Debugging.Assert(() => val <= MaxValue(BitsPerValue)); + Debugging.Assert(() => fromIndex <= toIndex); for (int i = fromIndex; i < toIndex; ++i) { Set(i, val); @@ -738,7 
+738,7 @@ internal abstract class ReaderImpl : Reader protected ReaderImpl(int valueCount, int bitsPerValue) { this.m_bitsPerValue = bitsPerValue; - Debug.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue=" + bitsPerValue); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); this.m_valueCount = valueCount; } @@ -757,7 +757,7 @@ public abstract class MutableImpl : Mutable protected MutableImpl(int valueCount, int bitsPerValue) { this.m_valueCount = valueCount; - Debug.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue=" + bitsPerValue); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); this.m_bitsPerValue = bitsPerValue; } @@ -786,8 +786,8 @@ public override long Get(int index) public override int Get(int index, long[] arr, int off, int len) { - Debug.Assert(len > 0, "len must be > 0 (got " + len + ")"); - Debug.Assert(index >= 0 && index < valueCount); + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < valueCount); len = Math.Min(len, valueCount - index); Arrays.Fill(arr, off, off + len, 0); return len; @@ -816,8 +816,8 @@ public abstract class Writer protected Writer(DataOutput @out, int valueCount, int bitsPerValue) { - Debug.Assert(bitsPerValue <= 64); - Debug.Assert(valueCount >= 0 || valueCount == -1); + Debugging.Assert(() => bitsPerValue <= 64); + Debugging.Assert(() => valueCount >= 0 || valueCount == -1); this.m_out = @out; this.m_valueCount = valueCount; this.m_bitsPerValue = bitsPerValue; @@ -825,7 +825,7 @@ protected Writer(DataOutput @out, int valueCount, int bitsPerValue) internal virtual void WriteHeader() { - Debug.Assert(m_valueCount != -1); + Debugging.Assert(() => m_valueCount != -1); CodecUtil.WriteHeader(m_out, CODEC_NAME, VERSION_CURRENT); m_out.WriteVInt32(m_bitsPerValue); m_out.WriteVInt32(m_valueCount); @@ -972,7 +972,7 @@ public static Reader GetReader(DataInput @in) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - Debug.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue=" + bitsPerValue); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); @@ -1014,7 +1014,7 @@ public static IReaderIterator GetReaderIterator(DataInput @in, int mem) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - Debug.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue=" + bitsPerValue); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); return GetReaderIteratorNoHeader(@in, format, version, valueCount, bitsPerValue, mem); @@ -1050,7 +1050,7 @@ public static Reader GetDirectReaderNoHeader(IndexInput @in, Format format, int long byteCount = format.ByteCount(version, valueCount, bitsPerValue); if (byteCount != format.ByteCount(VERSION_CURRENT, valueCount, bitsPerValue)) { - Debug.Assert(version == VERSION_START); + Debugging.Assert(() => version == VERSION_START); long endPointer = @in.GetFilePointer() + byteCount; // Some consumers of direct readers assume that reading the last value // will make the underlying IndexInput go to the end of the packed @@ -1137,7 
+1137,7 @@ public static Reader GetDirectReader(IndexInput @in) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - Debug.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue=" + bitsPerValue); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); return GetDirectReaderNoHeader(@in, format, version, valueCount, bitsPerValue); @@ -1176,7 +1176,7 @@ public static Mutable GetMutable(int valueCount, int bitsPerValue, float accepta /// public static Mutable GetMutable(int valueCount, int bitsPerValue, PackedInt32s.Format format) { - Debug.Assert(valueCount >= 0); + Debugging.Assert(() => valueCount >= 0); if (format == PackedInt32s.Format.PACKED_SINGLE_BLOCK) { @@ -1303,7 +1303,7 @@ public static Writer GetWriterNoHeader(DataOutput @out, Format format, int value /// If there is a low-level I/O error. public static Writer GetWriter(DataOutput @out, int valueCount, int bitsPerValue, float acceptableOverheadRatio) { - Debug.Assert(valueCount >= 0); + Debugging.Assert(() => valueCount >= 0); FormatAndBits formatAndBits = FastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); Writer writer = GetWriterNoHeader(@out, formatAndBits.Format, valueCount, formatAndBits.BitsPerValue, DEFAULT_BUFFER_SIZE); @@ -1348,8 +1348,8 @@ public static long MaxValue(int bitsPerValue) /// public static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int len, int mem) { - Debug.Assert(srcPos + len <= src.Count); - Debug.Assert(destPos + len <= dest.Count); + Debugging.Assert(() => srcPos + len <= src.Count); + Debugging.Assert(() => destPos + len <= dest.Count); int capacity = (int)((uint)mem >> 3); if (capacity == 0) { @@ -1370,17 +1370,17 @@ public static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int l /// Same as but using a pre-allocated buffer. 
internal static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int len, long[] buf) { - Debug.Assert(buf.Length > 0); + Debugging.Assert(() => buf.Length > 0); int remaining = 0; while (len > 0) { int read = src.Get(srcPos, buf, remaining, Math.Min(len, buf.Length - remaining)); - Debug.Assert(read > 0); + Debugging.Assert(() => read > 0); srcPos += read; len -= read; remaining += read; int written = dest.Set(destPos, buf, 0, remaining); - Debug.Assert(written > 0); + Debugging.Assert(() => written > 0); destPos += written; if (written < remaining) { @@ -1411,7 +1411,7 @@ public static Header ReadHeader(DataInput @in) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - Debug.Assert(bitsPerValue > 0 && bitsPerValue <= 64, "bitsPerValue=" + bitsPerValue); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); return new Header(format, valueCount, bitsPerValue, version); diff --git a/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs index 9868965fc6..7b44c4506b 100644 --- a/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs +++ b/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Util.Packed @@ -41,7 +41,7 @@ internal PackedReaderIterator(PackedInt32s.Format format, int packedIntsVersion, this.packedIntsVersion = packedIntsVersion; bulkOperation = BulkOperation.Of(format, bitsPerValue); iterations = Iterations(mem); - Debug.Assert(valueCount == 0 || iterations > 0); + Debugging.Assert(() => valueCount == 0 || iterations > 0); nextBlocks = new byte[iterations * bulkOperation.ByteBlockCount]; nextValues = new Int64sRef(new long[iterations * bulkOperation.ByteValueCount], 0, 0); nextValues.Offset = nextValues.Int64s.Length; @@ -61,9 +61,9 @@ private int Iterations(int mem) public override Int64sRef Next(int count) { - Debug.Assert(nextValues.Length >= 0); - Debug.Assert(count > 0); - Debug.Assert(nextValues.Offset + nextValues.Length <= nextValues.Int64s.Length); + Debugging.Assert(() => nextValues.Length >= 0); + Debugging.Assert(() => count > 0); + Debugging.Assert(() => nextValues.Offset + nextValues.Length <= nextValues.Int64s.Length); nextValues.Offset += nextValues.Length; diff --git a/src/Lucene.Net/Util/Packed/PackedWriter.cs b/src/Lucene.Net/Util/Packed/PackedWriter.cs index 4aabdb3cd2..aaea2434c9 100644 --- a/src/Lucene.Net/Util/Packed/PackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/PackedWriter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; -using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; @@ -55,8 +55,8 @@ internal PackedWriter(PackedInt32s.Format format, DataOutput @out, int valueCoun public override void Add(long v) { - Debug.Assert(m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), m_bitsPerValue.ToString()); - Debug.Assert(!finished); + Debugging.Assert(() => m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), m_bitsPerValue.ToString); + Debugging.Assert(() => !finished); if (m_valueCount != -1 && written >= m_valueCount) { throw new EndOfStreamException("Writing past end of stream"); @@ -71,7 +71,7 @@ public override void Add(long v) public 
override void Finish() { - Debug.Assert(!finished); + Debugging.Assert(() => !finished); if (m_valueCount != -1) { while (written < m_valueCount) diff --git a/src/Lucene.Net/Util/Packed/PagedMutable.cs b/src/Lucene.Net/Util/Packed/PagedMutable.cs index d662e5ec3c..05f872cf53 100644 --- a/src/Lucene.Net/Util/Packed/PagedMutable.cs +++ b/src/Lucene.Net/Util/Packed/PagedMutable.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; namespace Lucene.Net.Util.Packed { @@ -58,7 +58,7 @@ internal PagedMutable(long size, int pageSize, int bitsPerValue, PackedInt32s.Fo protected override Mutable NewMutable(int valueCount, int bitsPerValue) { - Debug.Assert(this.bitsPerValue >= bitsPerValue); + Debugging.Assert(() => this.bitsPerValue >= bitsPerValue); return PackedInt32s.GetMutable(valueCount, this.bitsPerValue, format); } diff --git a/src/Lucene.Net/Util/PagedBytes.cs b/src/Lucene.Net/Util/PagedBytes.cs index b7b518564e..e80ab68336 100644 --- a/src/Lucene.Net/Util/PagedBytes.cs +++ b/src/Lucene.Net/Util/PagedBytes.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -95,8 +95,8 @@ internal Reader(PagedBytes pagedBytes) /// public void FillSlice(BytesRef b, long start, int length) { - Debug.Assert(length >= 0, "length=" + length); - Debug.Assert(length <= blockSize + 1, "length=" + length); + Debugging.Assert(() => length >= 0, () => "length=" + length); + Debugging.Assert(() => length <= blockSize + 1, () => "length=" + length); b.Length = length; if (length == 0) { @@ -144,7 +144,7 @@ public void Fill(BytesRef b, long start) { b.Length = ((block[offset] & 0x7f) << 8) | (block[1 + offset] & 0xff); b.Offset = offset + 2; - Debug.Assert(b.Length > 0); + Debugging.Assert(() => b.Length > 0); } } @@ -162,7 +162,7 @@ public long RamBytesUsed() /// public PagedBytes(int blockBits) { - Debug.Assert(blockBits > 0 && blockBits <= 31, blockBits.ToString()); + Debugging.Assert(() => blockBits > 0 && blockBits <= 31, blockBits.ToString); this.blockSize = 1 << blockBits; this.blockBits = blockBits; blockMask = blockSize - 1; @@ -222,7 +222,7 @@ public void Copy(BytesRef bytes, BytesRef @out) currentBlock = new byte[blockSize]; upto = 0; //left = blockSize; // LUCENENET: Unnecessary assignment - Debug.Assert(bytes.Length <= blockSize); + Debugging.Assert(() => bytes.Length <= blockSize); // TODO: we could also support variable block sizes } @@ -376,7 +376,7 @@ public override byte ReadByte() public override void ReadBytes(byte[] b, int offset, int len) { - Debug.Assert(b.Length >= offset + len); + Debugging.Assert(() => b.Length >= offset + len); int offsetEnd = offset + len; while (true) { @@ -432,7 +432,7 @@ public override void WriteByte(byte b) public override void WriteBytes(byte[] b, int offset, int length) { - Debug.Assert(b.Length >= offset + length); + Debugging.Assert(() => b.Length >= offset + length); if (length == 0) { return; diff --git a/src/Lucene.Net/Util/QueryBuilder.cs b/src/Lucene.Net/Util/QueryBuilder.cs index 9156909d8f..7ef3f13c38 100644 --- a/src/Lucene.Net/Util/QueryBuilder.cs +++ b/src/Lucene.Net/Util/QueryBuilder.cs @@ -1,8 +1,8 @@ using J2N.Collections.Generic.Extensions; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Util @@ -186,7 +186,7 @@ public virtual bool EnablePositionIncrements /// Slop 
factor for phrase/multiphrase queries. protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string field, string queryText, bool quoted, int phraseSlop) { - Debug.Assert(@operator == Occur.SHOULD || @operator == Occur.MUST); + Debugging.Assert(() => @operator == Occur.SHOULD || @operator == Occur.MUST); // Use the analyzer to get all the tokens, and then build a TermQuery, // PhraseQuery, or nothing based on the term count CachingTokenFilter buffer = null; @@ -263,7 +263,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); } catch (IOException) @@ -289,7 +289,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); } catch (IOException) @@ -311,7 +311,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); } catch (IOException) @@ -354,7 +354,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); if (posIncrAtt != null) { @@ -405,7 +405,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debug.Assert(hasNext == true); + Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); if (posIncrAtt != null) { diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs index f31d9ee154..493104b172 100644 --- a/src/Lucene.Net/Util/RamUsageEstimator.cs +++ b/src/Lucene.Net/Util/RamUsageEstimator.cs @@ -1,13 +1,13 @@ +using J2N.Numerics; using J2N.Runtime.CompilerServices; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Reflection; using System.Runtime.CompilerServices; -using J2N.Numerics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util @@ -779,8 +779,8 @@ public IdentityHashSet(int initialCapacity, float loadFactor) { initialCapacity = Math.Max(MIN_CAPACITY, initialCapacity); - Debug.Assert(initialCapacity > 0, "Initial capacity must be between (0, " + int.MaxValue + "]."); - Debug.Assert(loadFactor > 0 && loadFactor < 1, "Load factor must be between (0, 1)."); + Debugging.Assert(() => initialCapacity > 0, () => "Initial capacity must be between (0, " + int.MaxValue + "]."); + Debugging.Assert(() => loadFactor > 0 && loadFactor < 1, () => "Load factor must be between (0, 1)."); this.LoadFactor = loadFactor; AllocateBuffers(RoundCapacity(initialCapacity)); } @@ -790,7 +790,7 @@ public IdentityHashSet(int initialCapacity, float loadFactor) /// public bool Add(KType e) { - Debug.Assert(e != null, "Null keys not allowed."); + Debugging.Assert(() => e != null, () => "Null keys not allowed."); if (Assigned >= resizeThreshold) { @@ -864,7 +864,7 @@ private void ExpandAndRehash() { object[] oldKeys = this.keys; - Debug.Assert(Assigned >= resizeThreshold); + Debugging.Assert(() => Assigned >= 
resizeThreshold); AllocateBuffers(NextCapacity(keys.Length)); /* @@ -903,8 +903,8 @@ private void AllocateBuffers(int capacity) /// private int NextCapacity(int current) // LUCENENET NOTE: made private, since protected is not valid in a sealed class { - Debug.Assert(current > 0 && ((current & (current - 1)) == 0), "Capacity must be a power of two."); - Debug.Assert((current << 1) > 0, "Maximum capacity exceeded (" + ((int)((uint)0x80000000 >> 1)) + ")."); + Debugging.Assert(() => current > 0 && ((current & (current - 1)) == 0), () => "Capacity must be a power of two."); + Debugging.Assert(() => (current << 1) > 0, () => "Maximum capacity exceeded (" + ((int)((uint)0x80000000 >> 1)) + ")."); if (current < MIN_CAPACITY / 2) { diff --git a/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs b/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs index 55e5ecc7a8..bd76164237 100644 --- a/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs +++ b/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -109,7 +109,7 @@ public override void RecycleByteBlocks(byte[][] blocks, int start, int end) blocks[i] = null; } bytesUsed.AddAndGet(-(end - stop) * m_blockSize); - Debug.Assert(bytesUsed.Get() >= 0); + Debugging.Assert(() => bytesUsed.Get() >= 0); } /// The number of currently buffered blocks. @@ -129,7 +129,7 @@ public override void RecycleByteBlocks(byte[][] blocks, int start, int end) /// The number of actually removed buffers. public int FreeBlocks(int num) { - Debug.Assert(num >= 0, "free blocks must be >= 0 but was: " + num); + Debugging.Assert(() => num >= 0, () => "free blocks must be >= 0 but was: " + num); int stop; int count; if (num > freeBlocks) @@ -147,7 +147,7 @@ public int FreeBlocks(int num) freeByteBlocks[--freeBlocks] = null; } bytesUsed.AddAndGet(-count * m_blockSize); - Debug.Assert(bytesUsed.Get() >= 0); + Debugging.Assert(() => bytesUsed.Get() >= 0); return count; } } diff --git a/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs b/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs index 541346a498..41fd4869db 100644 --- a/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs +++ b/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -120,7 +120,7 @@ public override void RecycleInt32Blocks(int[][] blocks, int start, int end) blocks[i] = null; } bytesUsed.AddAndGet(-(end - stop) * (m_blockSize * RamUsageEstimator.NUM_BYTES_INT32)); - Debug.Assert(bytesUsed.Get() >= 0); + Debugging.Assert(() => bytesUsed.Get() >= 0); } /// The number of currently buffered blocks. @@ -140,7 +140,7 @@ public override void RecycleInt32Blocks(int[][] blocks, int start, int end) /// The number of actually removed buffers. 
public int FreeBlocks(int num) { - Debug.Assert(num >= 0, "free blocks must be >= 0 but was: " + num); + Debugging.Assert(() => num >= 0, () => "free blocks must be >= 0 but was: " + num); int stop; int count; if (num > freeBlocks) @@ -158,7 +158,7 @@ public int FreeBlocks(int num) freeByteBlocks[--freeBlocks] = null; } bytesUsed.AddAndGet(-count * m_blockSize * RamUsageEstimator.NUM_BYTES_INT32); - Debug.Assert(bytesUsed.Get() >= 0); + Debugging.Assert(() => bytesUsed.Get() >= 0); return count; } } diff --git a/src/Lucene.Net/Util/RollingBuffer.cs b/src/Lucene.Net/Util/RollingBuffer.cs index c17d36453a..c6479cab2f 100644 --- a/src/Lucene.Net/Util/RollingBuffer.cs +++ b/src/Lucene.Net/Util/RollingBuffer.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -138,7 +138,7 @@ public virtual T Get(int pos) nextPos++; count++; } - Debug.Assert(InBounds(pos)); + Debugging.Assert(() => InBounds(pos)); int index = GetIndex(pos); return buffer[index]; } @@ -152,8 +152,8 @@ public virtual T Get(int pos) public virtual void FreeBefore(int pos) { int toFree = count - (nextPos - pos); - Debug.Assert(toFree >= 0); - Debug.Assert(toFree <= count, "toFree=" + toFree + " count=" + count); + Debugging.Assert(() => toFree >= 0); + Debugging.Assert(() => toFree <= count, () => "toFree=" + toFree + " count=" + count); int index = nextWrite - count; if (index < 0) { diff --git a/src/Lucene.Net/Util/SentinelIntSet.cs b/src/Lucene.Net/Util/SentinelIntSet.cs index a6847740d8..b1e4d7e804 100644 --- a/src/Lucene.Net/Util/SentinelIntSet.cs +++ b/src/Lucene.Net/Util/SentinelIntSet.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Util @@ -114,7 +114,7 @@ public virtual int Hash(int key) /// (internal) Returns the slot for this key. public virtual int GetSlot(int key) { - Debug.Assert(key != EmptyVal); + Debugging.Assert(() => key != EmptyVal); int h = Hash(key); int s = h & (keys.Length - 1); if (keys[s] == key || keys[s] == EmptyVal) @@ -134,7 +134,7 @@ public virtual int GetSlot(int key) /// (internal) Returns the slot for this key, or -slot-1 if not found. public virtual int Find(int key) { - Debug.Assert(key != EmptyVal); + Debugging.Assert(() => key != EmptyVal); int h = Hash(key); int s = h & (keys.Length - 1); if (keys[s] == key) diff --git a/src/Lucene.Net/Util/Sorter.cs b/src/Lucene.Net/Util/Sorter.cs index 78f61d23da..6ce3f33eeb 100644 --- a/src/Lucene.Net/Util/Sorter.cs +++ b/src/Lucene.Net/Util/Sorter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Util { @@ -185,7 +185,7 @@ internal void Reverse(int from, int to) internal void Rotate(int lo, int mid, int hi) { - Debug.Assert(lo <= mid && mid <= hi); + Debugging.Assert(() => lo <= mid && mid <= hi); if (lo == mid || mid == hi) { return; diff --git a/src/Lucene.Net/Util/TimSorter.cs b/src/Lucene.Net/Util/TimSorter.cs index a7bd40eafa..c4622d99b5 100644 --- a/src/Lucene.Net/Util/TimSorter.cs +++ b/src/Lucene.Net/Util/TimSorter.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util @@ -67,7 +67,7 @@ protected TimSorter(int maxTempSlots) /// Minimum run length for an array of length . 
internal static int MinRun(int length) { - Debug.Assert(length >= MINRUN); + Debugging.Assert(() => length >= MINRUN); int n = length; int r = 0; while (n >= 64) @@ -76,7 +76,7 @@ internal static int MinRun(int length) n = (int)((uint)n >> 1); } int minRun = n + r; - Debug.Assert(minRun >= MINRUN && minRun <= THRESHOLD); + Debugging.Assert(() => minRun >= MINRUN && minRun <= THRESHOLD); return minRun; } @@ -91,7 +91,7 @@ internal virtual int RunBase(int i) return runEnds[stackSize - i - 1]; } - internal virtual int RunEnd(int i) + internal virtual int RunEnd(int i) // LUCENENET TODO: API - change to indexer { return runEnds[stackSize - i]; } @@ -114,7 +114,7 @@ internal virtual void PushRunLen(int len) internal virtual int NextRun() { int runBase = RunEnd(0); - Debug.Assert(runBase < to); + Debugging.Assert(() => runBase < to); if (runBase == to - 1) { return 1; @@ -198,7 +198,7 @@ internal virtual void Reset(int from, int to) internal virtual void MergeAt(int n) { - Debug.Assert(stackSize >= 2); + Debugging.Assert(() => stackSize >= 2); Merge(RunBase(n + 1), RunBase(n), RunEnd(n)); for (int j = n + 1; j > 0; --j) { @@ -249,7 +249,7 @@ public override void Sort(int from, int to) PushRunLen(NextRun()); } while (RunEnd(0) < to); ExhaustStack(); - Debug.Assert(RunEnd(0) == to); + Debugging.Assert(() => RunEnd(0) == to); } internal override void DoRotate(int lo, int mid, int hi) @@ -297,7 +297,7 @@ internal override void DoRotate(int lo, int mid, int hi) internal virtual void MergeLo(int lo, int mid, int hi) { - Debug.Assert(Compare(lo, mid) > 0); + Debugging.Assert(() => Compare(lo, mid) > 0); int len1 = mid - lo; Save(lo, len1); Copy(mid, lo); @@ -335,12 +335,12 @@ internal virtual void MergeLo(int lo, int mid, int hi) { Restore(i++, dest); } - Debug.Assert(j == dest); + Debugging.Assert(() => j == dest); } internal virtual void MergeHi(int lo, int mid, int hi) { - Debug.Assert(Compare(mid - 1, hi - 1) > 0); + Debugging.Assert(() => Compare(mid - 1, hi - 1) > 0); int len2 = hi - mid; Save(mid, len2); Copy(mid - 1, hi - 1); @@ -378,7 +378,7 @@ internal virtual void MergeHi(int lo, int mid, int hi) { Restore(j--, dest); } - Debug.Assert(i == dest); + Debugging.Assert(() => i == dest); } internal virtual int LowerSaved(int from, int to, int val) diff --git a/src/Lucene.Net/Util/UnicodeUtil.cs b/src/Lucene.Net/Util/UnicodeUtil.cs index 09865d49af..36e1086036 100644 --- a/src/Lucene.Net/Util/UnicodeUtil.cs +++ b/src/Lucene.Net/Util/UnicodeUtil.cs @@ -1,7 +1,7 @@ using J2N; using J2N.Text; +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.Runtime.CompilerServices; using System.Text; @@ -830,7 +830,7 @@ public static void UTF8toUTF16(byte[] utf8, int offset, int length, CharsRef cha int b = utf8[offset++] & 0xff; if (b < 0xc0) { - Debug.Assert(b < 0x80); + Debugging.Assert(() => b < 0x80); @out[out_offset++] = (char)b; } else if (b < 0xe0) @@ -844,7 +844,7 @@ public static void UTF8toUTF16(byte[] utf8, int offset, int length, CharsRef cha } else { - Debug.Assert(b < 0xf8, "b = 0x" + b.ToString("x")); + Debugging.Assert(() => b < 0xf8, () => "b = 0x" + b.ToString("x")); int ch = ((b & 0x7) << 18) + ((utf8[offset] & 0x3f) << 12) + ((utf8[offset + 1] & 0x3f) << 6) + (utf8[offset + 2] & 0x3f); offset += 3; if (ch < UNI_MAX_BMP) diff --git a/src/Lucene.Net/Util/WAH8DocIdSet.cs b/src/Lucene.Net/Util/WAH8DocIdSet.cs index 0c3da7730d..82a81feaaf 100644 --- a/src/Lucene.Net/Util/WAH8DocIdSet.cs +++ b/src/Lucene.Net/Util/WAH8DocIdSet.cs @@ -1,7 +1,7 @@ +using 
Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Linq; @@ -154,7 +154,7 @@ public static WAH8DocIdSet Intersect(ICollection docIdSets, int in wordNum = iterators[i].wordNum; goto mainContinue; } - Debug.Assert(iterators[i].wordNum == wordNum); + Debugging.Assert(() => iterators[i].wordNum == wordNum); word &= iterators[i].word; if (word == 0) { @@ -164,7 +164,7 @@ public static WAH8DocIdSet Intersect(ICollection docIdSets, int in } } // Found a common word - Debug.Assert(word != 0); + Debugging.Assert(() => word != 0); builder.AddWord(wordNum, word); ++wordNum; mainContinue:; @@ -250,7 +250,7 @@ protected internal override bool LessThan(Iterator a, Iterator b) internal static int WordNum(int docID) { - Debug.Assert(docID >= 0); + Debugging.Assert(() => docID >= 0); return (int)((uint)docID >> 3); } @@ -300,8 +300,8 @@ public virtual object SetIndexInterval(int indexInterval) internal virtual void WriteHeader(bool reverse, int cleanLength, int dirtyLength) { int cleanLengthMinus2 = cleanLength - 2; - Debug.Assert(cleanLengthMinus2 >= 0); - Debug.Assert(dirtyLength >= 0); + Debugging.Assert(() => cleanLengthMinus2 >= 0); + Debugging.Assert(() => dirtyLength >= 0); int token = ((cleanLengthMinus2 & 0x03) << 4) | (dirtyLength & 0x07); if (reverse) { @@ -330,15 +330,15 @@ private bool SequenceIsConsistent() { for (int i = 1; i < dirtyWords.Length; ++i) { - Debug.Assert(dirtyWords.Bytes[i - 1] != 0 || dirtyWords.Bytes[i] != 0); - Debug.Assert((byte)dirtyWords.Bytes[i - 1] != 0xFF || (byte)dirtyWords.Bytes[i] != 0xFF); + Debugging.Assert(() => dirtyWords.Bytes[i - 1] != 0 || dirtyWords.Bytes[i] != 0); + Debugging.Assert(() => (byte)dirtyWords.Bytes[i - 1] != 0xFF || (byte)dirtyWords.Bytes[i] != 0xFF); } return true; } internal virtual void WriteSequence() { - Debug.Assert(SequenceIsConsistent()); + Debugging.Assert(SequenceIsConsistent); try { WriteHeader(reverse, clean, dirtyWords.Length); @@ -354,8 +354,8 @@ internal virtual void WriteSequence() internal virtual void AddWord(int wordNum, byte word) { - Debug.Assert(wordNum > lastWordNum); - Debug.Assert(word != 0); + Debugging.Assert(() => wordNum > lastWordNum); + Debugging.Assert(() => word != 0); if (!reverse) { @@ -397,7 +397,7 @@ internal virtual void AddWord(int wordNum, byte word) } else { - Debug.Assert(lastWordNum >= 0); + Debugging.Assert(() => lastWordNum >= 0); switch (wordNum - lastWordNum) { case 1: @@ -447,7 +447,7 @@ public virtual WAH8DocIdSet Build() { if (cardinality == 0) { - Debug.Assert(lastWordNum == -1); + Debugging.Assert(() => lastWordNum == -1); return EMPTY; } WriteSequence(); @@ -470,15 +470,15 @@ public virtual WAH8DocIdSet Build() positions.Add(0L); wordNums.Add(0L); Iterator it = new Iterator(data, cardinality, int.MaxValue, SINGLE_ZERO_BUFFER, SINGLE_ZERO_BUFFER); - Debug.Assert(it.@in.Position == 0); - Debug.Assert(it.wordNum == -1); + Debugging.Assert(() => it.@in.Position == 0); + Debugging.Assert(() => it.wordNum == -1); for (int i = 1; i < valueCount; ++i) { // skip indexInterval sequences for (int j = 0; j < indexInterval; ++j) { bool readSequence = it.ReadSequence(); - Debug.Assert(readSequence); + Debugging.Assert(() => readSequence); it.SkipDirtyBytes(); } int position = it.@in.Position; @@ -678,15 +678,15 @@ internal virtual bool ReadSequence() allOnesLength = ReadCleanLength(@in, token); } dirtyLength = ReadDirtyLength(@in, token); - Debug.Assert(@in.Length - @in.Position >= dirtyLength, 
@in.Position + " " + @in.Length + " " + dirtyLength); + Debugging.Assert(() => @in.Length - @in.Position >= dirtyLength, () => @in.Position + " " + @in.Length + " " + dirtyLength); ++sequenceNum; return true; } internal virtual void SkipDirtyBytes(int count) { - Debug.Assert(count >= 0); - Debug.Assert(count <= allOnesLength + dirtyLength); + Debugging.Assert(() => count >= 0); + Debugging.Assert(() => count <= allOnesLength + dirtyLength); wordNum += count; if (count <= allOnesLength) { @@ -732,7 +732,7 @@ internal virtual void NextWord() word = @in.ReadByte(); ++wordNum; --dirtyLength; - Debug.Assert(word != 0); // never more than one consecutive 0 + Debugging.Assert(() => word != 0); // never more than one consecutive 0 return; } } @@ -747,8 +747,8 @@ internal virtual int ForwardBinarySearch(int targetWordNum) // advance forward and double the window at each step int indexSize = (int)wordNums.Count; int lo = sequenceNum / indexInterval, hi = lo + 1; - Debug.Assert(sequenceNum == -1 || wordNums.Get(lo) <= wordNum); - Debug.Assert(lo + 1 == wordNums.Count || wordNums.Get(lo + 1) > wordNum); + Debugging.Assert(() => sequenceNum == -1 || wordNums.Get(lo) <= wordNum); + Debugging.Assert(() => lo + 1 == wordNums.Count || wordNums.Get(lo + 1) > wordNum); while (true) { if (hi >= indexSize) @@ -779,14 +779,14 @@ internal virtual int ForwardBinarySearch(int targetWordNum) hi = mid - 1; } } - Debug.Assert(wordNums.Get(hi) <= targetWordNum); - Debug.Assert(hi + 1 == wordNums.Count || wordNums.Get(hi + 1) > targetWordNum); + Debugging.Assert(() => wordNums.Get(hi) <= targetWordNum); + Debugging.Assert(() => hi + 1 == wordNums.Count || wordNums.Get(hi + 1) > targetWordNum); return hi; } internal virtual void AdvanceWord(int targetWordNum) { - Debug.Assert(targetWordNum > wordNum); + Debugging.Assert(() => targetWordNum > wordNum); int delta = targetWordNum - wordNum; if (delta <= allOnesLength + dirtyLength + 1) { @@ -795,7 +795,7 @@ internal virtual void AdvanceWord(int targetWordNum) else { SkipDirtyBytes(); - Debug.Assert(dirtyLength == 0); + Debugging.Assert(() => dirtyLength == 0); if (delta > indexThreshold) { // use the index @@ -847,7 +847,7 @@ public override int NextDoc() return docID = NO_MORE_DOCS; } bitList = BitUtil.BitList(word); - Debug.Assert(bitList != 0); + Debugging.Assert(() => bitList != 0); docID = (wordNum << 3) | ((bitList & 0x0F) - 1); bitList = (int)((uint)bitList >> 4); return docID; @@ -855,7 +855,7 @@ public override int NextDoc() public override int Advance(int target) { - Debug.Assert(target > docID); + Debugging.Assert(() => target > docID); int targetWordNum = WordNum(target); if (targetWordNum > this.wordNum) { From c7610556c163423323b7a6422c01fe5002ae347c Mon Sep 17 00:00:00 2001 From: Shad Storhaug Date: Thu, 13 Aug 2020 23:09:25 +0700 Subject: [PATCH 04/13] Converted test framework to use Lucene.Net.Diagnostics.Debugging.Assert() instead of Lucene.Net.Diagnostics.Debug.Assert() --- .../Analysis/LookaheadTokenFilter.cs | 24 +- .../Analysis/MockCharFilter.cs | 4 +- .../Analysis/MockReaderWrapper.cs | 8 +- .../Analysis/MockTokenizer.cs | 22 +- .../Asserting/AssertingDocValuesFormat.cs | 84 +++---- .../Codecs/Asserting/AssertingNormsFormat.cs | 8 +- .../Asserting/AssertingPostingsFormat.cs | 70 +++--- .../Asserting/AssertingStoredFieldsFormat.cs | 22 +- .../Asserting/AssertingTermVectorsFormat.cs | 48 ++-- .../Dummy/DummyCompressingCodec.cs | 4 +- .../Lucene3x/PreFlexRWFieldInfosWriter.cs | 6 +- .../Codecs/Lucene3x/PreFlexRWFieldsWriter.cs | 12 +- 
.../Codecs/Lucene3x/PreFlexRWNormsConsumer.cs | 4 +- .../Lucene3x/PreFlexRWStoredFieldsWriter.cs | 6 +- .../Lucene3x/PreFlexRWTermVectorsWriter.cs | 8 +- .../Codecs/Lucene3x/TermInfosWriter.cs | 14 +- .../Lucene40/Lucene40DocValuesWriter.cs | 18 +- .../Lucene40/Lucene40FieldInfosWriter.cs | 10 +- .../Codecs/Lucene40/Lucene40PostingsWriter.cs | 18 +- .../Codecs/Lucene40/Lucene40SkipListWriter.cs | 10 +- .../Lucene42/Lucene42DocValuesConsumer.cs | 4 +- .../Lucene42/Lucene42FieldInfosWriter.cs | 6 +- .../MockVariableIntBlockPostingsFormat.cs | 4 +- .../MockRandom/MockRandomPostingsFormat.cs | 4 +- .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs | 12 +- .../Index/AllDeletedFilterReader.cs | 4 +- .../Index/AssertingAtomicReader.cs | 224 +++++++++--------- .../Index/BaseDocValuesFormatTestCase.cs | 66 +++--- .../Index/BasePostingsFormatTestCase.cs | 4 +- .../Index/MockRandomMergePolicy.cs | 4 +- .../Index/RandomCodec.cs | 6 +- .../RandomDocumentsWriterPerThreadPool.cs | 12 +- .../Index/RandomIndexWriter.cs | 4 +- .../ThreadedIndexingAndSearchingTestCase.cs | 6 +- .../Search/AssertingBulkScorer.cs | 4 +- .../Search/AssertingCollector.cs | 4 +- .../Search/AssertingScorer.cs | 10 +- .../Search/QueryUtils.cs | 6 +- .../Search/RandomSimilarityProvider.cs | 4 +- .../Search/ShardSearchingTestBase.cs | 22 +- .../Store/MockDirectoryWrapper.cs | 12 +- .../Support/Diagnostics/Debug.cs | 46 ---- .../JavaCompatibility/LuceneTestCase.cs | 4 +- .../Util/Automaton/AutomatonTestUtil.cs | 6 +- .../Util/BaseDocIdSetTestCase.cs | 4 +- .../Util/FailOnNonBulkMergesInfoStream.cs | 4 +- .../Util/Fst/FSTTester.cs | 10 +- .../Util/LuceneTestCase.cs | 16 +- .../Util/NullInfoStream.cs | 8 +- .../Util/TestRuleAssertionsRequired.cs | 2 +- .../Util/TestRuleSetupAndRestoreClassEnv.cs | 16 +- .../Util/ThrottledIndexOutput.cs | 4 +- 52 files changed, 448 insertions(+), 494 deletions(-) delete mode 100644 src/Lucene.Net.TestFramework/Support/Diagnostics/Debug.cs diff --git a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs index 262356c864..67d081e0c8 100644 --- a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs @@ -1,8 +1,8 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
namespace Lucene.Net.Analysis { @@ -78,7 +78,7 @@ public virtual void Add(AttributeSource.State state) public virtual AttributeSource.State NextState() { - Debug.Assert(NextRead < InputTokens.Count); + Debugging.Assert(() => NextRead < InputTokens.Count); return InputTokens[NextRead++]; } } @@ -141,7 +141,7 @@ protected virtual void InsertToken() m_positions.Get(m_inputPos).Add(CaptureState()); tokenPending = false; } - Debug.Assert(!insertPending); + Debugging.Assert(() => !insertPending); insertPending = true; } @@ -184,8 +184,8 @@ protected virtual bool PeekToken() { Console.WriteLine("LTF.peekToken inputPos=" + m_inputPos + " outputPos=" + m_outputPos + " tokenPending=" + tokenPending); } - Debug.Assert(!m_end); - Debug.Assert(m_inputPos == -1 || m_outputPos <= m_inputPos); + Debugging.Assert(() => !m_end); + Debugging.Assert(() => m_inputPos == -1 || m_outputPos <= m_inputPos); if (tokenPending) { m_positions.Get(m_inputPos).Add(CaptureState()); @@ -199,7 +199,7 @@ protected virtual bool PeekToken() if (gotToken) { m_inputPos += m_posIncAtt.PositionIncrement; - Debug.Assert(m_inputPos >= 0); + Debugging.Assert(() => m_inputPos >= 0); if (DEBUG) { Console.WriteLine(" now inputPos=" + m_inputPos); @@ -216,7 +216,7 @@ protected virtual bool PeekToken() else { // Make sure our input isn't messing up offsets: - Debug.Assert(startPosData.StartOffset == startOffset, "prev startOffset=" + startPosData.StartOffset + " vs new startOffset=" + startOffset + " inputPos=" + m_inputPos); + Debugging.Assert(() => startPosData.StartOffset == startOffset, () => "prev startOffset=" + startPosData.StartOffset + " vs new startOffset=" + startOffset + " inputPos=" + m_inputPos); } int endOffset = m_offsetAtt.EndOffset; @@ -227,7 +227,7 @@ protected virtual bool PeekToken() else { // Make sure our input isn't messing up offsets: - Debug.Assert(endPosData.EndOffset == endOffset, "prev endOffset=" + endPosData.EndOffset + " vs new endOffset=" + endOffset + " inputPos=" + m_inputPos); + Debugging.Assert(() => endPosData.EndOffset == endOffset, () => "prev endOffset=" + endPosData.EndOffset + " vs new endOffset=" + endOffset + " inputPos=" + m_inputPos); } tokenPending = true; @@ -314,7 +314,7 @@ protected virtual bool NextToken() { Console.WriteLine(" return inserted token"); } - Debug.Assert(InsertedTokenConsistent()); + Debugging.Assert(InsertedTokenConsistent); insertPending = false; return true; } @@ -340,7 +340,7 @@ protected virtual bool NextToken() { Console.WriteLine(" return inserted token"); } - Debug.Assert(InsertedTokenConsistent()); + Debugging.Assert(InsertedTokenConsistent); insertPending = false; return true; } @@ -364,8 +364,8 @@ private bool InsertedTokenConsistent() { int posLen = m_posLenAtt.PositionLength; Position endPosData = m_positions.Get(m_outputPos + posLen); - Debug.Assert(endPosData.EndOffset != -1); - Debug.Assert(m_offsetAtt.EndOffset == endPosData.EndOffset, "offsetAtt.endOffset=" + m_offsetAtt.EndOffset + " vs expected=" + endPosData.EndOffset); + Debugging.Assert(() => endPosData.EndOffset != -1); + Debugging.Assert(() => m_offsetAtt.EndOffset == endPosData.EndOffset, () => "offsetAtt.endOffset=" + m_offsetAtt.EndOffset + " vs expected=" + endPosData.EndOffset); return true; } diff --git a/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs b/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs index c4ad5f5cbc..5f15220d5d 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs @@ -1,8 +1,8 @@ 
+using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.IO; using JCG = J2N.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Analysis { @@ -112,7 +112,7 @@ protected override int Correct(int currentOff) ret = currentOff; } - Debug.Assert(ret >= 0, "currentOff=" + currentOff + ",diff=" + (ret - currentOff)); + Debugging.Assert(() => ret >= 0, () => "currentOff=" + currentOff + ",diff=" + (ret - currentOff)); return ret; } diff --git a/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs b/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs index c16af6c395..51e9e52bc1 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.IO; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Analysis { @@ -47,7 +47,7 @@ public virtual void ThrowExcAfterChar(int charUpto) { excAtChar = charUpto; // You should only call this on init!: - Debug.Assert(0 == readSoFar); + Debugging.Assert(() => 0 == readSoFar); } public virtual void ThrowExcNext() @@ -91,10 +91,10 @@ public override int Read(char[] cbuf, int off, int len) if (excAtChar != -1) { int left = excAtChar - readSoFar; - Debug.Assert(left != 0); + Debugging.Assert(() => left != 0); read = input.Read(cbuf, off, Math.Min(realLen, left)); //Characters are left - Debug.Assert(read != 0); + Debugging.Assert(() => read != 0); readSoFar += read; } else diff --git a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs index 4be7f18c5c..f2c076aeb8 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs @@ -1,13 +1,13 @@ using J2N; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; +using System.Globalization; using System.IO; +using Assert = Lucene.Net.TestFramework.Assert; using CharacterRunAutomaton = Lucene.Net.Util.Automaton.CharacterRunAutomaton; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
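The two-argument form in the MockCharFilter hunk above changes the message from an eagerly built string to a Func<string>, so the concatenation happens only when the assert actually fails. A short sketch of the cost difference, assuming the Debugging.Assert overloads introduced earlier in this series (variable names are illustrative only):

    internal static class MessageFactoryDemo
    {
        private static void Check(int currentOff, int ret)
        {
            // Old shape: the message string was built on every call, even
            // though it was only needed on failure.
            //Debug.Assert(ret >= 0, "currentOff=" + currentOff + ",diff=" + (ret - currentOff));

            // New shape: the Func<string> runs only if the condition is false,
            // so a passing assert pays no string-building cost.
            Debugging.Assert(() => ret >= 0,
                () => "currentOff=" + currentOff + ",diff=" + (ret - currentOff));
        }
    }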
using RegExp = Lucene.Net.Util.Automaton.RegExp; -using Assert = Lucene.Net.TestFramework.Assert; -using System.Globalization; namespace Lucene.Net.Analysis { @@ -140,7 +140,7 @@ public MockTokenizer(AttributeFactory factory, TextReader input) public sealed override bool IncrementToken() { - Debug.Assert(!enableChecks || (streamState == State.RESET || streamState == State.INCREMENT), "IncrementToken() called while in wrong state: " + streamState); + Debugging.Assert(() => !enableChecks || (streamState == State.RESET || streamState == State.INCREMENT), () => "IncrementToken() called while in wrong state: " + streamState); ClearAttributes(); for (; ; ) { @@ -219,7 +219,7 @@ protected virtual int ReadCodePoint() } else { - Debug.Assert(!char.IsLowSurrogate((char)ch), "unpaired low surrogate: " + ch.ToString("x")); + Debugging.Assert(() => !char.IsLowSurrogate((char)ch), () => "unpaired low surrogate: " + ch.ToString("x")); off++; if (char.IsHighSurrogate((char)ch)) { @@ -227,12 +227,12 @@ protected virtual int ReadCodePoint() if (ch2 >= 0) { off++; - Debug.Assert(char.IsLowSurrogate((char)ch2), "unpaired high surrogate: " + ch.ToString("x") + ", followed by: " + ch2.ToString("x")); + Debugging.Assert(() => char.IsLowSurrogate((char)ch2), () => "unpaired high surrogate: " + ch.ToString("x") + ", followed by: " + ch2.ToString("x")); return Character.ToCodePoint((char)ch, (char)ch2); } else { - Debug.Assert(false, "stream ends with unpaired high surrogate: " + ch.ToString("x")); + Debugging.Assert(() => false, () => "stream ends with unpaired high surrogate: " + ch.ToString("x")); } } return ch; @@ -300,7 +300,7 @@ public override void Reset() state = runAutomaton.InitialState; lastOffset = off = 0; bufferedCodePoint = -1; - Debug.Assert(!enableChecks || streamState != State.RESET, "Double Reset()"); + Debugging.Assert(() => !enableChecks || streamState != State.RESET, () => "Double Reset()"); streamState = State.RESET; } @@ -312,14 +312,14 @@ protected override void Dispose(bool disposing) // in some exceptional cases (e.g. TestIndexWriterExceptions) a test can prematurely close() // these tests should disable this check, by default we check the normal workflow. // TODO: investigate the CachingTokenFilter "double-close"... 
for now we ignore this - Debug.Assert(!enableChecks || streamState == State.END || streamState == State.CLOSE, "Dispose() called in wrong state: " + streamState); + Debugging.Assert(() => !enableChecks || streamState == State.END || streamState == State.CLOSE, () => "Dispose() called in wrong state: " + streamState); streamState = State.CLOSE; } } internal override bool SetReaderTestPoint() { - Debug.Assert(!enableChecks || streamState == State.CLOSE, "SetReader() called in wrong state: " + streamState); + Debugging.Assert(() => !enableChecks || streamState == State.CLOSE, () => "SetReader() called in wrong state: " + streamState); streamState = State.SETREADER; return true; } @@ -333,7 +333,7 @@ public override void End() // these tests should disable this check (in general you should consume the entire stream) try { - Debug.Assert(!enableChecks || streamState == State.INCREMENT_FALSE, "End() called before IncrementToken() returned false!"); + Debugging.Assert(() => !enableChecks || streamState == State.INCREMENT_FALSE, () => "End() called before IncrementToken() returned false!"); } finally { diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs index f63298d5fc..6cad1fc7ca 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs @@ -1,9 +1,9 @@ using Lucene.Net.Codecs.Lucene45; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Util; using System; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
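One idiom in the MockTokenizer hunk above deserves a note: Debugging.Assert(() => false, ...) marks a branch that should be unreachable, firing only if the input violates the UTF-16 surrogate contract. A self-contained sketch of the same shape, with a hypothetical readChar delegate standing in for the tokenizer's input:

    using System;

    internal static class SurrogateDemo
    {
        // Returns the next code point; asserts if the stream ends between
        // a high surrogate and its required low surrogate.
        internal static int NextCodePoint(Func<int> readChar)
        {
            int ch = readChar();
            if (ch >= 0 && char.IsHighSurrogate((char)ch))
            {
                int ch2 = readChar();
                if (ch2 >= 0)
                    return char.ConvertToUtf32((char)ch, (char)ch2);

                // Unreachable for well-formed input; () => false documents that.
                Debugging.Assert(() => false,
                    () => "stream ends with unpaired high surrogate: " + ch.ToString("x"));
            }
            return ch;
        }
    }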
namespace Lucene.Net.Codecs.Asserting { @@ -40,15 +40,15 @@ public AssertingDocValuesFormat() public override DocValuesConsumer FieldsConsumer(SegmentWriteState state) { DocValuesConsumer consumer = @in.FieldsConsumer(state); - Debug.Assert(consumer != null); + Debugging.Assert(() => consumer != null); return new AssertingDocValuesConsumer(consumer, state.SegmentInfo.DocCount); } public override DocValuesProducer FieldsProducer(SegmentReadState state) { - Debug.Assert(state.FieldInfos.HasDocValues); + Debugging.Assert(() => state.FieldInfos.HasDocValues); DocValuesProducer producer = @in.FieldsProducer(state); - Debug.Assert(producer != null); + Debugging.Assert(() => producer != null); return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount); } @@ -70,7 +70,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable values) { count++; } - Debug.Assert(count == maxDoc); + Debugging.Assert(() => count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, true); @in.AddNumericField(field, values); } @@ -80,10 +80,10 @@ public override void AddBinaryField(FieldInfo field, IEnumerable value int count = 0; foreach (BytesRef b in values) { - Debug.Assert(b == null || b.IsValid()); + Debugging.Assert(() => b == null || b.IsValid()); count++; } - Debug.Assert(count == maxDoc); + Debugging.Assert(() => count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, true); @in.AddBinaryField(field, values); } @@ -94,25 +94,25 @@ public override void AddSortedField(FieldInfo field, IEnumerable value BytesRef lastValue = null; foreach (BytesRef b in values) { - Debug.Assert(b != null); - Debug.Assert(b.IsValid()); + Debugging.Assert(() => b != null); + Debugging.Assert(() => b.IsValid()); if (valueCount > 0) { - Debug.Assert(b.CompareTo(lastValue) > 0); + Debugging.Assert(() => b.CompareTo(lastValue) > 0); } lastValue = BytesRef.DeepCopyOf(b); valueCount++; } - Debug.Assert(valueCount <= maxDoc); + Debugging.Assert(() => valueCount <= maxDoc); FixedBitSet seenOrds = new FixedBitSet(valueCount); int count = 0; foreach (long? v in docToOrd) { - Debug.Assert(v != null); + Debugging.Assert(() => v != null); int ord = (int)v.Value; - Debug.Assert(ord >= -1 && ord < valueCount); + Debugging.Assert(() => ord >= -1 && ord < valueCount); if (ord >= 0) { seenOrds.Set(ord); @@ -120,8 +120,8 @@ public override void AddSortedField(FieldInfo field, IEnumerable value count++; } - Debug.Assert(count == maxDoc); - Debug.Assert(seenOrds.Cardinality() == valueCount); + Debugging.Assert(() => count == maxDoc); + Debugging.Assert(() => seenOrds.Cardinality() == valueCount); CheckIterator(values.GetEnumerator(), valueCount, false); CheckIterator(docToOrd.GetEnumerator(), maxDoc, false); @in.AddSortedField(field, values, docToOrd); @@ -133,11 +133,11 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va BytesRef lastValue = null; foreach (BytesRef b in values) { - Debug.Assert(b != null); - Debug.Assert(b.IsValid()); + Debugging.Assert(() => b != null); + Debugging.Assert(() => b.IsValid()); if (valueCount > 0) { - Debug.Assert(b.CompareTo(lastValue) > 0); + Debugging.Assert(() => b.CompareTo(lastValue) > 0); } lastValue = BytesRef.DeepCopyOf(b); valueCount++; @@ -150,9 +150,9 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va { foreach (long? 
v in docToOrdCount) { - Debug.Assert(v != null); + Debugging.Assert(() => v != null); int count = (int)v.Value; - Debug.Assert(count >= 0); + Debugging.Assert(() => count >= 0); docCount++; ordCount += count; @@ -161,18 +161,18 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va { ordIterator.MoveNext(); long? o = ordIterator.Current; - Debug.Assert(o != null); + Debugging.Assert(() => o != null); long ord = o.Value; - Debug.Assert(ord >= 0 && ord < valueCount); - Debug.Assert(ord > lastOrd, "ord=" + ord + ",lastOrd=" + lastOrd); + Debugging.Assert(() => ord >= 0 && ord < valueCount); + Debugging.Assert(() => ord > lastOrd, () => "ord=" + ord + ",lastOrd=" + lastOrd); seenOrds.Set(ord); lastOrd = ord; } } - Debug.Assert(ordIterator.MoveNext() == false); + Debugging.Assert(() => ordIterator.MoveNext() == false); - Debug.Assert(docCount == maxDoc); - Debug.Assert(seenOrds.Cardinality() == valueCount); + Debugging.Assert(() => docCount == maxDoc); + Debugging.Assert(() => seenOrds.Cardinality() == valueCount); CheckIterator(values.GetEnumerator(), valueCount, false); CheckIterator(docToOrdCount.GetEnumerator(), maxDoc, false); CheckIterator(ords.GetEnumerator(), ordCount, false); @@ -203,10 +203,10 @@ public override void AddNumericField(FieldInfo field, IEnumerable values) int count = 0; foreach (long? v in values) { - Debug.Assert(v != null); + Debugging.Assert(() => v != null); count++; } - Debug.Assert(count == maxDoc); + Debugging.Assert(() => count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, false); @in.AddNumericField(field, values); } @@ -240,9 +240,9 @@ private static void CheckIterator(IEnumerator iterator, long expectedSize, for (long i = 0; i < expectedSize; i++) { bool hasNext = iterator.MoveNext(); - Debug.Assert(hasNext); + Debugging.Assert(() => hasNext); T v = iterator.Current; - Debug.Assert(allowNull || v != null); + Debugging.Assert(() => allowNull || v != null); // LUCENE.NET specific. removed call to Reset(). 
//try @@ -255,7 +255,7 @@ private static void CheckIterator(IEnumerator iterator, long expectedSize, // // ok //} } - Debug.Assert(!iterator.MoveNext()); + Debugging.Assert(() => !iterator.MoveNext()); /*try { //iterator.next(); @@ -285,42 +285,42 @@ internal AssertingDocValuesProducer(DocValuesProducer @in, int maxDoc) public override NumericDocValues GetNumeric(FieldInfo field) { - Debug.Assert(field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC); + Debugging.Assert(() => field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC); NumericDocValues values = @in.GetNumeric(field); - Debug.Assert(values != null); + Debugging.Assert(() => values != null); return new AssertingNumericDocValues(values, maxDoc); } public override BinaryDocValues GetBinary(FieldInfo field) { - Debug.Assert(field.DocValuesType == DocValuesType.BINARY); + Debugging.Assert(() => field.DocValuesType == DocValuesType.BINARY); BinaryDocValues values = @in.GetBinary(field); - Debug.Assert(values != null); + Debugging.Assert(() => values != null); return new AssertingBinaryDocValues(values, maxDoc); } public override SortedDocValues GetSorted(FieldInfo field) { - Debug.Assert(field.DocValuesType == DocValuesType.SORTED); + Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED); SortedDocValues values = @in.GetSorted(field); - Debug.Assert(values != null); + Debugging.Assert(() => values != null); return new AssertingSortedDocValues(values, maxDoc); } public override SortedSetDocValues GetSortedSet(FieldInfo field) { - Debug.Assert(field.DocValuesType == DocValuesType.SORTED_SET); + Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED_SET); SortedSetDocValues values = @in.GetSortedSet(field); - Debug.Assert(values != null); + Debugging.Assert(() => values != null); return new AssertingSortedSetDocValues(values, maxDoc); } public override IBits GetDocsWithField(FieldInfo field) { - Debug.Assert(field.DocValuesType != DocValuesType.NONE); + Debugging.Assert(() => field.DocValuesType != DocValuesType.NONE); IBits bits = @in.GetDocsWithField(field); - Debug.Assert(bits != null); - Debug.Assert(bits.Length == maxDoc); + Debugging.Assert(() => bits != null); + Debugging.Assert(() => bits.Length == maxDoc); return new AssertingBits(bits); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs index 28f822a3af..87efcbaad2 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs @@ -1,7 +1,7 @@ using Lucene.Net.Codecs.Lucene42; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using static Lucene.Net.Codecs.Asserting.AssertingDocValuesFormat; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
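The CheckIterator hunk above shows a subtlety of the delegate-based form: code with side effects must stay outside the lambda. MoveNext() is hoisted into a local before the assert because anything inside the Func<bool> executes only when asserts are enabled; had the call moved into the lambda, disabling asserts would silently stop advancing the enumerator. A condensed sketch of the safe shape:

    using System.Collections.Generic;

    internal static class IteratorCheckDemo
    {
        internal static void CheckIterator<T>(IEnumerator<T> iterator, long expectedSize, bool allowNull)
        {
            for (long i = 0; i < expectedSize; i++)
            {
                bool hasNext = iterator.MoveNext(); // side effect hoisted: runs in every build
                Debugging.Assert(() => hasNext);
                T v = iterator.Current;
                Debugging.Assert(() => allowNull || v != null);
            }
            // Leaving MoveNext() inside the lambda is safe only because nothing
            // depends on the enumerator's position after this point.
            Debugging.Assert(() => !iterator.MoveNext());
        }
    }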
namespace Lucene.Net.Codecs.Asserting { @@ -32,15 +32,15 @@ public class AssertingNormsFormat : NormsFormat public override DocValuesConsumer NormsConsumer(SegmentWriteState state) { DocValuesConsumer consumer = @in.NormsConsumer(state); - Debug.Assert(consumer != null); + Debugging.Assert(() => consumer != null); return new AssertingNormsConsumer(consumer, state.SegmentInfo.DocCount); } public override DocValuesProducer NormsProducer(SegmentReadState state) { - Debug.Assert(state.FieldInfos.HasNorms); + Debugging.Assert(() => state.FieldInfos.HasNorms); DocValuesProducer producer = @in.NormsProducer(state); - Debug.Assert(producer != null); + Debugging.Assert(() => producer != null); return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount); } } diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs index d12c609e61..cc99fd3503 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs @@ -1,9 +1,9 @@ using Lucene.Net.Codecs.Lucene41; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Util; using System; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Asserting { @@ -67,7 +67,7 @@ protected override void Dispose(bool disposing) public override IEnumerator GetEnumerator() { IEnumerator iterator = @in.GetEnumerator(); - Debug.Assert(iterator != null); + Debugging.Assert(() => iterator != null); return iterator; } @@ -105,7 +105,7 @@ internal AssertingFieldsConsumer(FieldsConsumer @in) public override TermsConsumer AddField(FieldInfo field) { TermsConsumer consumer = @in.AddField(field); - Debug.Assert(consumer != null); + Debugging.Assert(() => consumer != null); return new AssertingTermsConsumer(consumer, field); } @@ -144,28 +144,28 @@ internal AssertingTermsConsumer(TermsConsumer @in, FieldInfo fieldInfo) public override PostingsConsumer StartTerm(BytesRef text) { - Debug.Assert(state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); + Debugging.Assert(() => state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); state = TermsConsumerState.START; - Debug.Assert(lastTerm == null || @in.Comparer.Compare(text, lastTerm) > 0); + Debugging.Assert(() => lastTerm == null || @in.Comparer.Compare(text, lastTerm) > 0); lastTerm = BytesRef.DeepCopyOf(text); return lastPostingsConsumer = new AssertingPostingsConsumer(@in.StartTerm(text), fieldInfo, visitedDocs); } public override void FinishTerm(BytesRef text, TermStats stats) { - Debug.Assert(state == TermsConsumerState.START); + Debugging.Assert(() => state == TermsConsumerState.START); state = TermsConsumerState.INITIAL; - Debug.Assert(text.Equals(lastTerm)); - Debug.Assert(stats.DocFreq > 0); // otherwise, this method should not be called. - Debug.Assert(stats.DocFreq == lastPostingsConsumer.docFreq); + Debugging.Assert(() => text.Equals(lastTerm)); + Debugging.Assert(() => stats.DocFreq > 0); // otherwise, this method should not be called. 
+ Debugging.Assert(() => stats.DocFreq == lastPostingsConsumer.docFreq); sumDocFreq += stats.DocFreq; if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - Debug.Assert(stats.TotalTermFreq == -1); + Debugging.Assert(() => stats.TotalTermFreq == -1); } else { - Debug.Assert(stats.TotalTermFreq == lastPostingsConsumer.totalTermFreq); + Debugging.Assert(() => stats.TotalTermFreq == lastPostingsConsumer.totalTermFreq); sumTotalTermFreq += stats.TotalTermFreq; } @in.FinishTerm(text, stats); @@ -173,20 +173,20 @@ public override void FinishTerm(BytesRef text, TermStats stats) public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount) { - Debug.Assert(state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); + Debugging.Assert(() => state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); state = TermsConsumerState.FINISHED; - Debug.Assert(docCount >= 0); - Debug.Assert(docCount == visitedDocs.Cardinality()); - Debug.Assert(sumDocFreq >= docCount); - Debug.Assert(sumDocFreq == this.sumDocFreq); + Debugging.Assert(() => docCount >= 0); + Debugging.Assert(() => docCount == visitedDocs.Cardinality()); + Debugging.Assert(() => sumDocFreq >= docCount); + Debugging.Assert(() => sumDocFreq == this.sumDocFreq); if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - Debug.Assert(sumTotalTermFreq == -1); + Debugging.Assert(() => sumTotalTermFreq == -1); } else { - Debug.Assert(sumTotalTermFreq >= sumDocFreq); - Debug.Assert(sumTotalTermFreq == this.sumTotalTermFreq); + Debugging.Assert(() => sumTotalTermFreq >= sumDocFreq); + Debugging.Assert(() => sumTotalTermFreq == this.sumTotalTermFreq); } @in.Finish(sumTotalTermFreq, sumDocFreq, docCount); } @@ -222,17 +222,17 @@ internal AssertingPostingsConsumer(PostingsConsumer @in, FieldInfo fieldInfo, Op public override void StartDoc(int docID, int freq) { - Debug.Assert(state == PostingsConsumerState.INITIAL); + Debugging.Assert(() => state == PostingsConsumerState.INITIAL); state = PostingsConsumerState.START; - Debug.Assert(docID >= 0); + Debugging.Assert(() => docID >= 0); if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - Debug.Assert(freq == -1); + Debugging.Assert(() => freq == -1); this.freq = 0; // we don't expect any positions here } else { - Debug.Assert(freq > 0); + Debugging.Assert(() => freq > 0); this.freq = freq; totalTermFreq += freq; } @@ -246,41 +246,41 @@ public override void StartDoc(int docID, int freq) public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { - Debug.Assert(state == PostingsConsumerState.START); - Debug.Assert(positionCount < freq); + Debugging.Assert(() => state == PostingsConsumerState.START); + Debugging.Assert(() => positionCount < freq); positionCount++; - Debug.Assert(position >= lastPosition || position == -1); // we still allow -1 from old 3.x indexes + Debugging.Assert(() => position >= lastPosition || position == -1); // we still allow -1 from old 3.x indexes lastPosition = position; if (fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { - Debug.Assert(startOffset >= 0); - Debug.Assert(startOffset >= lastStartOffset); + Debugging.Assert(() => startOffset >= 0); + Debugging.Assert(() => startOffset >= lastStartOffset); lastStartOffset = startOffset; - Debug.Assert(endOffset >= startOffset); + Debugging.Assert(() => endOffset >= startOffset); } else { - Debug.Assert(startOffset == -1); - 
Debug.Assert(endOffset == -1); + Debugging.Assert(() => startOffset == -1); + Debugging.Assert(() => endOffset == -1); } if (payload != null) { - Debug.Assert(fieldInfo.HasPayloads); + Debugging.Assert(() => fieldInfo.HasPayloads); } @in.AddPosition(position, payload, startOffset, endOffset); } public override void FinishDoc() { - Debug.Assert(state == PostingsConsumerState.START); + Debugging.Assert(() => state == PostingsConsumerState.START); state = PostingsConsumerState.INITIAL; if (fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { - Debug.Assert(positionCount == 0); // we should not have fed any positions! + Debugging.Assert(() => positionCount == 0); // we should not have fed any positions! } else { - Debug.Assert(positionCount == freq); + Debugging.Assert(() => positionCount == freq); } @in.FinishDoc(); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs index 3e870395f9..ca5761756f 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs @@ -1,7 +1,7 @@ using Lucene.Net.Codecs.Lucene41; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Asserting { @@ -58,7 +58,7 @@ protected override void Dispose(bool disposing) public override void VisitDocument(int n, StoredFieldVisitor visitor) { - Debug.Assert(n >= 0 && n < maxDoc); + Debugging.Assert(() => n >= 0 && n < maxDoc); @in.VisitDocument(n, visitor); } @@ -100,9 +100,9 @@ internal AssertingStoredFieldsWriter(StoredFieldsWriter @in) public override void StartDocument(int numStoredFields) { - Debug.Assert(docStatus != Status.STARTED); + Debugging.Assert(() => docStatus != Status.STARTED); @in.StartDocument(numStoredFields); - Debug.Assert(fieldCount == 0); + Debugging.Assert(() => fieldCount == 0); fieldCount = numStoredFields; numWritten++; docStatus = Status.STARTED; @@ -110,17 +110,17 @@ public override void StartDocument(int numStoredFields) public override void FinishDocument() { - Debug.Assert(docStatus == Status.STARTED); - Debug.Assert(fieldCount == 0); + Debugging.Assert(() => docStatus == Status.STARTED); + Debugging.Assert(() => fieldCount == 0); @in.FinishDocument(); docStatus = Status.FINISHED; } public override void WriteField(FieldInfo info, IIndexableField field) { - Debug.Assert(docStatus == Status.STARTED); + Debugging.Assert(() => docStatus == Status.STARTED); @in.WriteField(info, field); - Debug.Assert(fieldCount > 0); + Debugging.Assert(() => fieldCount > 0); fieldCount--; } @@ -131,10 +131,10 @@ public override void Abort() public override void Finish(FieldInfos fis, int numDocs) { - Debug.Assert(docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED)); + Debugging.Assert(() => docStatus == (numDocs > 0 ? 
Status.FINISHED : Status.UNDEFINED)); @in.Finish(fis, numDocs); - Debug.Assert(fieldCount == 0); - Debug.Assert(numDocs == numWritten); + Debugging.Assert(() => fieldCount == 0); + Debugging.Assert(() => numDocs == numWritten); } protected override void Dispose(bool disposing) diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs index 0e735e67f7..10a94d5dd1 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs @@ -1,9 +1,9 @@ using Lucene.Net.Codecs.Lucene40; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Asserting { @@ -103,8 +103,8 @@ internal AssertingTermVectorsWriter(TermVectorsWriter @in) public override void StartDocument(int numVectorFields) { - Debug.Assert(fieldCount == 0); - Debug.Assert(docStatus != Status.STARTED); + Debugging.Assert(() => fieldCount == 0); + Debugging.Assert(() => docStatus != Status.STARTED); @in.StartDocument(numVectorFields); docStatus = Status.STARTED; fieldCount = numVectorFields; @@ -113,17 +113,17 @@ public override void StartDocument(int numVectorFields) public override void FinishDocument() { - Debug.Assert(fieldCount == 0); - Debug.Assert(docStatus == Status.STARTED); + Debugging.Assert(() => fieldCount == 0); + Debugging.Assert(() => docStatus == Status.STARTED); @in.FinishDocument(); docStatus = Status.FINISHED; } public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads) { - Debug.Assert(termCount == 0); - Debug.Assert(docStatus == Status.STARTED); - Debug.Assert(fieldStatus != Status.STARTED); + Debugging.Assert(() => termCount == 0); + Debugging.Assert(() => docStatus == Status.STARTED); + Debugging.Assert(() => fieldStatus != Status.STARTED); @in.StartField(info, numTerms, positions, offsets, payloads); fieldStatus = Status.STARTED; termCount = numTerms; @@ -132,8 +132,8 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo public override void FinishField() { - Debug.Assert(termCount == 0); - Debug.Assert(fieldStatus == Status.STARTED); + Debugging.Assert(() => termCount == 0); + Debugging.Assert(() => fieldStatus == Status.STARTED); @in.FinishField(); fieldStatus = Status.FINISHED; --fieldCount; @@ -141,9 +141,9 @@ public override void FinishField() public override void StartTerm(BytesRef term, int freq) { - Debug.Assert(docStatus == Status.STARTED); - Debug.Assert(fieldStatus == Status.STARTED); - Debug.Assert(termStatus != Status.STARTED); + Debugging.Assert(() => docStatus == Status.STARTED); + Debugging.Assert(() => fieldStatus == Status.STARTED); + Debugging.Assert(() => termStatus != Status.STARTED); @in.StartTerm(term, freq); termStatus = Status.STARTED; positionCount = hasPositions ? 
freq : 0; @@ -151,10 +151,10 @@ public override void StartTerm(BytesRef term, int freq) public override void FinishTerm() { - Debug.Assert(positionCount == 0); - Debug.Assert(docStatus == Status.STARTED); - Debug.Assert(fieldStatus == Status.STARTED); - Debug.Assert(termStatus == Status.STARTED); + Debugging.Assert(() => positionCount == 0); + Debugging.Assert(() => docStatus == Status.STARTED); + Debugging.Assert(() => fieldStatus == Status.STARTED); + Debugging.Assert(() => termStatus == Status.STARTED); @in.FinishTerm(); termStatus = Status.FINISHED; --termCount; @@ -162,9 +162,9 @@ public override void FinishTerm() public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - Debug.Assert(docStatus == Status.STARTED); - Debug.Assert(fieldStatus == Status.STARTED); - Debug.Assert(termStatus == Status.STARTED); + Debugging.Assert(() => docStatus == Status.STARTED); + Debugging.Assert(() => fieldStatus == Status.STARTED); + Debugging.Assert(() => termStatus == Status.STARTED); @in.AddPosition(position, startOffset, endOffset, payload); --positionCount; } @@ -176,10 +176,10 @@ public override void Abort() public override void Finish(FieldInfos fis, int numDocs) { - Debug.Assert(docCount == numDocs); - Debug.Assert(docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED)); - Debug.Assert(fieldStatus != Status.STARTED); - Debug.Assert(termStatus != Status.STARTED); + Debugging.Assert(() => docCount == numDocs); + Debugging.Assert(() => docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED)); + Debugging.Assert(() => fieldStatus != Status.STARTED); + Debugging.Assert(() => termStatus != Status.STARTED); @in.Finish(fis, numDocs); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs index ba2ebfbb81..dacda7f8f6 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Util; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Compressing.Dummy { @@ -57,7 +57,7 @@ private class DecompressorAnonymousInnerClassHelper : Decompressor { public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes) { - Debug.Assert(offset + length <= originalLength); + Debugging.Assert(() => offset + length <= originalLength); if (bytes.Bytes.Length < originalLength) { bytes.Bytes = new byte[ArrayUtil.Oversize(originalLength, 1)]; diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs index 9cddcce3e1..d46a3adfc1 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
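The asserting writers above all rely on the same device: a small Status enum plus asserts that prove callers follow the Start/Finish protocol in order and that the declared counts match. A minimal sketch of that state machine, reduced to one document-level pair (the real writers also track fields, terms, and positions):

    internal enum Status { UNDEFINED, STARTED, FINISHED }

    internal sealed class OrderCheckingWriter
    {
        private Status docStatus = Status.UNDEFINED;

        public void StartDocument()
        {
            Debugging.Assert(() => docStatus != Status.STARTED,
                () => "StartDocument while already started, state=" + docStatus);
            docStatus = Status.STARTED;
        }

        public void FinishDocument()
        {
            Debugging.Assert(() => docStatus == Status.STARTED,
                () => "FinishDocument without StartDocument, state=" + docStatus);
            docStatus = Status.FINISHED;
        }
    }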
namespace Lucene.Net.Codecs.Lucene3x { @@ -79,7 +79,7 @@ public override void Write(Directory directory, string segmentName, string segme if (fi.IsIndexed) { bits |= IS_INDEXED; - Debug.Assert(fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads); + Debugging.Assert(() => fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads); if (fi.IndexOptions == IndexOptions.DOCS_ONLY) { bits |= OMIT_TERM_FREQ_AND_POSITIONS; @@ -103,7 +103,7 @@ public override void Write(Directory directory, string segmentName, string segme // only in RW case output.WriteByte((byte)(sbyte)(fi.NormType == Index.DocValuesType.NONE ? 0 : 1)); } - Debug.Assert(fi.Attributes == null); // not used or supported + Debugging.Assert(() => fi.Attributes == null); // not used or supported } success = true; } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs index 5f43e53ef9..59641729d4 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs @@ -1,9 +1,9 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Lucene3x { @@ -81,7 +81,7 @@ public PreFlexRWFieldsWriter(SegmentWriteState state) public override TermsConsumer AddField(FieldInfo field) { - Debug.Assert(field.Number != -1); + Debugging.Assert(() => field.Number != -1); if (field.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0) { throw new NotSupportedException("this codec cannot index offsets"); @@ -164,7 +164,7 @@ public override void StartDoc(int docID, int termDocFreq) lastDocID = docID; - Debug.Assert(docID < outerInstance.outerInstance.totalNumDocs, "docID=" + docID + " totalNumDocs=" + outerInstance.outerInstance.totalNumDocs); + Debugging.Assert(() => docID < outerInstance.outerInstance.totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + outerInstance.outerInstance.totalNumDocs); if (outerInstance.omitTF) { @@ -188,9 +188,9 @@ public override void StartDoc(int docID, int termDocFreq) public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { - Debug.Assert(outerInstance.outerInstance.proxOut != null); - Debug.Assert(startOffset == -1); - Debug.Assert(endOffset == -1); + Debugging.Assert(() => outerInstance.outerInstance.proxOut != null); + Debugging.Assert(() => startOffset == -1); + Debugging.Assert(() => endOffset == -1); //System.out.println(" w pos=" + position + " payl=" + payload); int delta = position - lastPosition; lastPosition = position; diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs index 4bf5dcf72b..6831153de3 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs @@ -1,9 +1,9 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use 
System.Diagnostics.Debug because those calls will be optimized out of the release! using AssertionError = Lucene.Net.Diagnostics.AssertionException; namespace Lucene.Net.Codecs.Lucene3x @@ -75,7 +75,7 @@ public PreFlexRWNormsConsumer(Directory directory, string segment, IOContext con public override void AddNumericField(FieldInfo field, IEnumerable values) { - Debug.Assert(field.Number > lastFieldNumber, "writing norms fields out of order" + lastFieldNumber + " -> " + field.Number); + Debugging.Assert(() => field.Number > lastFieldNumber, () => "writing norms fields out of order" + lastFieldNumber + " -> " + field.Number); foreach (var n in values) { if (((sbyte)(byte)(long)n) < sbyte.MinValue || ((sbyte)(byte)(long)n) > sbyte.MaxValue) diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs index 8eb4908260..e7bd7d905b 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs @@ -1,9 +1,9 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Lucene3x { @@ -36,7 +36,7 @@ internal sealed class PreFlexRWStoredFieldsWriter : StoredFieldsWriter public PreFlexRWStoredFieldsWriter(Directory directory, string segment, IOContext context) { - Debug.Assert(directory != null); + Debugging.Assert(() => directory != null); this.directory = directory; this.segment = segment; @@ -188,7 +188,7 @@ public override void WriteField(FieldInfo info, IIndexableField field) fieldsStream.WriteInt64(J2N.BitConversion.DoubleToInt64Bits(field.GetDoubleValue().Value)); break; default: - Debug.Assert(false); + Debugging.Assert(() => false); break; } } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs index a173c04e3f..1f8fb40a63 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs @@ -1,10 +1,10 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
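The PreFlexRWStoredFieldsWriter constructor above asserts directory != null rather than throwing. That is reasonable for an internal test-framework type whose callers are all in-tree, but Debugging.Assert is a no-op when asserts are off, so a public entry point should still validate eagerly. A sketch of the two contracts side by side (both types are hypothetical):

    using System;

    internal sealed class InternalWriter
    {
        private readonly object directory;

        public InternalWriter(object directory)
        {
            // Internal invariant: only our own code constructs this type.
            Debugging.Assert(() => directory != null);
            this.directory = directory;
        }
    }

    public sealed class PublicWriter
    {
        private readonly object directory;

        public PublicWriter(object directory)
        {
            // Public contract: enforced in every build configuration.
            this.directory = directory ?? throw new ArgumentNullException(nameof(directory));
        }
    }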
namespace Lucene.Net.Codecs.Lucene3x { @@ -75,7 +75,7 @@ public override void StartDocument(int numVectorFields) public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads) { - Debug.Assert(lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, "fieldName=" + info.Name + " lastFieldName=" + lastFieldName); + Debugging.Assert(() => lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName); lastFieldName = info.Name; if (payloads) { @@ -98,7 +98,7 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo } tvf.WriteByte((byte)bits); - Debug.Assert(fieldCount <= numVectorFields); + Debugging.Assert(() => fieldCount <= numVectorFields); if (fieldCount == numVectorFields) { // last field of the document @@ -148,7 +148,7 @@ public override void StartTerm(BytesRef term, int freq) public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - Debug.Assert(payload == null); + Debugging.Assert(() => payload == null); if (positions && offsets) { // write position delta diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs index 9d72885339..3030f4ce33 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs @@ -1,10 +1,10 @@ using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; using System.IO; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
using Directory = Lucene.Net.Store.Directory; namespace Lucene.Net.Codecs.Lucene3x @@ -135,7 +135,7 @@ private void Initialize(Directory directory, string segment, FieldInfos fis, int output.WriteInt32(indexInterval); // write indexInterval output.WriteInt32(skipInterval); // write skipInterval output.WriteInt32(maxSkipLevels); // write maxSkipLevels - Debug.Assert(InitUTF16Results()); + Debugging.Assert(InitUTF16Results); success = true; } finally @@ -202,10 +202,10 @@ private int CompareToLastTerm(int fieldNumber, BytesRef term) } scratchBytes.CopyBytes(term); - Debug.Assert(lastTerm.Offset == 0); + Debugging.Assert(() => lastTerm.Offset == 0); UnicodeUtil.UTF8toUTF16(lastTerm.Bytes, 0, lastTerm.Length, utf16Result1); - Debug.Assert(scratchBytes.Offset == 0); + Debugging.Assert(() => scratchBytes.Offset == 0); UnicodeUtil.UTF8toUTF16(scratchBytes.Bytes, 0, scratchBytes.Length, utf16Result2); int len; @@ -243,10 +243,10 @@ private int CompareToLastTerm(int fieldNumber, BytesRef term) /// public void Add(int fieldNumber, BytesRef term, TermInfo ti) { - Debug.Assert(CompareToLastTerm(fieldNumber, term) < 0 || (isIndex && term.Length == 0 && lastTerm.Length == 0), "Terms are out of order: field=" + FieldName(fieldInfos, fieldNumber) + " (number " + fieldNumber + ")" + " lastField=" + FieldName(fieldInfos, lastFieldNumber) + " (number " + lastFieldNumber + ")" + " text=" + term.Utf8ToString() + " lastText=" + lastTerm.Utf8ToString()); + Debugging.Assert(() => CompareToLastTerm(fieldNumber, term) < 0 || (isIndex && term.Length == 0 && lastTerm.Length == 0), () => "Terms are out of order: field=" + FieldName(fieldInfos, fieldNumber) + " (number " + fieldNumber + ")" + " lastField=" + FieldName(fieldInfos, lastFieldNumber) + " (number " + lastFieldNumber + ")" + " text=" + term.Utf8ToString() + " lastText=" + lastTerm.Utf8ToString()); - Debug.Assert(ti.FreqPointer >= lastTi.FreqPointer, "freqPointer out of order (" + ti.FreqPointer + " < " + lastTi.FreqPointer + ")"); - Debug.Assert(ti.ProxPointer >= lastTi.ProxPointer, "proxPointer out of order (" + ti.ProxPointer + " < " + lastTi.ProxPointer + ")"); + Debugging.Assert(() => ti.FreqPointer >= lastTi.FreqPointer, () => "freqPointer out of order (" + ti.FreqPointer + " < " + lastTi.FreqPointer + ")"); + Debugging.Assert(() => ti.ProxPointer >= lastTi.ProxPointer, () => "proxPointer out of order (" + ti.ProxPointer + " < " + lastTi.ProxPointer + ")"); if (!isIndex && size % indexInterval == 0) { diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs index 0085aa212c..cd0094980e 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Support; @@ -8,7 +9,6 @@ using System.Globalization; using System.Linq; using JCG = J2N.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
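The TermInfosWriter hunk above passes InitUTF16Results as a method group, which ports Java's "assert initUTF16Results();" idiom: a bool-returning method whose side effect should run only when assertions are enabled. The method must always return true so the assert itself never fires; its whole purpose is the conditional initialization. A sketch, with hypothetical scratch buffers:

    internal sealed class AssertInitDemo
    {
        private char[] utf16Scratch1, utf16Scratch2; // only touched from assert-guarded paths

        public AssertInitDemo()
        {
            // Allocates the scratch buffers only when asserts are on; a method
            // group converts to Func<bool> without writing an extra lambda.
            Debugging.Assert(InitUTF16Results);
        }

        private bool InitUTF16Results()
        {
            utf16Scratch1 = new char[10];
            utf16Scratch2 = new char[10];
            return true; // must be true: a false here would be a bug in the idiom itself
        }
    }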
namespace Lucene.Net.Codecs.Lucene40 { @@ -333,7 +333,7 @@ private void AddVarStraightBytesField(FieldInfo field, IndexOutput data, IndexOu index.WriteVInt64(maxAddress); int maxDoc = state.SegmentInfo.DocCount; - Debug.Assert(maxDoc != int.MaxValue); // unsupported by the 4.0 impl + Debugging.Assert(() => maxDoc != int.MaxValue); // unsupported by the 4.0 impl PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT); long currentPosition = 0; @@ -346,7 +346,7 @@ private void AddVarStraightBytesField(FieldInfo field, IndexOutput data, IndexOu } } // write sentinel - Debug.Assert(currentPosition == maxAddress); + Debugging.Assert(() => currentPosition == maxAddress); w.Add(currentPosition); w.Finish(); } @@ -375,7 +375,7 @@ private void AddFixedDerefBytesField(FieldInfo field, IndexOutput data, IndexOut /* ordinals */ int valueCount = dictionary.Count; - Debug.Assert(valueCount > 0); + Debugging.Assert(() => valueCount > 0); index.WriteInt32(valueCount); int maxDoc = state.SegmentInfo.DocCount; PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT); @@ -439,7 +439,7 @@ private void AddVarDerefBytesField(FieldInfo field, IndexOutput data, IndexOutpu // the little vint encoding used for var-deref private static void WriteVInt16(IndexOutput o, int i) { - Debug.Assert(i >= 0 && i <= short.MaxValue); + Debugging.Assert(() => i >= 0 && i <= short.MaxValue); if (i < 128) { o.WriteByte((byte)(sbyte)i); @@ -545,7 +545,7 @@ private void AddFixedSortedBytesField(FieldInfo field, IndexOutput data, IndexOu index.WriteInt32(valueCount); int maxDoc = state.SegmentInfo.DocCount; - Debug.Assert(valueCount > 0); + Debugging.Assert(() => valueCount > 0); PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT); foreach (long n in docToOrd) { @@ -578,7 +578,7 @@ private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutp long maxAddress = data.GetFilePointer() - startPos; index.WriteInt64(maxAddress); - Debug.Assert(valueCount != int.MaxValue); // unsupported by the 4.0 impl + Debugging.Assert(() => valueCount != int.MaxValue); // unsupported by the 4.0 impl PackedInt32s.Writer w = PackedInt32s.GetWriter(index, valueCount + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT); long currentPosition = 0; @@ -588,14 +588,14 @@ private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutp currentPosition += v.Length; } // write sentinel - Debug.Assert(currentPosition == maxAddress); + Debugging.Assert(() => currentPosition == maxAddress); w.Add(currentPosition); w.Finish(); /* ordinals */ int maxDoc = state.SegmentInfo.DocCount; - Debug.Assert(valueCount > 0); + Debugging.Assert(() => valueCount > 0); PackedInt32s.Writer ords = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT); foreach (long n in docToOrd) { diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs index ec1bdaf31f..a11809e193 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; -using Debug = 
Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Lucene40 { @@ -66,7 +66,7 @@ public override void Write(Directory directory, string segmentName, string segme if (fi.IsIndexed) { bits |= Lucene40FieldInfosFormat.IS_INDEXED; - Debug.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); + Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); if (indexOptions == IndexOptions.DOCS_ONLY) { bits |= Lucene40FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS; @@ -87,7 +87,7 @@ public override void Write(Directory directory, string segmentName, string segme // pack the DV types in one byte byte dv = DocValuesByte(fi.DocValuesType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY)); byte nrm = DocValuesByte(fi.NormType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY)); - Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0); + Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0); var val = (byte)(0xff & ((nrm << 4) | (byte)dv)); output.WriteByte(val); output.WriteStringStringMap(fi.Attributes); @@ -113,12 +113,12 @@ public virtual byte DocValuesByte(DocValuesType type, string legacyTypeAtt) { if (type == DocValuesType.NONE) { - Debug.Assert(legacyTypeAtt == null); + Debugging.Assert(() => legacyTypeAtt == null); return 0; } else { - Debug.Assert(legacyTypeAtt != null); + Debugging.Assert(() => legacyTypeAtt != null); //return (sbyte)LegacyDocValuesType.ordinalLookup[legacyTypeAtt]; return (byte)legacyTypeAtt.ToLegacyDocValuesType(); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs index e597defba0..0cdfac0eb3 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Lucene40 { @@ -208,7 +208,7 @@ public override void StartDoc(int docID, int termDocFreq) skipListWriter.BufferSkip(df); } - Debug.Assert(docID < totalNumDocs, "docID=" + docID + " totalNumDocs=" + totalNumDocs); + Debugging.Assert(() => docID < totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + totalNumDocs); lastDocID = docID; if (indexOptions == IndexOptions.DOCS_ONLY) @@ -234,12 +234,12 @@ public override void StartDoc(int docID, int termDocFreq) public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { //if (DEBUG) System.out.println("SPW: addPos pos=" + position + " payload=" + (payload == null ? 
"null" : (payload.Length + " bytes")) + " proxFP=" + proxOut.getFilePointer()); - Debug.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0, "invalid indexOptions: " + indexOptions); - Debug.Assert(proxOut != null); + Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0, () => "invalid indexOptions: " + indexOptions); + Debugging.Assert(() => proxOut != null); int delta = position - lastPosition; - Debug.Assert(delta >= 0, "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) + Debugging.Assert(() => delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) lastPosition = position; @@ -271,7 +271,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset // and the numbers aren't that much smaller anyways. int offsetDelta = startOffset - lastOffset; int offsetLength = endOffset - startOffset; - Debug.Assert(offsetDelta >= 0 && offsetLength >= 0, "startOffset=" + startOffset + ",lastOffset=" + lastOffset + ",endOffset=" + endOffset); + Debugging.Assert(() => offsetDelta >= 0 && offsetLength >= 0, () => "startOffset=" + startOffset + ",lastOffset=" + lastOffset + ",endOffset=" + endOffset); if (offsetLength != lastOffsetLength) { proxOut.WriteVInt32(offsetDelta << 1 | 1); @@ -308,11 +308,11 @@ public override void FinishTerm(BlockTermState state) { StandardTermState state_ = (StandardTermState)state; // if (DEBUG) System.out.println("SPW: finishTerm seg=" + segment + " freqStart=" + freqStart); - Debug.Assert(state_.DocFreq > 0); + Debugging.Assert(() => state_.DocFreq > 0); // TODO: wasteful we are counting this (counting # docs // for this term) in two places? - Debug.Assert(state_.DocFreq == df); + Debugging.Assert(() => state_.DocFreq == df); state_.FreqStart = freqStart; state_.ProxStart = proxStart; if (df >= skipMinimum) @@ -337,7 +337,7 @@ public override void EncodeTerm(long[] empty, DataOutput @out, FieldInfo fieldIn @out.WriteVInt64(state_.FreqStart - lastState.FreqStart); if (state_.SkipOffset != -1) { - Debug.Assert(state_.SkipOffset > 0); + Debugging.Assert(() => state_.SkipOffset > 0); @out.WriteVInt64(state_.SkipOffset); } if (indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs index 282f62f201..f8b50ee4ae 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Support; using System; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
namespace Lucene.Net.Codecs.Lucene40 { @@ -67,8 +67,8 @@ public Lucene40SkipListWriter(int skipInterval, int numberOfSkipLevels, int docC /// public virtual void SetSkipData(int doc, bool storePayloads, int payloadLength, bool storeOffsets, int offsetLength) { - Debug.Assert(storePayloads || payloadLength == -1); - Debug.Assert(storeOffsets || offsetLength == -1); + Debugging.Assert(() => storePayloads || payloadLength == -1); + Debugging.Assert(() => storeOffsets || offsetLength == -1); this.curDoc = doc; this.curStorePayloads = storePayloads; this.curPayloadLength = payloadLength; @@ -120,8 +120,8 @@ protected override void WriteSkipData(int level, IndexOutput skipBuffer) if (curStorePayloads || curStoreOffsets) { - Debug.Assert(curStorePayloads || curPayloadLength == lastSkipPayloadLength[level]); - Debug.Assert(curStoreOffsets || curOffsetLength == lastSkipOffsetLength[level]); + Debugging.Assert(() => curStorePayloads || curPayloadLength == lastSkipPayloadLength[level]); + Debugging.Assert(() => curStoreOffsets || curOffsetLength == lastSkipOffsetLength[level]); if (curPayloadLength == lastSkipPayloadLength[level] && curOffsetLength == lastSkipOffsetLength[level]) { diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs index 58dcb647b7..9c87bf37a9 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; @@ -9,7 +10,6 @@ using JCG = J2N.Collections.Generic; using static Lucene.Net.Util.Fst.FST; using static Lucene.Net.Util.Packed.PackedInt32s; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.Lucene42 { @@ -125,7 +125,7 @@ internal virtual void AddNumericField(FieldInfo field, IEnumerable values ++count; } - Debug.Assert(count == maxDoc); + Debugging.Assert(() => count == maxDoc); } if (uniqueValues != null) diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs index 5a1d1b0f8b..9889f63d78 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
namespace Lucene.Net.Codecs.Lucene42 { @@ -66,7 +66,7 @@ public override void Write(Directory directory, string segmentName, string segme if (fi.IsIndexed) { bits |= Lucene42FieldInfosFormat.IS_INDEXED; - Debug.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); + Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); if (indexOptions == IndexOptions.DOCS_ONLY) { bits |= Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS; @@ -87,7 +87,7 @@ public override void Write(Directory directory, string segmentName, string segme // pack the DV types in one byte var dv = DocValuesByte(fi.DocValuesType); var nrm = DocValuesByte(fi.NormType); - Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0); + Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0); var val = (byte)(0xff & ((nrm << 4) | (byte)dv)); output.WriteByte(val); output.WriteStringStringMap(fi.Attributes); diff --git a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs index e8b6cce002..d8e4ed8af6 100644 --- a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs @@ -1,10 +1,10 @@ using Lucene.Net.Codecs.BlockTerms; using Lucene.Net.Codecs.IntBlock; using Lucene.Net.Codecs.Sep; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.MockIntBlock { @@ -107,7 +107,7 @@ public virtual int ReadBlock() { buffer[0] = input.ReadVInt32(); int count = buffer[0] <= 3 ? baseBlockSize - 1 : 2 * baseBlockSize - 1; - Debug.Assert(buffer.Length >= count, "buffer.length=" + buffer.Length + " count=" + count); + Debugging.Assert(() => buffer.Length >= count, () => "buffer.length=" + buffer.Length + " count=" + count); for (int i = 0; i < count; i++) { buffer[i + 1] = input.ReadVInt32(); diff --git a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs index ab3e43c116..75925c1334 100644 --- a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs @@ -5,13 +5,13 @@ using Lucene.Net.Codecs.MockSep; using Lucene.Net.Codecs.Pulsing; using Lucene.Net.Codecs.Sep; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; using System.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
namespace Lucene.Net.Codecs.MockRandom { @@ -88,7 +88,7 @@ public MockInt32StreamFactory(Random random) private static string GetExtension(string fileName) { int idx = fileName.IndexOf('.'); - Debug.Assert(idx != -1); + Debugging.Assert(() => idx != -1); return fileName.Substring(idx); } diff --git a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs index 75eeebc8bf..d4565c2d1c 100644 --- a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs @@ -1,5 +1,6 @@ using J2N.Text; using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; @@ -7,7 +8,6 @@ using System.Collections.Generic; using System.Linq; using JCG = J2N.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Codecs.RAMOnly { @@ -290,8 +290,8 @@ public override IComparer<BytesRef> Comparer public override void FinishTerm(BytesRef text, TermStats stats) { - Debug.Assert(stats.DocFreq > 0); - Debug.Assert(stats.DocFreq == current.docs.Count); + Debugging.Assert(() => stats.DocFreq > 0); + Debugging.Assert(() => stats.DocFreq == current.docs.Count); current.totalTermFreq = stats.TotalTermFreq; field.termToDocs[current.term] = current; } @@ -324,8 +324,8 @@ public override void StartDoc(int docID, int freq) public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { - Debug.Assert(startOffset == -1); - Debug.Assert(endOffset == -1); + Debugging.Assert(() => startOffset == -1); + Debugging.Assert(() => endOffset == -1); current.positions[posUpto] = position; if (payload != null && payload.Length > 0) { @@ -341,7 +341,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset public override void FinishDoc() { - Debug.Assert(posUpto == current.positions.Length); + Debugging.Assert(() => posUpto == current.positions.Length); } } diff --git a/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs b/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs index d14e6c0df0..2c56376f07 100644 --- a/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs +++ b/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Util; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
namespace Lucene.Net.Index { @@ -31,7 +31,7 @@ public AllDeletedFilterReader(AtomicReader @in) : base(@in) { liveDocs = new Bits.MatchNoBits(@in.MaxDoc); - Debug.Assert(MaxDoc == 0 || HasDeletions); + Debugging.Assert(() => MaxDoc == 0 || HasDeletions); } public override IBits LiveDocs => liveDocs; diff --git a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs index 0183aac040..4657a161c2 100644 --- a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs +++ b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs @@ -1,9 +1,9 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Search; using Lucene.Net.Util; using Lucene.Net.Util.Automaton; using System; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Index { @@ -36,7 +36,7 @@ public AssertingFields(Fields input) public override IEnumerator<string> GetEnumerator() { IEnumerator<string> iterator = base.GetEnumerator(); - Debug.Assert(iterator != null); + Debugging.Assert(() => iterator != null); return iterator; } @@ -59,8 +59,8 @@ public AssertingTerms(Terms input) public override TermsEnum Intersect(CompiledAutomaton automaton, BytesRef bytes) { TermsEnum termsEnum = m_input.Intersect(automaton, bytes); - Debug.Assert(termsEnum != null); - Debug.Assert(bytes == null || bytes.IsValid()); + Debugging.Assert(() => termsEnum != null); + Debugging.Assert(() => bytes == null || bytes.IsValid()); return new AssertingAtomicReader.AssertingTermsEnum(termsEnum); } @@ -73,7 +73,7 @@ public override TermsEnum GetIterator(TermsEnum reuse) reuse = ((AssertingAtomicReader.AssertingTermsEnum)reuse).m_input; } TermsEnum termsEnum = base.GetIterator(reuse); - Debug.Assert(termsEnum != null); + Debugging.Assert(() => termsEnum != null); return new AssertingAtomicReader.AssertingTermsEnum(termsEnum); } } @@ -102,7 +102,7 @@ public AssertingDocsEnum(DocsEnum @in, bool failOnUnsupportedDocID) try { int docid = @in.DocID; - Debug.Assert(docid == -1, @in.GetType() + ": invalid initial doc id: " + docid); + Debugging.Assert(() => docid == -1, () => @in.GetType() + ": invalid initial doc id: " + docid); } catch (NotSupportedException /*e*/) { @@ -116,9 +116,9 @@ public AssertingDocsEnum(DocsEnum @in, bool failOnUnsupportedDocID) public override int NextDoc() { - Debug.Assert(state != DocsEnumState.FINISHED, "NextDoc() called after NO_MORE_DOCS"); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS"); int nextDoc = base.NextDoc(); - Debug.Assert(nextDoc > doc, "backwards NextDoc from " + doc + " to " + nextDoc + " " + m_input); + Debugging.Assert(() => nextDoc > doc, () => "backwards NextDoc from " + doc + " to " + nextDoc + " " + m_input); if (nextDoc == DocIdSetIterator.NO_MORE_DOCS) { state = DocsEnumState.FINISHED; @@ -127,16 +127,16 @@ public override int NextDoc() { state = DocsEnumState.ITERATING; } - Debug.Assert(base.DocID == nextDoc); + Debugging.Assert(() => base.DocID == nextDoc); return doc = nextDoc; } public override int Advance(int target) { - Debug.Assert(state != DocsEnumState.FINISHED, "Advance() called after NO_MORE_DOCS"); - Debug.Assert(target > doc, "target must be > DocID, got " + target + " <= " + doc); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS"); + Debugging.Assert(() => target > doc, () => "target must 
be > DocID, got " + target + " <= " + doc); int advanced = base.Advance(target); - Debug.Assert(advanced >= target, "backwards advance from: " + target + " to: " + advanced); + Debugging.Assert(() => advanced >= target, () => "backwards advance from: " + target + " to: " + advanced); if (advanced == DocIdSetIterator.NO_MORE_DOCS) { state = DocsEnumState.FINISHED; @@ -145,7 +145,7 @@ public override int Advance(int target) { state = DocsEnumState.ITERATING; } - Debug.Assert(base.DocID == advanced); + Debugging.Assert(() => base.DocID == advanced); return doc = advanced; } @@ -153,7 +153,7 @@ public override int DocID { get { - Debug.Assert(doc == base.DocID, " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc); + Debugging.Assert(() => doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc); return doc; } } @@ -162,10 +162,10 @@ public override int Freq { get { - Debug.Assert(state != DocsEnumState.START, "Freq called before NextDoc()/Advance()"); - Debug.Assert(state != DocsEnumState.FINISHED, "Freq called after NO_MORE_DOCS"); + Debugging.Assert(() => state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()"); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS"); int freq = base.Freq; - Debug.Assert(freq > 0); + Debugging.Assert(() => freq > 0); return freq; } } @@ -186,7 +186,7 @@ public AssertingNumericDocValues(NumericDocValues @in, int maxDoc) public override long Get(int docID) { - Debug.Assert(docID >= 0 && docID < maxDoc); + Debugging.Assert(() => docID >= 0 && docID < maxDoc); return @in.Get(docID); } } @@ -206,10 +206,10 @@ public AssertingBinaryDocValues(BinaryDocValues @in, int maxDoc) public override void Get(int docID, BytesRef result) { - Debug.Assert(docID >= 0 && docID < maxDoc); - Debug.Assert(result.IsValid()); + Debugging.Assert(() => docID >= 0 && docID < maxDoc); + Debugging.Assert(result.IsValid); @in.Get(docID, result); - Debug.Assert(result.IsValid()); + Debugging.Assert(result.IsValid); } } @@ -226,23 +226,23 @@ public AssertingSortedDocValues(SortedDocValues @in, int maxDoc) this.@in = @in; this.maxDoc = maxDoc; this.valueCount = @in.ValueCount; - Debug.Assert(valueCount >= 0 && valueCount <= maxDoc); + Debugging.Assert(() => valueCount >= 0 && valueCount <= maxDoc); } public override int GetOrd(int docID) { - Debug.Assert(docID >= 0 && docID < maxDoc); + Debugging.Assert(() => docID >= 0 && docID < maxDoc); int ord = @in.GetOrd(docID); - Debug.Assert(ord >= -1 && ord < valueCount); + Debugging.Assert(() => ord >= -1 && ord < valueCount); return ord; } public override void LookupOrd(int ord, BytesRef result) { - Debug.Assert(ord >= 0 && ord < valueCount); - Debug.Assert(result.IsValid()); + Debugging.Assert(() => ord >= 0 && ord < valueCount); + Debugging.Assert(result.IsValid); @in.LookupOrd(ord, result); - Debug.Assert(result.IsValid()); + Debugging.Assert(result.IsValid); } public override int ValueCount @@ -250,25 +250,25 @@ public override int ValueCount get { int valueCount = @in.ValueCount; - Debug.Assert(valueCount == this.valueCount); // should not change + Debugging.Assert(() => valueCount == this.valueCount); // should not change return valueCount; } } public override void Get(int docID, BytesRef result) { - Debug.Assert(docID >= 0 && docID < maxDoc); - Debug.Assert(result.IsValid()); + Debugging.Assert(() => docID >= 0 && docID < maxDoc); + Debugging.Assert(result.IsValid); @in.Get(docID, result); - 
Debug.Assert(result.IsValid()); + Debugging.Assert(result.IsValid); } public override int LookupTerm(BytesRef key) { - Debug.Assert(key.IsValid()); + Debugging.Assert(key.IsValid); int result = @in.LookupTerm(key); - Debug.Assert(result < valueCount); - Debug.Assert(key.IsValid()); + Debugging.Assert(() => result < valueCount); + Debugging.Assert(key.IsValid); return result; } } @@ -287,32 +287,32 @@ public AssertingSortedSetDocValues(SortedSetDocValues @in, int maxDoc) this.@in = @in; this.maxDoc = maxDoc; this.valueCount = @in.ValueCount; - Debug.Assert(valueCount >= 0); + Debugging.Assert(() => valueCount >= 0); } public override long NextOrd() { - Debug.Assert(lastOrd != NO_MORE_ORDS); + Debugging.Assert(() => lastOrd != NO_MORE_ORDS); long ord = @in.NextOrd(); - Debug.Assert(ord < valueCount); - Debug.Assert(ord == NO_MORE_ORDS || ord > lastOrd); + Debugging.Assert(() => ord < valueCount); + Debugging.Assert(() => ord == NO_MORE_ORDS || ord > lastOrd); lastOrd = ord; return ord; } public override void SetDocument(int docID) { - Debug.Assert(docID >= 0 && docID < maxDoc, "docid=" + docID + ",maxDoc=" + maxDoc); + Debugging.Assert(() => docID >= 0 && docID < maxDoc, () => "docid=" + docID + ",maxDoc=" + maxDoc); @in.SetDocument(docID); lastOrd = -2; } public override void LookupOrd(long ord, BytesRef result) { - Debug.Assert(ord >= 0 && ord < valueCount); - Debug.Assert(result.IsValid()); + Debugging.Assert(() => ord >= 0 && ord < valueCount); + Debugging.Assert(result.IsValid); @in.LookupOrd(ord, result); - Debug.Assert(result.IsValid()); + Debugging.Assert(result.IsValid); } public override long ValueCount @@ -320,17 +320,17 @@ public override long ValueCount get { long valueCount = @in.ValueCount; - Debug.Assert(valueCount == this.valueCount); // should not change + Debugging.Assert(() => valueCount == this.valueCount); // should not change return valueCount; } } public override long LookupTerm(BytesRef key) { - Debug.Assert(key.IsValid()); + Debugging.Assert(key.IsValid); long result = @in.LookupTerm(key); - Debug.Assert(result < valueCount); - Debug.Assert(key.IsValid()); + Debugging.Assert(() => result < valueCount); + Debugging.Assert(key.IsValid); return result; } } @@ -348,7 +348,7 @@ public AssertingBits(IBits @in) public virtual bool Get(int index) { - Debug.Assert(index >= 0 && index < Length); + Debugging.Assert(() => index >= 0 && index < Length); return @in.Get(index); } @@ -365,10 +365,10 @@ public AssertingAtomicReader(AtomicReader @in) : base(@in) { // check some basic reader sanity - Debug.Assert(@in.MaxDoc >= 0); - Debug.Assert(@in.NumDocs <= @in.MaxDoc); - Debug.Assert(@in.NumDeletedDocs + @in.NumDocs == @in.MaxDoc); - Debug.Assert(!@in.HasDeletions || @in.NumDeletedDocs > 0 && @in.NumDocs < @in.MaxDoc); + Debugging.Assert(() => @in.MaxDoc >= 0); + Debugging.Assert(() => @in.NumDocs <= @in.MaxDoc); + Debugging.Assert(() => @in.NumDeletedDocs + @in.NumDocs == @in.MaxDoc); + Debugging.Assert(() => !@in.HasDeletions || @in.NumDeletedDocs > 0 && @in.NumDocs < @in.MaxDoc); } public override Fields Fields @@ -409,7 +409,7 @@ public AssertingTermsEnum(TermsEnum @in) public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { - Debug.Assert(state == State.POSITIONED, "Docs(...) called on unpositioned TermsEnum"); + Debugging.Assert(() => state == State.POSITIONED, () => "Docs(...) called on unpositioned TermsEnum"); // TODO: should we give this thing a random to be super-evil, // and randomly *not* unwrap? 
@@ -423,7 +423,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { - Debug.Assert(state == State.POSITIONED, "DocsAndPositions(...) called on unpositioned TermsEnum"); + Debugging.Assert(() => state == State.POSITIONED, () => "DocsAndPositions(...) called on unpositioned TermsEnum"); // TODO: should we give this thing a random to be super-evil, // and randomly *not* unwrap? @@ -439,7 +439,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos // someone should not call next() after it returns null!!!! public override BytesRef Next() { - Debug.Assert(state == State.INITIAL || state == State.POSITIONED, "Next() called on unpositioned TermsEnum"); + Debugging.Assert(() => state == State.INITIAL || state == State.POSITIONED, () => "Next() called on unpositioned TermsEnum"); BytesRef result = base.Next(); if (result == null) { @@ -447,7 +447,7 @@ public override BytesRef Next() } else { - Debug.Assert(result.IsValid()); + Debugging.Assert(result.IsValid); state = State.POSITIONED; } return result; @@ -457,7 +457,7 @@ public override long Ord { get { - Debug.Assert(state == State.POSITIONED, "Ord called on unpositioned TermsEnum"); + Debugging.Assert(() => state == State.POSITIONED, () => "Ord called on unpositioned TermsEnum"); return base.Ord; } } @@ -466,7 +466,7 @@ public override int DocFreq { get { - Debug.Assert(state == State.POSITIONED, "DocFreq called on unpositioned TermsEnum"); + Debugging.Assert(() => state == State.POSITIONED, () => "DocFreq called on unpositioned TermsEnum"); return base.DocFreq; } } @@ -475,7 +475,7 @@ public override long TotalTermFreq { get { - Debug.Assert(state == State.POSITIONED, "TotalTermFreq called on unpositioned TermsEnum"); + Debugging.Assert(() => state == State.POSITIONED, () => "TotalTermFreq called on unpositioned TermsEnum"); return base.TotalTermFreq; } } @@ -484,9 +484,9 @@ public override BytesRef Term { get { - Debug.Assert(state == State.POSITIONED, "Term called on unpositioned TermsEnum"); + Debugging.Assert(() => state == State.POSITIONED, () => "Term called on unpositioned TermsEnum"); BytesRef ret = base.Term; - Debug.Assert(ret == null || ret.IsValid()); + Debugging.Assert(() => ret == null || ret.IsValid()); return ret; } } @@ -499,7 +499,7 @@ public override void SeekExact(long ord) public override SeekStatus SeekCeil(BytesRef term) { - Debug.Assert(term.IsValid()); + Debugging.Assert(term.IsValid); SeekStatus result = base.SeekCeil(term); if (result == SeekStatus.END) { @@ -514,7 +514,7 @@ public override SeekStatus SeekCeil(BytesRef term) public override bool SeekExact(BytesRef text) { - Debug.Assert(text.IsValid()); + Debugging.Assert(text.IsValid); if (base.SeekExact(text)) { state = State.POSITIONED; @@ -529,13 +529,13 @@ public override bool SeekExact(BytesRef text) public override TermState GetTermState() { - Debug.Assert(state == State.POSITIONED, "GetTermState() called on unpositioned TermsEnum"); + Debugging.Assert(() => state == State.POSITIONED, () => "GetTermState() called on unpositioned TermsEnum"); return base.GetTermState(); } public override void SeekExact(BytesRef term, TermState state) { - Debug.Assert(term.IsValid()); + Debugging.Assert(term.IsValid); base.SeekExact(term, state); this.state = State.POSITIONED; } @@ -556,15 +556,15 @@ public AssertingDocsAndPositionsEnum(DocsAndPositionsEnum @in) : base(@in) { int docid = 
@in.DocID; - Debug.Assert(docid == -1, "invalid initial doc id: " + docid); + Debugging.Assert(() => docid == -1, () => "invalid initial doc id: " + docid); doc = -1; } public override int NextDoc() { - Debug.Assert(state != DocsEnumState.FINISHED, "NextDoc() called after NO_MORE_DOCS"); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS"); int nextDoc = base.NextDoc(); - Debug.Assert(nextDoc > doc, "backwards nextDoc from " + doc + " to " + nextDoc); + Debugging.Assert(() => nextDoc > doc, () => "backwards nextDoc from " + doc + " to " + nextDoc); positionCount = 0; if (nextDoc == DocIdSetIterator.NO_MORE_DOCS) { @@ -576,16 +576,16 @@ public override int NextDoc() state = DocsEnumState.ITERATING; positionMax = base.Freq; } - Debug.Assert(base.DocID == nextDoc); + Debugging.Assert(() => base.DocID == nextDoc); return doc = nextDoc; } public override int Advance(int target) { - Debug.Assert(state != DocsEnumState.FINISHED, "Advance() called after NO_MORE_DOCS"); - Debug.Assert(target > doc, "target must be > DocID, got " + target + " <= " + doc); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS"); + Debugging.Assert(() => target > doc, () => "target must be > DocID, got " + target + " <= " + doc); int advanced = base.Advance(target); - Debug.Assert(advanced >= target, "backwards advance from: " + target + " to: " + advanced); + Debugging.Assert(() => advanced >= target, () => "backwards advance from: " + target + " to: " + advanced); positionCount = 0; if (advanced == DocIdSetIterator.NO_MORE_DOCS) { @@ -597,7 +597,7 @@ public override int Advance(int target) state = DocsEnumState.ITERATING; positionMax = base.Freq; } - Debug.Assert(base.DocID == advanced); + Debugging.Assert(() => base.DocID == advanced); return doc = advanced; } @@ -605,7 +605,7 @@ public override int DocID { get { - Debug.Assert(doc == base.DocID, " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc); + Debugging.Assert(() => doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc); return doc; } } @@ -614,21 +614,21 @@ public override int Freq { get { - Debug.Assert(state != DocsEnumState.START, "Freq called before NextDoc()/Advance()"); - Debug.Assert(state != DocsEnumState.FINISHED, "Freq called after NO_MORE_DOCS"); + Debugging.Assert(() => state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()"); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS"); int freq = base.Freq; - Debug.Assert(freq > 0); + Debugging.Assert(() => freq > 0); return freq; } } public override int NextPosition() { - Debug.Assert(state != DocsEnumState.START, "NextPosition() called before NextDoc()/Advance()"); - Debug.Assert(state != DocsEnumState.FINISHED, "NextPosition() called after NO_MORE_DOCS"); - Debug.Assert(positionCount < positionMax, "NextPosition() called more than Freq times!"); + Debugging.Assert(() => state != DocsEnumState.START, () => "NextPosition() called before NextDoc()/Advance()"); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextPosition() called after NO_MORE_DOCS"); + Debugging.Assert(() => positionCount < positionMax, () => "NextPosition() called more than Freq times!"); int position = base.NextPosition(); - Debug.Assert(position >= 0 || position == -1, "invalid position: " + position); + Debugging.Assert(() => position >= 0 || position == -1, () => 
"invalid position: " + position); positionCount++; return position; } @@ -637,9 +637,9 @@ public override int StartOffset { get { - Debug.Assert(state != DocsEnumState.START, "StartOffset called before NextDoc()/Advance()"); - Debug.Assert(state != DocsEnumState.FINISHED, "StartOffset called after NO_MORE_DOCS"); - Debug.Assert(positionCount > 0, "StartOffset called before NextPosition()!"); + Debugging.Assert(() => state != DocsEnumState.START, () => "StartOffset called before NextDoc()/Advance()"); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "StartOffset called after NO_MORE_DOCS"); + Debugging.Assert(() => positionCount > 0, () => "StartOffset called before NextPosition()!"); return base.StartOffset; } } @@ -648,20 +648,20 @@ public override int EndOffset { get { - Debug.Assert(state != DocsEnumState.START, "EndOffset called before NextDoc()/Advance()"); - Debug.Assert(state != DocsEnumState.FINISHED, "EndOffset called after NO_MORE_DOCS"); - Debug.Assert(positionCount > 0, "EndOffset called before NextPosition()!"); + Debugging.Assert(() => state != DocsEnumState.START, () => "EndOffset called before NextDoc()/Advance()"); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "EndOffset called after NO_MORE_DOCS"); + Debugging.Assert(() => positionCount > 0, () => "EndOffset called before NextPosition()!"); return base.EndOffset; } } public override BytesRef GetPayload() { - Debug.Assert(state != DocsEnumState.START, "GetPayload() called before NextDoc()/Advance()"); - Debug.Assert(state != DocsEnumState.FINISHED, "GetPayload() called after NO_MORE_DOCS"); - Debug.Assert(positionCount > 0, "GetPayload() called before NextPosition()!"); + Debugging.Assert(() => state != DocsEnumState.START, () => "GetPayload() called before NextDoc()/Advance()"); + Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "GetPayload() called after NO_MORE_DOCS"); + Debugging.Assert(() => positionCount > 0, () => "GetPayload() called before NextPosition()!"); BytesRef payload = base.GetPayload(); - Debug.Assert(payload == null || payload.IsValid() && payload.Length > 0, "GetPayload() returned payload with invalid length!"); + Debugging.Assert(() => payload == null || payload.IsValid() && payload.Length > 0, () => "GetPayload() returned payload with invalid length!"); return payload; } } @@ -681,13 +681,13 @@ public override NumericDocValues GetNumericDocValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - Debug.Assert(fi != null); - Debug.Assert(fi.DocValuesType == DocValuesType.NUMERIC); + Debugging.Assert(() => fi != null); + Debugging.Assert(() => fi.DocValuesType == DocValuesType.NUMERIC); return new AssertingNumericDocValues(dv, MaxDoc); } else { - Debug.Assert(fi == null || fi.DocValuesType != DocValuesType.NUMERIC); + Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.NUMERIC); return null; } } @@ -698,13 +698,13 @@ public override BinaryDocValues GetBinaryDocValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - Debug.Assert(fi != null); - Debug.Assert(fi.DocValuesType == DocValuesType.BINARY); + Debugging.Assert(() => fi != null); + Debugging.Assert(() => fi.DocValuesType == DocValuesType.BINARY); return new AssertingBinaryDocValues(dv, MaxDoc); } else { - Debug.Assert(fi == null || fi.DocValuesType != DocValuesType.BINARY); + Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.BINARY); return null; } } @@ -715,13 +715,13 @@ public override SortedDocValues 
GetSortedDocValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - Debug.Assert(fi != null); - Debug.Assert(fi.DocValuesType == DocValuesType.SORTED); + Debugging.Assert(() => fi != null); + Debugging.Assert(() => fi.DocValuesType == DocValuesType.SORTED); return new AssertingSortedDocValues(dv, MaxDoc); } else { - Debug.Assert(fi == null || fi.DocValuesType != DocValuesType.SORTED); + Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.SORTED); return null; } } @@ -732,13 +732,13 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - Debug.Assert(fi != null); - Debug.Assert(fi.DocValuesType == DocValuesType.SORTED_SET); + Debugging.Assert(() => fi != null); + Debugging.Assert(() => fi.DocValuesType == DocValuesType.SORTED_SET); return new AssertingSortedSetDocValues(dv, MaxDoc); } else { - Debug.Assert(fi == null || fi.DocValuesType != DocValuesType.SORTED_SET); + Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.SORTED_SET); return null; } } @@ -749,13 +749,13 @@ public override NumericDocValues GetNormValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - Debug.Assert(fi != null); - Debug.Assert(fi.HasNorms); + Debugging.Assert(() => fi != null); + Debugging.Assert(() => fi.HasNorms); return new AssertingNumericDocValues(dv, MaxDoc); } else { - Debug.Assert(fi == null || fi.HasNorms == false); + Debugging.Assert(() => fi == null || fi.HasNorms == false); return null; } } @@ -769,13 +769,13 @@ public override IBits LiveDocs IBits liveDocs = base.LiveDocs; if (liveDocs != null) { - Debug.Assert(MaxDoc == liveDocs.Length); + Debugging.Assert(() => MaxDoc == liveDocs.Length); liveDocs = new AssertingBits(liveDocs); } else { - Debug.Assert(MaxDoc == NumDocs); - Debug.Assert(!HasDeletions); + Debugging.Assert(() => MaxDoc == NumDocs); + Debugging.Assert(() => !HasDeletions); } return liveDocs; } @@ -787,14 +787,14 @@ public override IBits GetDocsWithField(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (docsWithField != null) { - Debug.Assert(fi != null); - Debug.Assert(fi.HasDocValues); - Debug.Assert(MaxDoc == docsWithField.Length); + Debugging.Assert(() => fi != null); + Debugging.Assert(() => fi.HasDocValues); + Debugging.Assert(() => MaxDoc == docsWithField.Length); docsWithField = new AssertingBits(docsWithField); } else { - Debug.Assert(fi == null || fi.HasDocValues == false); + Debugging.Assert(() => fi == null || fi.HasDocValues == false); } return docsWithField; } diff --git a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs index 1f58b28cce..0ba88c9b26 100644 --- a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs @@ -1,6 +1,7 @@ using J2N.Threading; using Lucene.Net.Analysis; using Lucene.Net.Codecs.Lucene42; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Search; @@ -12,7 +13,6 @@ using System.Globalization; using System.Threading; using JCG = J2N.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
using Assert = Lucene.Net.TestFramework.Assert; using static Lucene.Net.Index.TermsEnum; using J2N.Collections.Generic.Extensions; @@ -116,7 +116,7 @@ public virtual void TestOneNumber() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader))).GetNumericDocValues("dv"); Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) } @@ -159,7 +159,7 @@ public virtual void TestOneSingle() // LUCENENET specific - renamed from TestOne { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv"); Assert.AreEqual((long)J2N.BitConversion.SingleToInt32Bits(5.7f), dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - cast required because types don't match (xUnit checks this) } @@ -201,7 +201,7 @@ public virtual void TestTwoNumbers() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1"); Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv2"); @@ -248,7 +248,7 @@ public virtual void TestTwoBinaryValues() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv1"); dv.Get(hits.ScoreDocs[i].Doc, scratch); Assert.AreEqual(new BytesRef(longTerm), scratch); @@ -297,7 +297,7 @@ public virtual void TestTwoFieldsMixed() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1"); Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) BinaryDocValues dv2 = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv2"); @@ -346,7 +346,7 @@ public virtual void TestThreeFieldsMixed() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv1"); int ord = dv.GetOrd(0); dv.LookupOrd(ord, scratch); @@ -399,7 +399,7 @@ public virtual void TestThreeFieldsMixed2() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => 
ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv2"); int ord = dv.GetOrd(0); dv.LookupOrd(ord, scratch); @@ -438,7 +438,7 @@ public virtual void TestTwoDocumentsNumeric() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); Assert.AreEqual(1L, dv.Get(0)); // LUCENENET specific - 1L required because types don't match (xUnit checks this) Assert.AreEqual(2L, dv.Get(1)); // LUCENENET specific - 2L required because types don't match (xUnit checks this) @@ -473,7 +473,7 @@ public virtual void TestTwoDocumentsMerged() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); for (int i = 0; i < 2; i++) { @@ -517,7 +517,7 @@ public virtual void TestBigNumericRange() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); Assert.AreEqual(long.MinValue, dv.Get(0)); Assert.AreEqual(long.MaxValue, dv.Get(1)); @@ -549,7 +549,7 @@ public virtual void TestBigNumericRange2() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); Assert.AreEqual(-8841491950446638677L, dv.Get(0)); Assert.AreEqual(9062230939892376225L, dv.Get(1)); @@ -591,7 +591,7 @@ public virtual void TestBytes() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); dv.Get(hits.ScoreDocs[i].Doc, scratch); Assert.AreEqual(new BytesRef("hello world"), scratch); @@ -627,7 +627,7 @@ public virtual void TestBytesTwoDocumentsMerged() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); for (int i = 0; i < 2; i++) @@ -684,7 +684,7 @@ public virtual void TestSortedBytes() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); dv.LookupOrd(dv.GetOrd(hits.ScoreDocs[i].Doc), scratch); Assert.AreEqual(new BytesRef("hello world"), scratch); @@ -717,7 +717,7 @@ public virtual void TestSortedBytesTwoDocuments() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - 
Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); dv.LookupOrd(dv.GetOrd(0), scratch); @@ -755,7 +755,7 @@ public virtual void TestSortedBytesThreeDocuments() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); Assert.AreEqual(2, dv.ValueCount); BytesRef scratch = new BytesRef(); @@ -797,7 +797,7 @@ public virtual void TestSortedBytesTwoDocumentsMerged() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); Assert.AreEqual(2, dv.ValueCount); // 2 ords BytesRef scratch = new BytesRef(); @@ -894,7 +894,7 @@ public virtual void TestBytesWithNewline() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -925,7 +925,7 @@ public virtual void TestMissingSortedBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); dv.LookupOrd(dv.GetOrd(0), scratch); @@ -1045,7 +1045,7 @@ public virtual void TestEmptySortedBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); Assert.AreEqual(0, dv.GetOrd(0)); @@ -1080,7 +1080,7 @@ public virtual void TestEmptyBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1114,7 +1114,7 @@ public virtual void TestVeryLargeButLegalBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1146,7 +1146,7 @@ public virtual void TestVeryLargeButLegalSortedBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = 
((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1174,7 +1174,7 @@ public virtual void TestCodecUsesOwnBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); var mybytes = new byte[20]; BytesRef scratch = new BytesRef(mybytes); @@ -1205,7 +1205,7 @@ public virtual void TestCodecUsesOwnSortedBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); var mybytes = new byte[20]; BytesRef scratch = new BytesRef(mybytes); @@ -1239,7 +1239,7 @@ public virtual void TestCodecUsesOwnBytesEachTime() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1277,7 +1277,7 @@ public virtual void TestCodecUsesOwnSortedBytesEachTime() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1494,7 +1494,7 @@ private void DoTestNumericsVsStoredFields(Int64Producer longs) int numDocs = AtLeast(300); // numDocs should be always > 256 so that in case of a codec that optimizes // for numbers of values <= 256, all storage layouts are tested - Debug.Assert(numDocs > 256); + Debugging.Assert(() => numDocs > 256); for (int i = 0; i < numDocs; i++) { idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); @@ -1580,7 +1580,7 @@ private void DoTestMissingVsFieldCache(Int64Producer longs) int numDocs = AtLeast(300); // numDocs should be always > 256 so that in case of a codec that optimizes // for numbers of values <= 256, all storage layouts are tested - Debug.Assert(numDocs > 256); + Debugging.Assert(() => numDocs > 256); for (int i = 0; i < numDocs; i++) { idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); @@ -2649,13 +2649,13 @@ private void DoTestSortedSetVsStoredFields(int minLength, int maxLength, int max } for (int j = 0; j < stringValues.Length; j++) { - Debug.Assert(docValues != null); + Debugging.Assert(() => docValues != null); long ord = docValues.NextOrd(); - Debug.Assert(ord != SortedSetDocValues.NO_MORE_ORDS); + Debugging.Assert(() => ord != SortedSetDocValues.NO_MORE_ORDS); docValues.LookupOrd(ord, scratch); Assert.AreEqual(stringValues[j], scratch.Utf8ToString()); } - Debug.Assert(docValues == null || docValues.NextOrd() == SortedSetDocValues.NO_MORE_ORDS); + Debugging.Assert(() => docValues == null || docValues.NextOrd() == SortedSetDocValues.NO_MORE_ORDS); } } } // ir.Dispose(); diff --git a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs index 
f3f2791eac..de001078e6 100644 --- a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs @@ -1,5 +1,6 @@ using J2N.Threading; using Lucene.Net.Codecs; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Store; using Lucene.Net.Util; @@ -8,7 +9,6 @@ using System.IO; using JCG = J2N.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! using Assert = Lucene.Net.TestFramework.Assert; using Directory = Lucene.Net.Store.Directory; using J2N.Collections.Generic.Extensions; @@ -239,7 +239,7 @@ public override int NextPosition() posUpto = freq; return 0; } - Debug.Assert(posUpto < freq); + Debugging.Assert(() => posUpto < freq); if (posUpto == 0 && random.NextBoolean()) { diff --git a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs index 23620f3b40..dca1e12806 100644 --- a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs +++ b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs @@ -1,9 +1,9 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util; using System; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Index { @@ -106,7 +106,7 @@ public override MergeSpecification FindForcedMerges(SegmentInfos segmentInfos, i { foreach (SegmentCommitInfo info in merge.Segments) { - Debug.Assert(segmentsToMerge.ContainsKey(info)); + Debugging.Assert(() => segmentsToMerge.ContainsKey(info)); } } } diff --git a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs index 8264666585..2017704d01 100644 --- a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs +++ b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs @@ -13,6 +13,7 @@ using Lucene.Net.Codecs.NestedPulsing; using Lucene.Net.Codecs.Pulsing; using Lucene.Net.Codecs.SimpleText; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util; using System; @@ -20,7 +21,6 @@ using System.Collections.Generic; using JCG = J2N.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
using J2N.Collections.Generic.Extensions; namespace Lucene.Net.Index @@ -92,7 +92,7 @@ public override PostingsFormat GetPostingsFormatForField(string name) } previousMappings[name] = codec; // Safety: - Debug.Assert(previousMappings.Count < 10000, "test went insane"); + Debugging.Assert(() => previousMappings.Count < 10000, () => "test went insane"); } //if (LuceneTestCase.VERBOSE) @@ -115,7 +115,7 @@ public override DocValuesFormat GetDocValuesFormatForField(string name) } previousDVMappings[name] = codec; // Safety: - Debug.Assert(previousDVMappings.Count < 10000, "test went insane"); + Debugging.Assert(() => previousDVMappings.Count < 10000, () => "test went insane"); } //if (LuceneTestCase.VERBOSE) diff --git a/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs b/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs index b02694cc27..97c8f63728 100644 --- a/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs +++ b/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using System; using System.Threading; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Index { @@ -36,7 +36,7 @@ internal class RandomDocumentsWriterPerThreadPool : DocumentsWriterPerThreadPool public RandomDocumentsWriterPerThreadPool(int maxNumPerThreads, Random random) : base(maxNumPerThreads) { - Debug.Assert(MaxThreadStates >= 1); + Debugging.Assert(() => MaxThreadStates >= 1); states = new ThreadState[maxNumPerThreads]; this.random = new Random(random.Next()); this.maxRetry = 1 + random.Next(10); @@ -56,14 +56,14 @@ public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter } } } - Debug.Assert(NumThreadStatesActive > 0); + Debugging.Assert(() => NumThreadStatesActive > 0); for (int i = 0; i < maxRetry; i++) { int ord = random.Next(NumThreadStatesActive); lock (this) { threadState = states[ord]; - Debug.Assert(threadState != null); + Debugging.Assert(() => threadState != null); } if (threadState.TryLock()) @@ -89,12 +89,12 @@ public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter if (newThreadState != null) // did we get a new state? { threadState = states[NumThreadStatesActive - 1] = newThreadState; - //Debug.Assert(threadState.HeldByCurrentThread); + //Debugging.Assert(threadState.HeldByCurrentThread); return threadState; } // if no new state is available lock the random one } - Debug.Assert(threadState != null); + Debugging.Assert(() => threadState != null); threadState.@Lock(); return threadState; } diff --git a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs index 196f1b9b2e..146e070a7e 100644 --- a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs +++ b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs @@ -1,5 +1,6 @@ using Lucene.Net.Analysis; using Lucene.Net.Codecs; +using Lucene.Net.Diagnostics; using Lucene.Net.Search; using Lucene.Net.Store; using Lucene.Net.Util; @@ -7,7 +8,6 @@ using System.Collections; using System.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
namespace Lucene.Net.Index { @@ -440,7 +440,7 @@ private void _DoRandomForceMerge() // LUCENENET specific - added leading undersc Console.WriteLine("RIW: doRandomForceMerge(" + limit + ")"); } IndexWriter.ForceMerge(limit); - Debug.Assert(!doRandomForceMergeAssert || IndexWriter.SegmentCount <= limit, "limit=" + limit + " actual=" + IndexWriter.SegmentCount); + Debugging.Assert(() => !doRandomForceMergeAssert || IndexWriter.SegmentCount <= limit, () => "limit=" + limit + " actual=" + IndexWriter.SegmentCount); } } } diff --git a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs index 20a69f9a4f..e2dbc3b832 100644 --- a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs @@ -1,6 +1,7 @@ using J2N.Threading; using J2N.Threading.Atomic; using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Search; @@ -16,7 +17,6 @@ using System.Threading; using System.Threading.Tasks; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! using Directory = Lucene.Net.Store.Directory; namespace Lucene.Net.Index @@ -227,7 +227,7 @@ public override void Run() if (toDeleteSubDocs.Count > 0 && Random.NextBoolean()) { delSubDocs = toDeleteSubDocs[Random.Next(toDeleteSubDocs.Count)]; - Debug.Assert(!delSubDocs.Deleted); + Debugging.Assert(() => !delSubDocs.Deleted); toDeleteSubDocs.Remove(delSubDocs); // Update doc block, replacing prior packID packID = delSubDocs.PackID; @@ -364,7 +364,7 @@ public override void Run() foreach (SubDocs subDocs in toDeleteSubDocs) { - Debug.Assert(!subDocs.Deleted); + Debugging.Assert(() => !subDocs.Deleted); delPackIDs.Add(subDocs.PackID); outerInstance.DeleteDocuments(new Term("packID", subDocs.PackID)); subDocs.Deleted = true; diff --git a/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs b/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs index 1082d68cd8..4d88aa5b46 100644 --- a/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs +++ b/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Util; using System; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Search { @@ -61,7 +61,7 @@ public override void Score(ICollector collector) try { bool remaining = @in.Score(collector, DocsEnum.NO_MORE_DOCS); - Debug.Assert(!remaining); + Debugging.Assert(() => !remaining); } #pragma warning disable 168 catch (NotSupportedException e) diff --git a/src/Lucene.Net.TestFramework/Search/AssertingCollector.cs b/src/Lucene.Net.TestFramework/Search/AssertingCollector.cs index 087b31e8a1..b2e77953ba 100644 --- a/src/Lucene.Net.TestFramework/Search/AssertingCollector.cs +++ b/src/Lucene.Net.TestFramework/Search/AssertingCollector.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using System; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
namespace Lucene.Net.Search { @@ -52,7 +52,7 @@ public virtual void Collect(int doc) { if (inOrder || !AcceptsDocsOutOfOrder) { - Debug.Assert(doc > lastCollected, "Out of order : " + lastCollected + " " + doc); + Debugging.Assert(() => doc > lastCollected, () => "Out of order : " + lastCollected + " " + doc); } @in.Collect(doc); lastCollected = doc; diff --git a/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs b/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs index df19e582ca..72351d5c74 100644 --- a/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs +++ b/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using System; using System.Collections.Generic; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! using System.Runtime.CompilerServices; namespace Lucene.Net.Search @@ -105,10 +105,10 @@ internal virtual bool Iterating() public override float GetScore() { - Debug.Assert(Iterating()); + Debugging.Assert(Iterating); float score = @in.GetScore(); - Debug.Assert(!float.IsNaN(score)); - Debug.Assert(!float.IsNaN(score)); + Debugging.Assert(() => !float.IsNaN(score)); + Debugging.Assert(() => !float.IsNaN(score)); return score; } @@ -125,7 +125,7 @@ public override int Freq { get { - Debug.Assert(Iterating()); + Debugging.Assert(Iterating); return @in.Freq; } } diff --git a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs index 0ed15a7a9b..14db5823dd 100644 --- a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs +++ b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Store; @@ -7,7 +8,6 @@ using System.Collections.Generic; using System.IO; using System.Text; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
using Assert = Lucene.Net.TestFramework.Assert; using Directory = Lucene.Net.Store.Directory; @@ -294,7 +294,7 @@ private static IndexReader[] LoadEmptyReaders() // LUCENENET: Avoid static const private static IndexReader MakeEmptyIndex(Random random, int numDocs) { - Debug.Assert(numDocs > 0); + Debugging.Assert(() => numDocs > 0); Directory d = new MockDirectoryWrapper(random, new RAMDirectory()); IndexWriter w = new IndexWriter(d, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random))); for (int i = 0; i < numDocs; i++) @@ -487,7 +487,7 @@ public virtual void SetNextReader(AtomicReaderContext context) leafPtr++; } lastReader[0] = (AtomicReader)context.Reader; - Debug.Assert(readerContextArray[leafPtr].Reader == context.Reader); + Debugging.Assert(() => readerContextArray[leafPtr].Reader == context.Reader); this.scorer = null; lastDoc[0] = -1; } diff --git a/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs b/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs index 25515b652a..f0cf8714d4 100644 --- a/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs +++ b/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs @@ -1,9 +1,9 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Search.Similarities; using System; using System.Collections.Generic; using System.Text; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Search { @@ -71,7 +71,7 @@ public override Similarity Get(string field) { lock (this) { - Debug.Assert(field != null); + Debugging.Assert(() => field != null); if (!previousMappings.TryGetValue(field, out Similarity sim) || sim == null) { sim = knownSims[Math.Max(0, Math.Abs(perFieldSeed ^ field.GetHashCode())) % knownSims.Count]; diff --git a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs index 945d2d6e99..9b770f4cb3 100644 --- a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs +++ b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs @@ -1,6 +1,7 @@ using J2N.Collections.Generic.Extensions; using J2N.Threading; using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; using Lucene.Net.Store; @@ -11,7 +12,6 @@ using System.Collections.Generic; using JCG = J2N.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
#if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; #endif @@ -220,7 +220,7 @@ internal virtual TopDocs SearchNode(int nodeID, long[] nodeVersions, Query q, So } else { - Debug.Assert(searchAfter == null); // not supported yet + Debugging.Assert(() => searchAfter == null); // not supported yet return s.LocalSearch(q, numHits, sort); } } @@ -306,7 +306,7 @@ public ShardIndexSearcher(ShardSearchingTestBase.NodeState nodeState, long[] nod this.outerInstance = nodeState; this.nodeVersions = nodeVersions; MyNodeID = nodeID; - Debug.Assert(MyNodeID == nodeState.MyNodeID, "myNodeID=" + nodeID + " NodeState.this.myNodeID=" + nodeState.MyNodeID); + Debugging.Assert(() => MyNodeID == nodeState.MyNodeID, () => "myNodeID=" + nodeID + " NodeState.this.myNodeID=" + nodeState.MyNodeID); } public override Query Rewrite(Query original) @@ -348,7 +348,7 @@ public override Query Rewrite(Query original) public override TermStatistics TermStatistics(Term term, TermContext context) { - Debug.Assert(term != null); + Debugging.Assert(() => term != null); long docFreq = 0; long totalTermFreq = 0; for (int nodeID = 0; nodeID < nodeVersions.Length; nodeID++) @@ -364,7 +364,7 @@ public override TermStatistics TermStatistics(Term term, TermContext context) subStats = outerInstance.termStatsCache[key]; // We pre-cached during rewrite so all terms // better be here... - Debug.Assert(subStats != null); + Debugging.Assert(() => subStats != null); } long nodeDocFreq = subStats.DocFreq; @@ -419,7 +419,7 @@ public override CollectionStatistics CollectionStatistics(string field) } // Collection stats are pre-shared on reopen, so, // we better not have a cache miss: - Debug.Assert(nodeStats != null, "myNodeID=" + MyNodeID + " nodeID=" + nodeID + " version=" + nodeVersions[nodeID] + " field=" + field); + Debugging.Assert(() => nodeStats != null, () => "myNodeID=" + MyNodeID + " nodeID=" + nodeID + " version=" + nodeVersions[nodeID] + " field=" + field); long nodeDocCount = nodeStats.DocCount; if (docCount >= 0 && nodeDocCount >= 0) @@ -451,7 +451,7 @@ public override CollectionStatistics CollectionStatistics(string field) sumDocFreq = -1; } - Debug.Assert(nodeStats.MaxDoc >= 0); + Debugging.Assert(() => nodeStats.MaxDoc >= 0); maxDoc += nodeStats.MaxDoc; } @@ -551,7 +551,7 @@ public virtual TopDocs LocalSearchAfter(ScoreDoc after, Query query, int numHits public override TopFieldDocs Search(Query query, int numHits, Sort sort) { - Debug.Assert(sort != null); + Debugging.Assert(() => sort != null); TopDocs[] shardHits = new TopDocs[nodeVersions.Length]; for (int nodeID = 0; nodeID < nodeVersions.Length; nodeID++) { @@ -604,7 +604,7 @@ public NodeState(ShardSearchingTestBase shardSearchingTestBase, Random random, i public void InitSearcher(long[] nodeVersions) { - Debug.Assert(currentShardSearcher == null); + Debugging.Assert(() => currentShardSearcher == null); Array.Copy(nodeVersions, 0, currentNodeVersions, 0, currentNodeVersions.Length); currentShardSearcher = new ShardIndexSearcher(this, GetCurrentNodeVersions(), Mgr.Acquire().IndexReader, MyNodeID); } @@ -781,8 +781,8 @@ protected virtual void Start(int numNodes, double runTimeSec, int maxSearcherAge for (int nodeID = 0; nodeID < numNodes; nodeID++) { IndexSearcher s = m_nodes[nodeID].Mgr.Acquire(); - Debug.Assert(nodeVersions[nodeID] == m_nodes[nodeID].Searchers.Record(s)); - Debug.Assert(s != null); + Debugging.Assert(() => nodeVersions[nodeID] == m_nodes[nodeID].Searchers.Record(s)); + Debugging.Assert(() => s != null); try { 
BroadcastNodeReopen(nodeID, nodeVersions[nodeID], s); diff --git a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs index ff08f44247..16e968fdfb 100644 --- a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs +++ b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs @@ -1,4 +1,6 @@ +using J2N.Runtime.CompilerServices; using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; using Lucene.Net.Support; @@ -10,11 +12,9 @@ using System.Linq; using System.Runtime.CompilerServices; using System.Threading; -using JCG = J2N.Collections.Generic; using AssertionError = Lucene.Net.Diagnostics.AssertionException; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! -using J2N.Runtime.CompilerServices; +using JCG = J2N.Collections.Generic; #if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; #endif @@ -963,7 +963,7 @@ protected override void Dispose(bool disposing) { if (endSet.Contains(s) && !startSet.Contains(s)) { - Debug.Assert(pendingDeletions.Contains(s)); + Debugging.Assert(() => pendingDeletions.Contains(s)); if (LuceneTestCase.Verbose) { Console.WriteLine("MDW: Unreferenced check: Ignoring referenced file: " + s + " " + @@ -1024,7 +1024,7 @@ protected override void Dispose(bool disposing) extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions; } - Debug.Assert(false, "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras); + Debugging.Assert(() => false, () => "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras); } DirectoryReader ir1 = DirectoryReader.Open(this); @@ -1034,7 +1034,7 @@ protected override void Dispose(bool disposing) DirectoryReader ir2 = DirectoryReader.Open(this); int numDocs2 = ir2.NumDocs; ir2.Dispose(); - Debug.Assert(numDocs1 == numDocs2, "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2); + Debugging.Assert(() => numDocs1 == numDocs2, () => "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2); } } } diff --git a/src/Lucene.Net.TestFramework/Support/Diagnostics/Debug.cs b/src/Lucene.Net.TestFramework/Support/Diagnostics/Debug.cs deleted file mode 100644 index e8d9a9deba..0000000000 --- a/src/Lucene.Net.TestFramework/Support/Diagnostics/Debug.cs +++ /dev/null @@ -1,46 +0,0 @@ -namespace Lucene.Net.Diagnostics -{ - /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
- /// <summary>
- /// Provides a set of methods that help debug your code.
- /// </summary>
- internal static class Debug
- {
- /// <summary>
- /// Checks for a condition; if the condition is false, throws an <see cref="AssertionException"/>.
- /// </summary>
- /// <param name="condition">The conditional expression to evaluate. If the condition is true, no exception is thrown.</param>
- public static void Assert(bool condition)
- {
- if (Debugging.AssertsEnabled && !condition)
- throw new AssertionException();
- }
-
- /// <summary>
- /// Checks for a condition; if the condition is false, throws an <see cref="AssertionException"/> with the specified <paramref name="message"/>.
- /// </summary>
- /// <param name="condition">The conditional expression to evaluate. If the condition is true, no exception is thrown.</param>
- /// <param name="message">The message to use</param>
- public static void Assert(bool condition, string message)
- {
- if (Debugging.AssertsEnabled && !condition)
- throw new AssertionException(message);
- }
- }
-}
diff --git a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs
index b142907551..861bb3ec38 100644
--- a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs
@@ -1,8 +1,8 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
-using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release!
 using JCG = J2N.Collections.Generic;

 namespace Lucene.Net.Util
@@ -259,7 +259,7 @@ internal int randomInt(int max)
 [ExceptionToNetNumericConvention] // LUCENENET: This is for making test porting easier, keeping as-is
 internal int randomIntBetween(int min, int max)
 {
- Debug.Assert(max >= min, "max must be >= min: " + min + ", " + max);
+ Debugging.Assert(() => max >= min, () => "max must be >= min: " + min + ", " + max);
 long range = (long)max - (long)min;
 if (range < int.MaxValue)
 {
diff --git a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
index d7f617eb37..2f2b3073a1 100644
--- a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
+++ b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
@@ -1,9 +1,9 @@
 using J2N;
 using J2N.Runtime.CompilerServices;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using JCG = J2N.Collections.Generic;
-using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release!
namespace Lucene.Net.Util.Automaton
{
@@ -191,7 +191,7 @@ internal static int GetRandomCodePoint(Random r, Transition t) // LUCENENET spec
 }
 }

- Debug.Assert(code >= t.Min && code <= t.Max && (code < UnicodeUtil.UNI_SUR_HIGH_START || code > UnicodeUtil.UNI_SUR_LOW_END), "code=" + code + " min=" + t.Min + " max=" + t.Max);
+ Debugging.Assert(() => code >= t.Min && code <= t.Max && (code < UnicodeUtil.UNI_SUR_HIGH_START || code > UnicodeUtil.UNI_SUR_LOW_END), () => "code=" + code + " min=" + t.Min + " max=" + t.Max);
 return code;
 }
@@ -399,7 +399,7 @@ public static void AssertNoDetachedStates(Automaton a)
 {
 int numStates = a.GetNumberOfStates();
 a.ClearNumberedStates(); // force recomputation of cached numbered states
- Debug.Assert(numStates == a.GetNumberOfStates(), "automaton has " + (numStates - a.GetNumberOfStates()) + " detached states");
+ Debugging.Assert(() => numStates == a.GetNumberOfStates(), () => "automaton has " + (numStates - a.GetNumberOfStates()) + " detached states");
 }
 }
diff --git a/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs b/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs
index 1be2cbf91b..83f4ded483 100644
--- a/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs
@@ -1,6 +1,6 @@
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Search;
 using System;
-using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release!
 using Assert = Lucene.Net.TestFramework.Assert;
 using BitSet = J2N.Collections.BitSet;

@@ -56,7 +56,7 @@ public BaseDocIdSetTestCase(BeforeAfterClass beforeAfter)
 /// Create a random set which has <paramref name="numBitsSet"/> of its bits set.
 protected static BitSet RandomSet(int numBits, int numBitsSet)
 {
- Debug.Assert(numBitsSet <= numBits);
+ Debugging.Assert(() => numBitsSet <= numBits);
 BitSet set = new BitSet(numBits);
 Random random = Random;
 if (numBitsSet == numBits)
diff --git a/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs b/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs
index 4deb2eb12d..db6aea80dd 100644
--- a/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs
+++ b/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs
@@ -1,4 +1,4 @@
-using System.Diagnostics;
+using Lucene.Net.Diagnostics;

 namespace Lucene.Net.Util
 {
@@ -35,7 +35,7 @@ public override bool IsEnabled(string component)

 public override void Message(string component, string message)
 {
- Debug.Assert(!message.Contains("non-bulk merges"));
+ Debugging.Assert(() => !message.Contains("non-bulk merges"));
 }
 }
}
\ No newline at end of file
diff --git a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
index 8465d97fdd..a7929e0136 100644
--- a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
+++ b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
@@ -1,6 +1,7 @@
 using J2N;
 using J2N.Collections;
 using J2N.Collections.Generic.Extensions;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using Lucene.Net.Util.Packed;
 using System;
@@ -11,7 +12,6 @@
 using System.Text;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
-using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release!
using Directory = Lucene.Net.Store.Directory; using JCG = J2N.Collections.Generic; @@ -103,7 +103,7 @@ private static BytesRef ToBytesRef(Int32sRef ir) for (int i = 0; i < ir.Length; i++) { int x = ir.Int32s[ir.Offset + i]; - Debug.Assert(x >= 0 && x <= 255); + Debugging.Assert(() => x >= 0 && x <= 255); br.Bytes[i] = (byte)x; } br.Length = ir.Length; @@ -219,7 +219,7 @@ public virtual void DoTest(bool testPruning) // of the term prefix that matches private T Run(FST fst, Int32sRef term, int[] prefixLength) { - Debug.Assert(prefixLength == null || prefixLength.Length == 1); + Debugging.Assert(() => prefixLength == null || prefixLength.Length == 1); FST.Arc arc = fst.GetFirstArc(new FST.Arc()); T NO_OUTPUT = fst.Outputs.NoOutput; T output = NO_OUTPUT; @@ -690,7 +690,7 @@ private void VerifyUnPruned(int inputMode, FST fst) if (!termsMap.ContainsKey(term) && term.CompareTo(pairs[upto].Input) > 0) { int pos = pairs.BinarySearch(new InputOutput(term, default(T))); - Debug.Assert(pos < 0); + Debugging.Assert(() => pos < 0); upto = -(pos + 1); if (random.NextBoolean()) @@ -887,7 +887,7 @@ private void VerifyPruned(int inputMode, FST fst, int prune1, int prune2) } else { - Debug.Assert(prune2 > 0); + Debugging.Assert(() => prune2 > 0); if (prune2 > 1 && cmo.Count >= prune2) { keep = true; diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs index 0afece7ae3..d89f9b4b93 100644 --- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs +++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs @@ -1,5 +1,6 @@ using Lucene.Net.Analysis; using Lucene.Net.Codecs; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; @@ -27,7 +28,6 @@ using System.Text.RegularExpressions; using JCG = J2N.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
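// ---------------------------------------------------------------------------
// Aside: a hypothetical demonstration (names assumed, not from the patch) of
// why the message argument becomes a Func<string> in hunks like the
// randomIntBetween one above: with the old bool/string signature, a message
// such as "max must be >= min: " + min + ", " + max was concatenated on
// every call, even when the assertion passed. The factory defers that work
// to the failure path. The closures themselves still allocate, a cost this
// conversion accepts for test-framework code.

using System;

internal static class LazyMessageDemo
{
    private static int messageBuilds = 0;

    private static void Assert(Func<bool> condition, Func<string> messageFactory)
    {
        if (!condition())
            throw new InvalidOperationException(messageFactory());
    }

    private static string BuildMessage(int min, int max)
    {
        messageBuilds++; // counts how often the string is actually built
        return "max must be >= min: " + min + ", " + max;
    }

    public static void Main()
    {
        int min = 1, max = 10;
        for (int i = 0; i < 1_000_000; i++)
        {
            // passing assertion: BuildMessage is never invoked
            Assert(() => max >= min, () => BuildMessage(min, max));
        }
        Console.WriteLine(messageBuilds); // prints 0
    }
}
// ---------------------------------------------------------------------------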
using Assert = Lucene.Net.TestFramework.Assert; using Directory = Lucene.Net.Store.Directory; using FieldInfo = Lucene.Net.Index.FieldInfo; @@ -2678,7 +2678,7 @@ public virtual void AssertTermsEquals(string info, IndexReader leftReader, Terms /// public virtual void AssertTermsStatisticsEquals(string info, Terms leftTerms, Terms rightTerms) { - Debug.Assert(leftTerms.Comparer == rightTerms.Comparer); + Debugging.Assert(() => leftTerms.Comparer == rightTerms.Comparer); if (leftTerms.DocCount != -1 && rightTerms.DocCount != -1) { Assert.AreEqual(leftTerms.DocCount, rightTerms.DocCount, info); @@ -3053,7 +3053,7 @@ public virtual void AssertNormsEquals(string info, IndexReader leftReader, Index /// public virtual void AssertStoredFieldsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - Debug.Assert(leftReader.MaxDoc == rightReader.MaxDoc); + Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); for (int i = 0; i < leftReader.MaxDoc; i++) { Document leftDoc = leftReader.Document(i); @@ -3100,7 +3100,7 @@ public virtual void AssertStoredFieldEquals(string info, IIndexableField leftFie /// public virtual void AssertTermVectorsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - Debug.Assert(leftReader.MaxDoc == rightReader.MaxDoc); + Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); for (int i = 0; i < leftReader.MaxDoc; i++) { Fields leftFields = leftReader.GetTermVectors(i); @@ -3270,7 +3270,7 @@ public virtual void AssertDocValuesEquals(string info, int num, NumericDocValues // TODO: this is kinda stupid, we don't delete documents in the test. public virtual void AssertDeletedDocsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - Debug.Assert(leftReader.NumDeletedDocs == rightReader.NumDeletedDocs); + Debugging.Assert(() => leftReader.NumDeletedDocs == rightReader.NumDeletedDocs); IBits leftBits = MultiFields.GetLiveDocs(leftReader); IBits rightBits = MultiFields.GetLiveDocs(rightReader); @@ -3281,7 +3281,7 @@ public virtual void AssertDeletedDocsEquals(string info, IndexReader leftReader, return; } - Debug.Assert(leftReader.MaxDoc == rightReader.MaxDoc); + Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); Assert.AreEqual(leftBits.Length, rightBits.Length, info); for (int i = 0; i < leftReader.MaxDoc; i++) { @@ -3365,7 +3365,7 @@ public static bool SlowFileExists(Directory dir, string fileName) //// if (TempDirBase == null) //// { //// DirectoryInfo directory = new DirectoryInfo(System.IO.Path.GetTempPath()); - //// //Debug.Assert(directory.Exists && directory.Directory != null && directory.CanWrite()); + //// //Debugging.Assert(() => directory.Exists && directory.Directory != null && directory.CanWrite()); //// RandomizedContext ctx = RandomizedContext.Current; //// Type clazz = ctx.GetTargetType; @@ -3504,7 +3504,7 @@ public static FileInfo CreateTempFile() /// private static void RegisterToRemoveAfterSuite(FileSystemInfo f) { - Debug.Assert(f != null); + Debugging.Assert(() => f != null); if (LuceneTestCase.LeaveTemporary) { diff --git a/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs b/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs index c947785597..50f9d60fc1 100644 --- a/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs +++ b/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs @@ -1,4 +1,4 @@ -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! 
+using Lucene.Net.Diagnostics;

 namespace Lucene.Net.Util
 {
@@ -29,13 +29,13 @@ public class NullInfoStream : InfoStream
 {
 public override void Message(string component, string message)
 {
- Debug.Assert(component != null);
- Debug.Assert(message != null);
+ Debugging.Assert(() => component != null);
+ Debugging.Assert(() => message != null);
 }

 public override bool IsEnabled(string component)
 {
- Debug.Assert(component != null);
+ Debugging.Assert(() => component != null);
 return true; // to actually enable logging, we just ignore on message()
 }
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs b/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
index fe08c3e5f9..f3cc2956c5 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
@@ -56,7 +56,7 @@ public override void Evaluate()
 {
 try
 {
- Debug.Assert(false);
+ Debugging.Assert(() => false);
 string msg = "Test class requires enabled assertions, enable globally (-ea)" + " or for Solr/Lucene subpackages only: " + Description.ClassName;
 Console.Error.WriteLine(msg);
 throw new Exception(msg);
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
index 515c22b29f..eaf64337ad 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
@@ -10,6 +10,7 @@
 using Lucene.Net.Codecs.Lucene46;
 using Lucene.Net.Codecs.MockRandom;
 using Lucene.Net.Codecs.SimpleText;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Search.Similarities;
@@ -21,7 +22,6 @@
 using System.Threading;
 using JCG = J2N.Collections.Generic;
 using Console = Lucene.Net.Util.SystemConsole;
-using Debug = Lucene.Net.Diagnostics.Debug;

 // LUCENENET NOTE: These are primarily here because they are referred to
 // in the XML documentation.
Be sure to add a new option if a new test framework @@ -169,7 +169,7 @@ public override void Before(LuceneTestCase testInstance) !ShouldAvoidCodec("Lucene3x"))) // preflex-only setup { codec = Codec.ForName("Lucene3x"); - Debug.Assert((codec is PreFlexRWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + Debugging.Assert(() => (codec is PreFlexRWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); LuceneTestCase.OldFormatImpersonationIsActive = true; } else if ("Lucene40".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) && @@ -179,8 +179,8 @@ public override void Before(LuceneTestCase testInstance) { codec = Codec.ForName("Lucene40"); LuceneTestCase.OldFormatImpersonationIsActive = true; - Debug.Assert((codec is Lucene40RWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); - Debug.Assert((PostingsFormat.ForName("Lucene40") is Lucene40RWPostingsFormat), "fix your IPostingsFormatFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + Debugging.Assert(() => (codec is Lucene40RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + Debugging.Assert(() => (PostingsFormat.ForName("Lucene40") is Lucene40RWPostingsFormat), () => "fix your IPostingsFormatFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); } else if ("Lucene41".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) && "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) && @@ -190,7 +190,7 @@ public override void Before(LuceneTestCase testInstance) { codec = Codec.ForName("Lucene41"); LuceneTestCase.OldFormatImpersonationIsActive = true; - Debug.Assert((codec is Lucene41RWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + Debugging.Assert(() => (codec is Lucene41RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); } else if ("Lucene42".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) && "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) && @@ -200,7 +200,7 @@ public override void Before(LuceneTestCase testInstance) { codec = Codec.ForName("Lucene42"); LuceneTestCase.OldFormatImpersonationIsActive = true; - Debug.Assert((codec is Lucene42RWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + Debugging.Assert(() => (codec is Lucene42RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); } else if ("Lucene45".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) && "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) && @@ -210,7 +210,7 @@ public override void Before(LuceneTestCase testInstance) { codec = Codec.ForName("Lucene45"); LuceneTestCase.OldFormatImpersonationIsActive = true; - Debug.Assert((codec is Lucene45RWCodec), "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + Debugging.Assert(() => (codec is Lucene45RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); } else if 
(("random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) == false) || ("random".Equals(LuceneTestCase.TestDocValuesFormat, StringComparison.Ordinal) == false)) @@ -275,7 +275,7 @@ public override void Before(LuceneTestCase testInstance) } else { - Debug.Assert(false); + Debugging.Assert(() => false); } Codec.Default = codec; diff --git a/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs b/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs index e4bb11288b..1a8b2a16e6 100644 --- a/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs +++ b/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using Lucene.Net.Support; using System; using System.Threading; -using Debug = Lucene.Net.Diagnostics.Debug; // LUCENENET NOTE: We cannot use System.Diagnostics.Debug because those calls will be optimized out of the release! namespace Lucene.Net.Util { @@ -61,7 +61,7 @@ public static int MBitsToBytes(int mbits) public ThrottledIndexOutput(int bytesPerSecond, long flushDelayMillis, long closeDelayMillis, long seekDelayMillis, long minBytesWritten, IndexOutput @delegate) { - Debug.Assert(bytesPerSecond > 0); + Debugging.Assert(() => bytesPerSecond > 0); this.@delegate = @delegate; this.bytesPerSecond = bytesPerSecond; this.flushDelayMillis = flushDelayMillis; From 1ddc6414f70f4ad1e925c2121f97e7c52653da3b Mon Sep 17 00:00:00 2001 From: Shad Storhaug Date: Thu, 13 Aug 2020 23:31:25 +0700 Subject: [PATCH 05/13] Converted all test projects to use Lucene.Net.Diagnostics.Debugging.Assert() instead of System.Diagnostics.Debug.Assert() --- .../GraphvizFormatter.cs | 6 +- .../JapaneseIterationMarkCharFilter.cs | 4 +- .../JapaneseTokenizer.cs | 38 ++++++------ .../Tools/BinaryDictionaryWriter.cs | 11 ++-- .../Tools/ConnectionCostsBuilder.cs | 3 +- .../CharFilters/TestMappingCharFilter.cs | 6 +- .../Analysis/Core/TestFactories.cs | 4 +- .../Analysis/Core/TestRandomChains.cs | 6 +- .../Analysis/Hunspell/TestAllDictionaries.cs | 16 ++--- .../Analysis/Hunspell/TestAllDictionaries2.cs | 16 ++--- .../Analysis/Synonym/TestSynonymMapFilter.cs | 6 +- src/Lucene.Net.Tests.Facet/FacetTestCase.cs | 6 +- .../Range/TestRangeFacetCounts.cs | 58 +++++++++---------- .../TestTaxonomyFacetSumValueSource.cs | 6 +- .../TestDrillSideways.cs | 8 +-- .../GroupFacetCollectorTest.cs | 6 +- .../TestPostingsHighlighter.cs | 7 ++- src/Lucene.Net.Tests.Join/TestJoinUtil.cs | 4 +- .../Classic/TestQueryParser.cs | 18 +++--- .../Flexible/Standard/TestStandardQP.cs | 12 ++-- .../IndexAndTaxonomyReplicationClientTest.cs | 4 +- .../IndexReplicationClientTest.cs | 4 +- .../SpatialTestCase.cs | 8 +-- .../Analyzing/AnalyzingSuggesterTest.cs | 4 +- .../Suggest/Analyzing/FuzzySuggesterTest.cs | 6 +- .../Analyzing/TestFreeTextSuggester.cs | 6 +- .../Suggest/LookupBenchmarkTest.cs | 6 +- .../Analysis/TestGraphTokenizers.cs | 4 +- .../Lucene41/TestBlockPostingsFormat3.cs | 4 +- .../PerField/TestPerFieldDocValuesFormat.cs | 4 +- .../Index/TestBackwardsCompatibility.cs | 4 +- .../Index/TestBackwardsCompatibility3x.cs | 4 +- src/Lucene.Net.Tests/Index/TestCodecs.cs | 8 +-- src/Lucene.Net.Tests/Index/TestIndexWriter.cs | 7 +-- .../Index/TestIndexWriterMerging.cs | 4 +- .../Index/TestIndexableField.cs | 6 +- .../Index/TestLongPostings.cs | 6 +- src/Lucene.Net.Tests/Index/TestNRTThreads.cs | 4 +- src/Lucene.Net.Tests/Index/TestPayloads.cs | 4 +- .../Index/TestPayloadsOnVectors.cs | 6 +- .../Index/TestPostingsOffsets.cs | 10 ++-- 
.../Index/TestStressIndexing2.cs | 6 +- src/Lucene.Net.Tests/Index/TestStressNRT.cs | 6 +- src/Lucene.Net.Tests/Index/TestTermsEnum.cs | 6 +- .../Search/Spans/MultiSpansWrapper.cs | 4 +- .../Search/TestBooleanScorer.cs | 4 +- .../Search/TestConstantScoreQuery.cs | 4 +- src/Lucene.Net.Tests/Search/TestFieldCache.cs | 48 ++++++++------- .../Search/TestMinShouldMatch2.cs | 16 ++--- .../Search/TestMultiThreadTermVectors.cs | 4 +- .../Search/TestNumericRangeQuery32.cs | 8 +-- .../Search/TestNumericRangeQuery64.cs | 6 +- .../Search/TestTimeLimitingCollector.cs | 4 +- .../Util/Automaton/TestUTF32ToUTF8.cs | 8 +-- src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs | 4 +- .../Util/Packed/TestEliasFanoDocIdSet.cs | 4 +- .../Util/Packed/TestEliasFanoSequence.cs | 8 +-- src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs | 4 +- src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs | 4 +- .../TestICUPostingsHighlighter.cs | 8 +-- 60 files changed, 251 insertions(+), 259 deletions(-) diff --git a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs index a1e1fa984a..aca253ce4a 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs @@ -1,7 +1,7 @@ using Lucene.Net.Analysis.Ja.Dict; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Text; namespace Lucene.Net.Analysis.Ja @@ -89,8 +89,8 @@ private void SetBestPathMap(WrappedPositionArray positions, int startPos, Positi string toNodeID = GetNodeID(pos, bestIDX); string fromNodeID = GetNodeID(backPos, backIDX); - Debugging.Assert(!bestPathMap.ContainsKey(fromNodeID)); - Debugging.Assert(!bestPathMap.Values.Contains(toNodeID)); + Debugging.Assert(() => !bestPathMap.ContainsKey(fromNodeID)); + Debugging.Assert(() => !bestPathMap.Values.Contains(toNodeID)); bestPathMap[fromNodeID] = toNodeID; pos = backPos; bestIDX = backIDX; diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs index ad243f40eb..931d70ecce 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs @@ -1,5 +1,5 @@ using Lucene.Net.Analysis.Util; -using System.Diagnostics; +using Lucene.Net.Diagnostics; using System.IO; namespace Lucene.Net.Analysis.Ja @@ -133,7 +133,7 @@ static JapaneseIterationMarkCharFilter() // Make katakana dakuten map from hiragana map char codePointDifference = (char)('\u30ab' - '\u304b'); // カ - か - Debugging.Assert(h2d.Length == k2d.Length); + Debugging.Assert(() => h2d.Length == k2d.Length); for (int i = 0; i < k2d.Length; i++) { k2d[i] = (char)(h2d[i] + codePointDifference); diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs index d9d2293586..2d3c99321e 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs @@ -3,11 +3,11 @@ using Lucene.Net.Analysis.Ja.TokenAttributes; using Lucene.Net.Analysis.TokenAttributes; using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util; using Lucene.Net.Util.Fst; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Threading; @@ -314,7 +314,7 @@ private void Add(IDictionary dict, Position fromPosData, int endPos, int 
wordID, int leftID = dict.GetLeftId(wordID); int leastCost = int.MaxValue; int leastIDX = -1; - Debugging.Assert(fromPosData.count > 0); + Debugging.Assert(() => fromPosData.count > 0); for (int idx = 0; idx < fromPosData.count; idx++) { // Cost is path cost so far, plus word cost (added at @@ -356,7 +356,7 @@ private void Add(IDictionary dict, Position fromPosData, int endPos, int wordID, } //positions.get(endPos).add(leastCost, dict.getRightId(wordID), fromPosData.pos, leastIDX, wordID, type); - Debugging.Assert(leftID == dict.GetRightId(wordID)); + Debugging.Assert(() => leftID == dict.GetRightId(wordID)); positions.Get(endPos).Add(leastCost, leftID, fromPosData.pos, leastIDX, wordID, type); } @@ -387,7 +387,7 @@ public override bool IncrementToken() int position = token.Position; int length = token.Length; ClearAttributes(); - Debugging.Assert(length > 0); + Debugging.Assert(() => length > 0); //System.out.println("off=" + token.getOffset() + " len=" + length + " vs " + token.getSurfaceForm().length); termAtt.CopyBuffer(token.SurfaceForm, token.Offset, length); offsetAtt.SetOffset(CorrectOffset(position), CorrectOffset(position + length)); @@ -402,7 +402,7 @@ public override bool IncrementToken() } else { - Debugging.Assert(token.Position > lastTokenPos); + Debugging.Assert(() => token.Position > lastTokenPos); posIncAtt.PositionIncrement = 1; posLengthAtt.PositionLength = 1; } @@ -511,7 +511,7 @@ private void Parse() } // We will always have at least one live path: - Debugging.Assert(leastIDX != -1); + Debugging.Assert(() => leastIDX != -1); // Second pass: prune all but the best path: for (int pos2 = pos; pos2 < positions.GetNextPos(); pos2++) @@ -544,7 +544,7 @@ private void Parse() if (pos != leastPosData.pos) { // We jumped into a future position: - Debugging.Assert(pos < leastPosData.pos); + Debugging.Assert(() => pos < leastPosData.pos); pos = leastPosData.pos; } @@ -913,10 +913,10 @@ private void Backtrace(Position endPosData, int fromIDX) { //System.out.println("BT: back pos=" + pos + " bestIDX=" + bestIDX); Position posData = positions.Get(pos); - Debugging.Assert(bestIDX < posData.count); + Debugging.Assert(() => bestIDX < posData.count); int backPos = posData.backPos[bestIDX]; - Debugging.Assert(backPos >= lastBackTracePos, "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos); + Debugging.Assert(() => backPos >= lastBackTracePos, () => "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos); int length = pos - backPos; JapaneseTokenizerType backType = posData.backType[bestIDX]; int backID = posData.backID[bestIDX]; @@ -989,7 +989,7 @@ private void Backtrace(Position endPosData, int fromIDX) if (leastIDX != -1 && leastCost <= maxCost && posData.backPos[leastIDX] != backPos) { // We should have pruned the altToken from the graph: - Debugging.Assert(posData.backPos[leastIDX] != backPos); + Debugging.Assert(() => posData.backPos[leastIDX] != backPos); // Save the current compound token, to output when // this alternate path joins back: @@ -1024,7 +1024,7 @@ private void Backtrace(Position endPosData, int fromIDX) } int offset = backPos - lastBackTracePos; - Debugging.Assert(offset >= 0); + Debugging.Assert(() => offset >= 0); if (altToken != null && altToken.Position >= backPos) { @@ -1035,7 +1035,7 @@ private void Backtrace(Position endPosData, int fromIDX) // The pruning we did when we created the altToken // ensures that the back trace will align back with // the start of the altToken: - Debugging.Assert(altToken.Position == backPos, 
altToken.Position + " vs " + backPos); + Debugging.Assert(() => altToken.Position == backPos, () => altToken.Position + " vs " + backPos); // NOTE: not quite right: the compound token may // have had all punctuation back traced so far, but @@ -1060,7 +1060,7 @@ private void Backtrace(Position endPosData, int fromIDX) { Console.WriteLine(" discard all-punctuation altToken=" + altToken); } - Debugging.Assert(discardPunctuation); + Debugging.Assert(() => discardPunctuation); } altToken = null; } @@ -1355,7 +1355,7 @@ public void Reset() { count = 0; // forwardCount naturally resets after it runs: - Debugging.Assert(forwardCount == 0, "pos=" + pos + " forwardCount=" + forwardCount); + Debugging.Assert(() => forwardCount == 0, () => "pos=" + pos + " forwardCount=" + forwardCount); } } @@ -1432,13 +1432,13 @@ public Position Get(int pos) nextWrite = 0; } // Should have already been reset: - Debugging.Assert(positions[nextWrite].count == 0); + Debugging.Assert(() => positions[nextWrite].count == 0); positions[nextWrite++].pos = nextPos++; count++; } - Debugging.Assert(InBounds(pos)); + Debugging.Assert(() => InBounds(pos)); int index = GetIndex(pos); - Debugging.Assert(positions[index].pos == pos); + Debugging.Assert(() => positions[index].pos == pos); return positions[index]; } @@ -1466,8 +1466,8 @@ private int GetIndex(int pos) public void FreeBefore(int pos) { int toFree = count - (nextPos - pos); - Debugging.Assert(toFree >= 0); - Debugging.Assert(toFree <= count); + Debugging.Assert(() => toFree >= 0); + Debugging.Assert(() => toFree <= count); int index = nextWrite - count; if (index < 0) { diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs index cfbfb3d6a1..ecb270d6c8 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs @@ -6,7 +6,6 @@ using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Text; @@ -62,7 +61,7 @@ public virtual int Put(string[] entry) for (int i = 4; i < 8; i++) { string part = entry[i]; - Debugging.Assert(part.Length > 0); + Debugging.Assert(() => part.Length > 0); if (!"*".Equals(part, StringComparison.Ordinal)) { if (sb.Length > 0) @@ -119,8 +118,8 @@ public virtual int Put(string[] entry) flags |= BinaryDictionary.HAS_PRONUNCIATION; } - Debugging.Assert(leftId == rightId); - Debugging.Assert(leftId < 4096); // there are still unused bits + Debugging.Assert(() => leftId == rightId); + Debugging.Assert(() => leftId < 4096); // there are still unused bits // add pos mapping int toFill = 1 + leftId - posDict.Count; for (int i = 0; i < toFill; i++) @@ -129,7 +128,7 @@ public virtual int Put(string[] entry) } string existing = posDict[leftId]; - Debugging.Assert(existing == null || existing.Equals(fullPOSData, StringComparison.Ordinal)); + Debugging.Assert(() => existing == null || existing.Equals(fullPOSData, StringComparison.Ordinal)); posDict[leftId] = fullPOSData; m_buffer.PutInt16((short)(leftId << 3 | flags)); @@ -137,7 +136,7 @@ public virtual int Put(string[] entry) if ((flags & BinaryDictionary.HAS_BASEFORM) != 0) { - Debugging.Assert(baseForm.Length < 16); + Debugging.Assert(() => baseForm.Length < 16); int shared = SharedPrefix(entry[0], baseForm); int suffix = baseForm.Length - shared; m_buffer.Put((byte)(shared << 4 | suffix)); diff --git 
a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs index d7acc77169..a653cdc134 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs @@ -1,6 +1,5 @@ using J2N.Text; using Lucene.Net.Diagnostics; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Text; @@ -55,7 +54,7 @@ public static ConnectionCostsWriter Build(string filename) { string[] fields = whiteSpaceRegex.Split(line).TrimEnd(); - Debugging.Assert(fields.Length == 3); + Debugging.Assert(() => fields.Length == 3); int forwardId = int.Parse(fields[0], CultureInfo.InvariantCulture); int backwardId = int.Parse(fields[1], CultureInfo.InvariantCulture); diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs index b694a3634e..f398ac18a5 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs @@ -1,8 +1,8 @@ -using Lucene.Net.Util; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; using Console = Lucene.Net.Util.SystemConsole; @@ -422,7 +422,7 @@ public virtual void TestRandomMaps2() // Same length: no change to offset } - Debug.Assert(inputOffsets.Count == output.Length, "inputOffsets.size()=" + inputOffsets.Count + " vs output.length()=" + output.Length); + Debugging.Assert(() => inputOffsets.Count == output.Length, () => "inputOffsets.size()=" + inputOffsets.Count + " vs output.length()=" + output.Length); } else { diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs index fc004eee4e..e48cafa91f 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs @@ -1,9 +1,9 @@ using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Reflection; @@ -211,7 +211,7 @@ private sealed class FactoryAnalyzer : Analyzer internal FactoryAnalyzer(TokenizerFactory tokenizer, TokenFilterFactory tokenfilter, CharFilterFactory charFilter) { - Debug.Assert(tokenizer != null); + Debugging.Assert(() => tokenizer != null); this.tokenizer = tokenizer; this.charFilter = charFilter; this.tokenfilter = tokenfilter; diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs index b71771bfbf..4f66f90bb0 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs @@ -17,6 +17,7 @@ using Lucene.Net.Analysis.Synonym; using Lucene.Net.Analysis.Util; using Lucene.Net.Analysis.Wikipedia; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Tartarus.Snowball; using Lucene.Net.TestFramework.Analysis; @@ -25,7 +26,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using 
System.Globalization; using System.IO; using System.Linq; @@ -172,7 +172,7 @@ public PredicateAnonymousInnerClassHelper2() public virtual bool Apply(object[] args) { - Debug.Assert(args.Length == 3); + Debugging.Assert(() => args.Length == 3); return !((bool)args[2]); // args are broken if consumeAllTokens is false } } @@ -185,7 +185,7 @@ public PredicateAnonymousInnerClassHelper3() public virtual bool Apply(object[] args) { - Debug.Assert(args.Length == 3); + Debugging.Assert(() => args.Length == 3); return !((bool)args[2]); // args are broken if consumeAllTokens is false } } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs index 657ef71187..b8158af45e 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs @@ -1,7 +1,7 @@ -using Lucene.Net.Util; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util; using NUnit.Framework; using System; -using System.Diagnostics; using System.IO; using System.IO.Compression; using System.Text; @@ -168,16 +168,16 @@ public virtual void Test() for (int i = 0; i < tests.Length; i += 3) { FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); - Debug.Assert(f.Exists); + Debugging.Assert(() => f.Exists); using (Stream fileStream = f.OpenRead()) { using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) { ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - Debug.Assert(dicEntry != null); + Debugging.Assert(() => dicEntry != null); ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - Debug.Assert(affEntry != null); + Debugging.Assert(() => affEntry != null); using (Stream dictionary = dicEntry.Open()) { @@ -208,16 +208,16 @@ public virtual void TestOneDictionary() if (tests[i].Equals(toTest, StringComparison.Ordinal)) { FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); - Debug.Assert(f.Exists); + Debugging.Assert(() => f.Exists); using (Stream fileStream = f.OpenRead()) { using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) { ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - Debug.Assert(dicEntry != null); + Debugging.Assert(() => dicEntry != null); ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - Debug.Assert(affEntry != null); + Debugging.Assert(() => affEntry != null); using (Stream dictionary = dicEntry.Open()) { diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs index 2a60fc5f40..e6c2d9765d 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs @@ -1,7 +1,7 @@ -using Lucene.Net.Util; +using Lucene.Net.Diagnostics; +using Lucene.Net.Util; using NUnit.Framework; using System; -using System.Diagnostics; using System.IO; using System.IO.Compression; using System.Text; @@ -184,16 +184,16 @@ public virtual void Test() for (int i = 0; i < tests.Length; i += 3) { FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); - Debug.Assert(f.Exists); + Debugging.Assert(() => f.Exists); using (Stream fileStream = f.OpenRead()) { using (ZipArchive zip = new ZipArchive(fileStream, 
ZipArchiveMode.Read, false, Encoding.UTF8)) { ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - Debug.Assert(dicEntry != null); + Debugging.Assert(() => dicEntry != null); ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - Debug.Assert(affEntry != null); + Debugging.Assert(() => affEntry != null); using (Stream dictionary = dicEntry.Open()) { @@ -226,16 +226,16 @@ public virtual void TestOneDictionary() if (tests[i].Equals(toTest, StringComparison.Ordinal)) { FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); - Debug.Assert(f.Exists); + Debugging.Assert(() => f.Exists); using (Stream fileStream = f.OpenRead()) { using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) { ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - Debug.Assert(dicEntry != null); + Debugging.Assert(() => dicEntry != null); ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - Debug.Assert(affEntry != null); + Debugging.Assert(() => affEntry != null); using (Stream dictionary = dicEntry.Open()) { diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs index 432fd228e4..4ec4e7d65d 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs @@ -1,18 +1,18 @@ using J2N.Text; using Lucene.Net.Analysis.Core; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Text; using System.Text.RegularExpressions; -using JCG = J2N.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; +using JCG = J2N.Collections.Generic; namespace Lucene.Net.Analysis.Synonym { @@ -268,7 +268,7 @@ public virtual void TestBasic() private string GetRandomString(char start, int alphabetSize, int length) { - Debug.Assert(alphabetSize <= 26); + Debugging.Assert(() => alphabetSize <= 26); char[] s = new char[2 * length]; for (int charIDX = 0; charIDX < length; charIDX++) { diff --git a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs index fd04d815f5..7c7a4ef9f2 100644 --- a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs +++ b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs @@ -1,8 +1,8 @@ -using Lucene.Net.Support; +using Lucene.Net.Diagnostics; +using Lucene.Net.Support; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -154,7 +154,7 @@ protected internal virtual void SortTies(LabelAndValue[] labelValues) if (numInRow > 1) { Array.Sort(labelValues, i - numInRow, i - (i - numInRow), Comparer.Create((a,b)=> { - Debug.Assert((double)a.Value == (double)b.Value); + Debugging.Assert(() => (double)a.Value == (double)b.Value); return (new BytesRef(a.Label)).CompareTo(new BytesRef(b.Label)); })); } diff --git a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs index 32b9588cfa..a28559ca95 100644 --- a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs +++ b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs @@ -1,9 +1,10 @@ using J2N.Threading.Atomic; 
+using Lucene.Net.Diagnostics; +using Lucene.Net.Search; +using NUnit.Framework; using System; -using System.Diagnostics; using System.Collections; using System.Collections.Generic; -using NUnit.Framework; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -27,43 +28,40 @@ namespace Lucene.Net.Facet.Range * limitations under the License. */ - - using Document = Lucene.Net.Documents.Document; - using DoubleDocValuesField = Lucene.Net.Documents.DoubleDocValuesField; - using DoubleField = Lucene.Net.Documents.DoubleField; - using Field = Lucene.Net.Documents.Field; - using SingleDocValuesField = Lucene.Net.Documents.SingleDocValuesField; - using SingleField = Lucene.Net.Documents.SingleField; - using Int64Field = Lucene.Net.Documents.Int64Field; - using NumericDocValuesField = Lucene.Net.Documents.NumericDocValuesField; - using DrillSidewaysResult = Lucene.Net.Facet.DrillSidewaysResult; - using TaxonomyReader = Lucene.Net.Facet.Taxonomy.TaxonomyReader; - using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader; - using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter; using AtomicReader = Lucene.Net.Index.AtomicReader; using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; - using IndexReader = Lucene.Net.Index.IndexReader; - using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig; - using OpenMode = Lucene.Net.Index.OpenMode; - using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter; - using FunctionValues = Lucene.Net.Queries.Function.FunctionValues; - using ValueSource = Lucene.Net.Queries.Function.ValueSource; - using DoubleDocValues = Lucene.Net.Queries.Function.DocValues.DoubleDocValues; - using DoubleFieldSource = Lucene.Net.Queries.Function.ValueSources.DoubleFieldSource; - using SingleFieldSource = Lucene.Net.Queries.Function.ValueSources.SingleFieldSource; - using Int64FieldSource = Lucene.Net.Queries.Function.ValueSources.Int64FieldSource; using CachingWrapperFilter = Lucene.Net.Search.CachingWrapperFilter; + using Directory = Lucene.Net.Store.Directory; + using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader; + using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter; using DocIdSet = Lucene.Net.Search.DocIdSet; using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator; + using Document = Lucene.Net.Documents.Document; + using DoubleDocValues = Lucene.Net.Queries.Function.DocValues.DoubleDocValues; + using DoubleDocValuesField = Lucene.Net.Documents.DoubleDocValuesField; + using DoubleField = Lucene.Net.Documents.DoubleField; + using DoubleFieldSource = Lucene.Net.Queries.Function.ValueSources.DoubleFieldSource; + using DrillSidewaysResult = Lucene.Net.Facet.DrillSidewaysResult; + using Field = Lucene.Net.Documents.Field; using Filter = Lucene.Net.Search.Filter; + using FixedBitSet = Lucene.Net.Util.FixedBitSet; + using FunctionValues = Lucene.Net.Queries.Function.FunctionValues; + using IndexReader = Lucene.Net.Index.IndexReader; using IndexSearcher = Lucene.Net.Search.IndexSearcher; + using Int64Field = Lucene.Net.Documents.Int64Field; + using Int64FieldSource = Lucene.Net.Queries.Function.ValueSources.Int64FieldSource; + using IOUtils = Lucene.Net.Util.IOUtils; using MatchAllDocsQuery = Lucene.Net.Search.MatchAllDocsQuery; - using Lucene.Net.Search; + using NumericDocValuesField = Lucene.Net.Documents.NumericDocValuesField; + using OpenMode = 
Lucene.Net.Index.OpenMode; using QueryWrapperFilter = Lucene.Net.Search.QueryWrapperFilter; - using Directory = Lucene.Net.Store.Directory; - using FixedBitSet = Lucene.Net.Util.FixedBitSet; - using IOUtils = Lucene.Net.Util.IOUtils; + using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter; + using SingleDocValuesField = Lucene.Net.Documents.SingleDocValuesField; + using SingleField = Lucene.Net.Documents.SingleField; + using SingleFieldSource = Lucene.Net.Queries.Function.ValueSources.SingleFieldSource; + using TaxonomyReader = Lucene.Net.Facet.Taxonomy.TaxonomyReader; using TestUtil = Lucene.Net.Util.TestUtil; + using ValueSource = Lucene.Net.Queries.Function.ValueSource; [TestFixture] public class TestRangeFacetCounts : FacetTestCase @@ -1199,7 +1197,7 @@ public DrillSidewaysAnonymousInnerClassHelper2(TestRangeFacetCounts outerInstanc protected override Facets BuildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, string[] drillSidewaysDims) { - Debug.Assert(drillSideways.Length == 1); + Debugging.Assert(() => drillSideways.Length == 1); return new DoubleRangeFacetCounts("field", vs, drillSideways[0], fastMatchFilter, ranges); } diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs index 254c9a57f7..6e8be41514 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs @@ -1,8 +1,8 @@ -using NUnit.Framework; +using Lucene.Net.Diagnostics; +using NUnit.Framework; using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using Assert = Lucene.Net.TestFramework.Assert; @@ -385,7 +385,7 @@ public ValueSourceAnonymousInnerClassHelper(TestTaxonomyFacetSumValueSource oute public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext) { Scorer scorer = (Scorer)context["scorer"]; - Debug.Assert(scorer != null); + Debugging.Assert(() => scorer != null); return new DoubleDocValuesAnonymousInnerClassHelper(this, scorer); } diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs index 31630d482e..115a1203e3 100644 --- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs +++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs @@ -1,11 +1,11 @@ using J2N.Collections.Generic.Extensions; using J2N.Text; +using Lucene.Net.Diagnostics; using Lucene.Net.Search; using Lucene.Net.Support; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Linq; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -866,7 +866,7 @@ public virtual void SetScorer(Scorer scorer) public virtual void Collect(int doc) { - Debug.Assert(doc > lastDocID); + Debugging.Assert(() => doc > lastDocID); lastDocID = doc; } @@ -948,8 +948,8 @@ public virtual void Inc(int[] dims, int[] dims2) public virtual void Inc(int[] dims, int[] dims2, int onlyDim) { - Debug.Assert(dims.Length == counts.Length); - Debug.Assert(dims2.Length == counts.Length); + Debugging.Assert(() => dims.Length == counts.Length); + Debugging.Assert(() => dims2.Length == counts.Length); for (int dim = 0; dim < dims.Length; dim++) { if (onlyDim == -1 || dim == onlyDim) diff --git a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs 
b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs index ba3bbba29d..faa1519bf9 100644 --- a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs +++ b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs @@ -1,6 +1,7 @@ using J2N; using J2N.Text; using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; @@ -10,7 +11,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using Collections = Lucene.Net.Support.Collections; using Console = Lucene.Net.Util.SystemConsole; @@ -559,7 +559,7 @@ private IndexContext CreateIndexContext(bool multipleFacetValuesPerDocument) Field[] facetFields; if (useDv) { - Debug.Assert(!multipleFacetValuesPerDocument); + Debugging.Assert(() => !multipleFacetValuesPerDocument); facetFields = new Field[2]; facetFields[0] = NewStringField("facet", "", Field.Store.NO); doc.Add(facetFields[0]); @@ -816,7 +816,7 @@ private AbstractGroupFacetCollector CreateRandomCollector(string groupField, str { BytesRef facetPrefixBR = facetPrefix == null ? null : new BytesRef(facetPrefix); // DocValues cannot be multi-valued: - Debug.Assert(!multipleFacetsPerDocument || !groupField.EndsWith("_dv", StringComparison.Ordinal)); + Debugging.Assert(() => !multipleFacetsPerDocument || !groupField.EndsWith("_dv", StringComparison.Ordinal)); return TermGroupFacetCollector.CreateTermGroupFacetCollector(groupField, facetField, multipleFacetsPerDocument, facetPrefixBR, Random.nextInt(1024)); } diff --git a/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs b/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs index 08ebb86073..859bff449d 100644 --- a/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs +++ b/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs @@ -1,6 +1,7 @@ #if FEATURE_BREAKITERATOR using ICU4N.Text; using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; @@ -774,8 +775,8 @@ public LoadFieldValuesPostingsHighlighter(int maxLength, string text) protected override IList LoadFieldValues(IndexSearcher searcher, string[] fields, int[] docids, int maxLength) { - Debug.Assert(fields.Length == 1); - Debug.Assert(docids.Length == 1); + Debugging.Assert(() => fields.Length == 1); + Debugging.Assert(() => docids.Length == 1); String[][] contents = RectangularArrays.ReturnRectangularArray(1, 1); //= new String[1][1]; contents[0][0] = text; return contents; @@ -1178,7 +1179,7 @@ internal class GetMultiValuedSeparatorPostingsHighlighter : PostingsHighlighter { protected override char GetMultiValuedSeparator(string field) { - Debug.Assert(field.Equals("body", StringComparison.Ordinal)); + Debugging.Assert(() => field.Equals("body", StringComparison.Ordinal)); return '\u2029'; } } diff --git a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs index a52fa620af..f79246dbad 100644 --- a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs +++ b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; @@ -9,7 +10,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using Console = Lucene.Net.Util.SystemConsole; 
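// ---------------------------------------------------------------------------
// Aside: the same mechanical recipe repeats across the test files in this
// patch, as in the hunks around here: replace the System.Diagnostics using
// (or the Debug alias) with Lucene.Net.Diagnostics, then wrap the condition,
// and the message if there is one, in a lambda. A hypothetical before/after
// in one sketch (the After method stands in for Debugging.Assert; the
// message text is invented for illustration):

using System;
using System.Diagnostics; // the "before" dependency this patch removes

internal static class ConversionRecipeDemo
{
    private static void After(Func<bool> condition, Func<string> message)
    {
        if (!condition())
            throw new InvalidOperationException(message());
    }

    public static void Run(object docsEnum, string id)
    {
        // before: stripped from Release builds by the compiler
        Debug.Assert(docsEnum != null, "no docsEnum for id " + id);

        // after: present in all builds, gated and lazy at runtime
        After(() => docsEnum != null, () => "no docsEnum for id " + id);
    }
}
// ---------------------------------------------------------------------------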
using JCG = J2N.Collections.Generic; @@ -1030,7 +1030,7 @@ private FixedBitSet CreateExpectedResult(string queryValue, bool from, IndexRead { DocsEnum docsEnum = MultiFields.GetTermDocsEnum(topLevelReader, MultiFields.GetLiveDocs(topLevelReader), "id", new BytesRef(otherSideDoc.id), 0); - Debug.Assert(docsEnum != null); + Debugging.Assert(() => docsEnum != null); int doc = docsEnum.NextDoc(); expectedResult.Set(doc); } diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs index 60027b9748..10bffe2fde 100644 --- a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs +++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs @@ -1,15 +1,13 @@ using Lucene.Net.Analysis; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.QueryParsers.Flexible.Standard; using Lucene.Net.QueryParsers.Util; using Lucene.Net.Search; -using Lucene.Net.Support; using NUnit.Framework; using System; -using System.Diagnostics; using System.Globalization; -using System.Reflection; namespace Lucene.Net.QueryParsers.Classic { @@ -67,8 +65,8 @@ public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a) public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC) { - Debug.Assert(cqpC != null, "Parameter must not be null"); - Debug.Assert(cqpC is QueryParser, "Parameter must be instance of QueryParser"); + Debugging.Assert(() => cqpC != null, () => "Parameter must not be null"); + Debugging.Assert(() => cqpC is QueryParser, () => "Parameter must be instance of QueryParser"); QueryParser qp = (QueryParser)cqpC; return qp.Parse(query); } @@ -85,35 +83,35 @@ public override bool IsQueryParserException(Exception exception) public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC) { - Debug.Assert(cqpC is QueryParser); + Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.DefaultOperator = Operator.OR; } public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC) { - Debug.Assert(cqpC is QueryParser); + Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.DefaultOperator = Operator.AND; } public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value) { - Debug.Assert(cqpC is QueryParser); + Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.AnalyzeRangeTerms = (value); } public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value) { - Debug.Assert(cqpC is QueryParser); + Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.AutoGeneratePhraseQueries = value; } public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, string field, DateTools.Resolution value) { - Debug.Assert(cqpC is QueryParser); + Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.SetDateResolution(field, value); } diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs index d81dcf4fb8..c336603c51 100644 --- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs +++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs @@ -1,4 +1,5 @@ using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using 
Lucene.Net.QueryParsers.Flexible.Core; @@ -8,7 +9,6 @@ using Lucene.Net.Support; using NUnit.Framework; using System; -using System.Diagnostics; using System.IO; using Operator = Lucene.Net.QueryParsers.Flexible.Standard.Config.StandardQueryConfigHandler.Operator; @@ -59,8 +59,8 @@ public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a) public override Query GetQuery(String query, ICommonQueryParserConfiguration cqpC) { - Debug.Assert(cqpC != null, "Parameter must not be null"); - Debug.Assert((cqpC is StandardQueryParser), "Parameter must be instance of StandardQueryParser"); + Debugging.Assert(() => cqpC != null, () => "Parameter must not be null"); + Debugging.Assert(() => (cqpC is StandardQueryParser), () => "Parameter must be instance of StandardQueryParser"); StandardQueryParser qp = (StandardQueryParser)cqpC; return Parse(query, qp); } @@ -80,7 +80,7 @@ public override bool IsQueryParserException(Exception exception) public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC) { - Debug.Assert(cqpC is StandardQueryParser); + Debugging.Assert(() => cqpC is StandardQueryParser); StandardQueryParser qp = (StandardQueryParser)cqpC; qp.DefaultOperator = (Operator.OR); } @@ -88,7 +88,7 @@ public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC) public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC) { - Debug.Assert(cqpC is StandardQueryParser); + Debugging.Assert(() => cqpC is StandardQueryParser); StandardQueryParser qp = (StandardQueryParser)cqpC; qp.DefaultOperator = (Operator.AND); } @@ -111,7 +111,7 @@ public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguratio public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, string field, DateTools.Resolution value) { - Debug.Assert(cqpC is StandardQueryParser); + Debugging.Assert(() => cqpC is StandardQueryParser); StandardQueryParser qp = (StandardQueryParser)cqpC; qp.DateResolutionMap.Put(field, value); } diff --git a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs index 69c5b034ec..b41b01a201 100644 --- a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs +++ b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs @@ -1,5 +1,6 @@ using J2N.Threading.Atomic; using Lucene.Net.Attributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Facet; using Lucene.Net.Facet.Taxonomy; @@ -11,7 +12,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Threading; @@ -461,7 +461,7 @@ protected override void HandleUpdateException(Exception exception) { // count-down number of failures failures.DecrementAndGet(); - Debug.Assert(failures >= 0, "handler failed too many times: " + failures); + Debugging.Assert(() => failures >= 0, () => "handler failed too many times: " + failures); if (Verbose) { if (failures == 0) diff --git a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs index c313fdb5f2..3c158bf2af 100644 --- a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs +++ b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs @@ -1,5 +1,6 @@ using J2N.Threading.Atomic; using Lucene.Net.Attributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using 
Lucene.Net.Index; using Lucene.Net.Store; @@ -7,7 +8,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Threading; @@ -367,7 +367,7 @@ protected override void HandleUpdateException(Exception exception) { // count-down number of failures failures.DecrementAndGet(); - Debug.Assert(failures >= 0, "handler failed too many times: " + failures); + Debugging.Assert(() => failures >= 0, () => "handler failed too many times: " + failures); if (Verbose) { if (failures == 0) diff --git a/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs b/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs index f34380c1c3..b57d7face6 100644 --- a/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs +++ b/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs @@ -1,5 +1,6 @@ using Lucene.Net.Analysis; using Lucene.Net.Codecs.Lucene45; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; @@ -9,7 +10,6 @@ using Spatial4n.Core.Shapes; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; @@ -152,7 +152,7 @@ protected virtual IRectangle randomRectangle() private double randomGaussianMinMeanMax(double min, double mean, double max) { - Debug.Assert(mean > min); + Debugging.Assert(() => mean > min); return randomGaussianMeanMax(mean - min, max - min) + min; } @@ -166,7 +166,7 @@ private double randomGaussianMinMeanMax(double min, double mean, double max) private double randomGaussianMeanMax(double mean, double max) { // DWS: I verified the results empirically - Debug.Assert(mean <= max && mean >= 0); + Debugging.Assert(() => mean <= max && mean >= 0); double g = randomGaussian(); double mean2 = mean; double flip = 1; @@ -180,7 +180,7 @@ private double randomGaussianMeanMax(double mean, double max) // 1 standard deviation alters the calculation double pivotMax = max - mean2; double pivot = Math.Min(mean2, pivotMax / 2);//from 0 to max-mean2 - Debug.Assert(pivot >= 0 && pivotMax >= pivot && g >= 0); + Debugging.Assert(() => pivot >= 0 && pivotMax >= pivot && g >= 0); double pivotResult; if (g <= 1) pivotResult = pivot * g; diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs index 27a695491c..016b78e80e 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs @@ -2,12 +2,12 @@ using J2N.Text; using Lucene.Net.Analysis; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Util; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; using System.Text.RegularExpressions; @@ -651,7 +651,7 @@ public int CompareTo(TermFreq2 other) } else { - Debug.Assert(false); + Debugging.Assert(() => false); return 0; } } diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs index c56e61b8f4..5951a30590 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs @@ -3,13 +3,13 @@ using J2N.Text; using Lucene.Net.Analysis; using Lucene.Net.Analysis.TokenAttributes; +using 
Lucene.Net.Diagnostics; using Lucene.Net.Support; using Lucene.Net.Util; using Lucene.Net.Util.Automaton; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; using System.Text.RegularExpressions; @@ -603,7 +603,7 @@ public int CompareTo(TermFreqPayload2 other) } else { - Debug.Assert(false); + Debugging.Assert(() => false); return 0; } } @@ -1313,7 +1313,7 @@ public int Compare(Lookup.LookupResult a, Lookup.LookupResult b) else { int c = CHARSEQUENCE_COMPARER.Compare(a.Key, b.Key); - Debug.Assert(c != 0, "term=" + a.Key); + Debugging.Assert(() => c != 0, () => "term=" + a.Key); return c; } } diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs index bbe4e8ae5c..2f160a6fd3 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs @@ -2,18 +2,18 @@ using Lucene.Net.Analysis; using Lucene.Net.Analysis.Core; using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Support; using Lucene.Net.Util; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Text; -using JCG = J2N.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; +using JCG = J2N.Collections.Generic; namespace Lucene.Net.Search.Suggest.Analyzing { @@ -680,7 +680,7 @@ private static string GetZipfToken(string[] tokens) return tokens[k]; } } - Debug.Assert(false); + Debugging.Assert(() => false); return null; } diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs index 15094cca49..d835cf9a36 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs @@ -1,6 +1,7 @@ using J2N; using J2N.Collections.Generic.Extensions; using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Search.Suggest.Analyzing; using Lucene.Net.Search.Suggest.Fst; using Lucene.Net.Search.Suggest.Jaspell; @@ -11,7 +12,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Linq; @@ -75,7 +75,7 @@ public class LookupBenchmarkTest : LuceneTestCase public override void SetUp() { - Debug.Assert(false, "disable assertions before running benchmarks!"); + Debugging.Assert(() => false, () => "disable assertions before running benchmarks!"); IList input = ReadTop50KWiki(); input.Shuffle(Random); dictionaryInput = input.ToArray(); @@ -93,7 +93,7 @@ public static IList ReadTop50KWiki() List input = new List(); var resource = typeof(LookupBenchmarkTest).FindAndGetManifestResourceStream("Top50KWiki.utf8"); - Debug.Assert(resource != null, "Resource missing: Top50KWiki.utf8"); + Debugging.Assert(() => resource != null, () => "Resource missing: Top50KWiki.utf8"); string line = null; using (TextReader br = new StreamReader(resource, UTF_8)) diff --git a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs index 8f912d2362..8aaa6b92d3 100644 --- a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs +++ b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs @@ -1,9 +1,9 @@ using J2N.Text; using 
Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; using Assert = Lucene.Net.TestFramework.Assert; @@ -176,7 +176,7 @@ internal virtual void FillTokens() pos += minPosLength; offset = 2 * pos; } - Debug.Assert(maxPos <= pos, "input string mal-formed: posLength>1 tokens hang over the end"); + Debugging.Assert(() => maxPos <= pos, () => "input string mal-formed: posLength>1 tokens hang over the end"); } } diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs index a1478fdbab..6a2347deac 100644 --- a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs +++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs @@ -1,5 +1,6 @@ using J2N.Collections.Generic.Extensions; using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; @@ -7,7 +8,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; using JCG = J2N.Collections.Generic; @@ -296,7 +296,7 @@ private void AssertTermsSeeking(Terms leftTerms, Terms rightTerms) /// public virtual void AssertTermsStatistics(Terms leftTerms, Terms rightTerms) { - Debug.Assert(leftTerms.Comparer == rightTerms.Comparer); + Debugging.Assert(() => leftTerms.Comparer == rightTerms.Comparer); if (leftTerms.DocCount != -1 && rightTerms.DocCount != -1) { Assert.AreEqual(leftTerms.DocCount, rightTerms.DocCount); diff --git a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs index 51c479f165..42a0bd84b6 100644 --- a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs +++ b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs @@ -1,10 +1,10 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; using Lucene.Net.Support; using NUnit.Framework; using System; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Codecs.PerField @@ -113,7 +113,7 @@ public virtual void TestTwoFieldsTwoFormats() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debug.Assert(ireader.Leaves.Count == 1); + Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv1"); Assert.AreEqual(5, dv.Get(hits.ScoreDocs[i].Doc)); BinaryDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv2"); diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs index a19c6b4b42..a82465ae4d 100644 --- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs +++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs @@ -1,10 +1,10 @@ using J2N; using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Linq; using System.Text; @@ -434,7 +434,7 @@ public virtual void SearchIndex(Directory dir, string oldName) // true if this is a 4.2+ index bool 
is42Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("dvSortedSet") != null; - Debug.Assert(is40Index); // NOTE: currently we can only do this on trunk! + Debugging.Assert(() => is40Index); // NOTE: currently we can only do this on trunk! IBits liveDocs = MultiFields.GetLiveDocs(reader); diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs index 8e84317196..8fd5b72436 100644 --- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs +++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs @@ -1,9 +1,9 @@ using J2N; +using Lucene.Net.Diagnostics; using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; using Assert = Lucene.Net.TestFramework.Assert; @@ -1039,7 +1039,7 @@ public virtual void TestNegativePositions() Assert.AreEqual(1, td.TotalHits); AtomicReader wrapper = SlowCompositeReaderWrapper.Wrap(ir); DocsAndPositionsEnum de = wrapper.GetTermPositionsEnum(new Term("field3", "broken")); - Debug.Assert(de != null); + Debugging.Assert(() => de != null); Assert.AreEqual(0, de.NextDoc()); Assert.AreEqual(0, de.NextPosition()); ir.Dispose(); diff --git a/src/Lucene.Net.Tests/Index/TestCodecs.cs b/src/Lucene.Net.Tests/Index/TestCodecs.cs index 332f63527a..2b7fbe326c 100644 --- a/src/Lucene.Net.Tests/Index/TestCodecs.cs +++ b/src/Lucene.Net.Tests/Index/TestCodecs.cs @@ -1,12 +1,12 @@ using J2N.Text; using J2N.Threading; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Search; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Threading; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -446,7 +446,7 @@ public virtual void TestRandomPostings() for (int i = 0; i < NUM_TEST_THREADS - 1; i++) { threads[i].Join(); - Debug.Assert(!threads[i].failed); + Debugging.Assert(() => !threads[i].failed); } } @@ -759,14 +759,14 @@ public virtual void _run() if (doc == DocIdSetIterator.NO_MORE_DOCS) { // skipped past last doc - Debug.Assert(upto2 == term2.docs.Length - 1); + Debugging.Assert(() => upto2 == term2.docs.Length - 1); ended = true; break; } else { // skipped to next doc - Debug.Assert(upto2 < term2.docs.Length - 1); + Debugging.Assert(() => upto2 < term2.docs.Length - 1); if (doc >= term2.docs[1 + upto2]) { upto2++; diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs index b0c6baeca3..e195ed4d80 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs @@ -4,6 +4,7 @@ using Lucene.Net.Analysis.TokenAttributes; using Lucene.Net.Codecs; using Lucene.Net.Codecs.SimpleText; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Search; @@ -12,11 +13,9 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; using System.Threading; -using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; using JCG = J2N.Collections.Generic; @@ -2141,14 +2140,14 @@ public virtual void TestNRTReaderVersion() r = w.GetReader(); long version2 = r.Version; r.Dispose(); - Debug.Assert(version2 > version); + Debugging.Assert(() => version2 > version); 
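// A sketch of the conversion pattern this series applies, assuming the
// Debugging.Assert(Func<bool>) and Debugging.Assert(Func<bool>, Func<string>)
// overloads introduced earlier in the series (the message text below is
// illustrative, not taken from this test):
//
//   Debug.Assert(version2 > version, "version=" + version2);                 // old: condition and message string are built even when asserts are disabled
//   Debugging.Assert(() => version2 > version, () => "version=" + version2); // new: the delegates defer both until AssertsEnabled is checked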
w.DeleteDocuments(new Term("id", "0")); r = w.GetReader(); w.Dispose(); long version3 = r.Version; r.Dispose(); - Debug.Assert(version3 > version2); + Debugging.Assert(() => version3 > version2); d.Dispose(); } diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs index 9faef06930..a48d95448a 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs @@ -1,10 +1,10 @@ using J2N.Threading; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Threading; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -315,7 +315,7 @@ public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMer } for (int i = 0; i < merge.Segments.Count; i++) { - Debug.Assert(merge.Segments[i].Info.DocCount < 20); + Debugging.Assert(() => merge.Segments[i].Info.DocCount < 20); } writer.Merge(merge); } diff --git a/src/Lucene.Net.Tests/Index/TestIndexableField.cs b/src/Lucene.Net.Tests/Index/TestIndexableField.cs index 8d65685b0d..b71cdf2c7f 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexableField.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexableField.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -317,7 +317,7 @@ public virtual void TestArbitraryFields() } else { - Debug.Assert(stringValue != null); + Debugging.Assert(() => stringValue != null); Assert.AreEqual(stringValue, f.GetStringValue()); } } @@ -424,7 +424,7 @@ public bool MoveNext() return false; } - Debug.Assert(fieldUpto < outerInstance.fieldCount); + Debugging.Assert(() => fieldUpto < outerInstance.fieldCount); if (fieldUpto == 0) { fieldUpto = 1; diff --git a/src/Lucene.Net.Tests/Index/TestLongPostings.cs b/src/Lucene.Net.Tests/Index/TestLongPostings.cs index ef3a68c722..118fcaaee9 100644 --- a/src/Lucene.Net.Tests/Index/TestLongPostings.cs +++ b/src/Lucene.Net.Tests/Index/TestLongPostings.cs @@ -1,9 +1,9 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; -using System.Diagnostics; using System.IO; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -462,9 +462,9 @@ public virtual void DoTestLongPostingsNoPositions(IndexOptions options) else { docs = postings = TestUtil.Docs(Random, r, "field", new BytesRef(term), null, null, DocsFlags.FREQS); - Debug.Assert(postings != null); + Debugging.Assert(() => postings != null); } - Debug.Assert(docs != null); + Debugging.Assert(() => docs != null); int docID = -1; while (docID < DocIdSetIterator.NO_MORE_DOCS) diff --git a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs index 4c7e05d9ed..e2a79c0797 100644 --- a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs +++ b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Threading.Tasks; using Assert = 
Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -116,7 +116,7 @@ protected override void DoSearching(TaskScheduler es, long stopTime) protected override Directory GetDirectory(Directory @in) { - Debug.Assert(@in is MockDirectoryWrapper); + Debugging.Assert(() => @in is MockDirectoryWrapper); if (!useNonNrtReaders) { ((MockDirectoryWrapper)@in).AssertNoDeleteOpenFile = true; diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs index 40cae80c55..0f208af1dd 100644 --- a/src/Lucene.Net.Tests/Index/TestPayloads.cs +++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs @@ -2,13 +2,13 @@ using J2N.Threading; using Lucene.Net.Analysis; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Util; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Text; using Assert = Lucene.Net.TestFramework.Assert; @@ -310,7 +310,7 @@ private void GenerateRandomData(byte[] data) // this test needs the random data to be valid unicode string s = TestUtil.RandomFixedByteLengthUnicodeString(Random, data.Length); var b = s.GetBytes(utf8); - Debug.Assert(b.Length == data.Length); + Debugging.Assert(() => b.Length == data.Length); System.Buffer.BlockCopy(b, 0, data, 0, b.Length); } diff --git a/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs index cd6a54860e..5b7e149235 100644 --- a/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs +++ b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs @@ -1,9 +1,9 @@ using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; -using System.Diagnostics; using System.IO; using Assert = Lucene.Net.TestFramework.Assert; @@ -79,7 +79,7 @@ public virtual void TestMixupDocs() DirectoryReader reader = writer.GetReader(); Terms terms = reader.GetTermVector(1, "field"); - Debug.Assert(terms != null); + Debugging.Assert(() => terms != null); TermsEnum termsEnum = terms.GetIterator(null); Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload"))); DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null); @@ -128,7 +128,7 @@ public virtual void TestMixupMultiValued() writer.AddDocument(doc); DirectoryReader reader = writer.GetReader(); Terms terms = reader.GetTermVector(0, "field"); - Debug.Assert(terms != null); + Debugging.Assert(() => terms != null); TermsEnum termsEnum = terms.GetIterator(null); Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload"))); DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null); diff --git a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs index c506b900d9..c621f33aed 100644 --- a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs +++ b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs @@ -1,11 +1,11 @@ using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Search; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Linq; using Assert = Lucene.Net.TestFramework.Assert; @@ -177,9 +177,9 @@ public virtual void DoTestNumbers(bool withPayloads) { dp.NextPosition(); int start = dp.StartOffset; - Debug.Assert(start 
>= 0); + Debugging.Assert(() => start >= 0); int end = dp.EndOffset; - Debug.Assert(end >= 0 && end >= start); + Debugging.Assert(() => end >= 0 && end >= start); // check that the offsets correspond to the term in the src text Assert.IsTrue(storedNumbers.Substring(start, end - start).Equals(term, StringComparison.Ordinal)); if (withPayloads) @@ -208,9 +208,9 @@ public virtual void DoTestNumbers(bool withPayloads) string storedNumbers = reader.Document(doc).Get("numbers"); dp.NextPosition(); int start = dp.StartOffset; - Debug.Assert(start >= 0); + Debugging.Assert(() => start >= 0); int end = dp.EndOffset; - Debug.Assert(end >= 0 && end >= start); + Debugging.Assert(() => end >= 0 && end >= start); // check that the offsets correspond to the term in the src text Assert.IsTrue(storedNumbers.Substring(start, end - start).Equals("hundred", StringComparison.Ordinal)); if (withPayloads) diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs index f0efbc945b..aa7f52f876 100644 --- a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs +++ b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs @@ -2,6 +2,7 @@ using J2N.Text; using J2N.Threading; using Lucene.Net.Analysis.TokenAttributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Support; @@ -9,7 +10,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.Threading; using Assert = Lucene.Net.TestFramework.Assert; @@ -347,7 +347,7 @@ public virtual void VerifyEquals(DirectoryReader r1, DirectoryReader r2, string } if (r1.NumDocs != r2.NumDocs) { - Debug.Assert(false, "r1.NumDocs=" + r1.NumDocs + " vs r2.NumDocs=" + r2.NumDocs); + Debugging.Assert(() => false, () => "r1.NumDocs=" + r1.NumDocs + " vs r2.NumDocs=" + r2.NumDocs); } bool hasDeletes = !(r1.MaxDoc == r2.MaxDoc && r1.NumDocs == r1.MaxDoc); @@ -682,7 +682,7 @@ public static void VerifyEquals(Document d1, Document d2) IIndexableField f2 = ff2[i]; if (f1.GetBinaryValue() != null) { - Debug.Assert(f2.GetBinaryValue() != null); + Debugging.Assert(() => f2.GetBinaryValue() != null); } else { diff --git a/src/Lucene.Net.Tests/Index/TestStressNRT.cs b/src/Lucene.Net.Tests/Index/TestStressNRT.cs index 9acc423997..ef4f7958f1 100644 --- a/src/Lucene.Net.Tests/Index/TestStressNRT.cs +++ b/src/Lucene.Net.Tests/Index/TestStressNRT.cs @@ -1,12 +1,12 @@ using J2N.Threading; using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Support; using NUnit.Framework; using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics; using System.Threading; using Console = Lucene.Net.Util.SystemConsole; @@ -263,8 +263,8 @@ public override void Run() { // install the new reader if it's newest (and check the current version since another reader may have already been installed) //System.out.println(Thread.currentThread().getName() + ": newVersion=" + newReader.getVersion()); - Debug.Assert(newReader.RefCount > 0); - Debug.Assert(outerInstance.reader.RefCount > 0); + Debugging.Assert(() => newReader.RefCount > 0); + Debugging.Assert(() => outerInstance.reader.RefCount > 0); if (newReader.Version > outerInstance.reader.Version) { if (Verbose) diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs index 912e6c7723..def59ed669 100644 --- 
a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs +++ b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs @@ -1,3 +1,4 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Search; @@ -5,10 +6,9 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; -using JCG = J2N.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; +using JCG = J2N.Collections.Generic; namespace Lucene.Net.Index { @@ -829,7 +829,7 @@ private void TestRandomSeeks(IndexReader r, params string[] validTermStrings) } else { - Debug.Assert(loc >= -validTerms.Length); + Debugging.Assert(() => loc >= -validTerms.Length); Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, result); } } diff --git a/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs b/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs index ea8710d987..e8f5f73a02 100644 --- a/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs +++ b/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Support; using System.Collections.Generic; -using System.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Search.Spans @@ -110,7 +110,7 @@ public override bool SkipTo(int target) } int subIndex = ReaderUtil.SubIndex(target, leaves); - Debug.Assert(subIndex >= leafOrd); + Debugging.Assert(() => subIndex >= leafOrd); if (subIndex != leafOrd) { AtomicReaderContext ctx = leaves[subIndex]; diff --git a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs index 76a8e64c60..d203efb694 100644 --- a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs +++ b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs @@ -1,9 +1,9 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Support; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Search @@ -119,7 +119,7 @@ private class BulkScorerAnonymousInnerClassHelper : BulkScorer public override bool Score(ICollector c, int maxDoc) { - Debug.Assert(doc == -1); + Debugging.Assert(() => doc == -1); doc = 3000; FakeScorer fs = new FakeScorer(); fs.doc = doc; diff --git a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs index 55ca85c511..a2aa38e536 100644 --- a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs +++ b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using NUnit.Framework; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Search @@ -234,7 +234,7 @@ public virtual void TestQueryWrapperFilter() Filter filter = new QueryWrapperFilter(AssertingQuery.Wrap(Random, new TermQuery(new Term("field", "a")))); IndexSearcher s = NewSearcher(r); - Debug.Assert(s is AssertingIndexSearcher); + Debugging.Assert(() => s is AssertingIndexSearcher); // this used to fail s.Search(new ConstantScoreQuery(filter), new TotalHitCountCollector()); diff --git a/src/Lucene.Net.Tests/Search/TestFieldCache.cs b/src/Lucene.Net.Tests/Search/TestFieldCache.cs index 39be70d167..3358372d96 100644 --- a/src/Lucene.Net.Tests/Search/TestFieldCache.cs +++ b/src/Lucene.Net.Tests/Search/TestFieldCache.cs @@ -1,11 +1,11 @@ using J2N.Threading; 
using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Index.Extensions; using Lucene.Net.Support; using Lucene.Net.Util; using NUnit.Framework; using System; -using System.Diagnostics; using System.Collections.Generic; using System.Globalization; using System.IO; @@ -34,42 +34,40 @@ namespace Lucene.Net.Search * limitations under the License. */ - using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; - using BinaryDocValuesField = Lucene.Net.Documents.BinaryDocValuesField; - using Document = Lucene.Net.Documents.Document; - using Field = Lucene.Net.Documents.Field; - using Store = Lucene.Net.Documents.Field.Store; - using Int32Field = Lucene.Net.Documents.Int32Field; - using Int64Field = Lucene.Net.Documents.Int64Field; - using NumericDocValuesField = Lucene.Net.Documents.NumericDocValuesField; - using SortedDocValuesField = Lucene.Net.Documents.SortedDocValuesField; - using SortedSetDocValuesField = Lucene.Net.Documents.SortedSetDocValuesField; - using StoredField = Lucene.Net.Documents.StoredField; using AtomicReader = Lucene.Net.Index.AtomicReader; using BinaryDocValues = Lucene.Net.Index.BinaryDocValues; + using BinaryDocValuesField = Lucene.Net.Documents.BinaryDocValuesField; + using Bytes = Lucene.Net.Search.FieldCache.Bytes; + using BytesRef = Lucene.Net.Util.BytesRef; + using Directory = Lucene.Net.Store.Directory; using DirectoryReader = Lucene.Net.Index.DirectoryReader; using DocTermOrds = Lucene.Net.Index.DocTermOrds; + using Document = Lucene.Net.Documents.Document; + using Doubles = Lucene.Net.Search.FieldCache.Doubles; + using Field = Lucene.Net.Documents.Field; + using IBits = Lucene.Net.Util.IBits; using IndexReader = Lucene.Net.Index.IndexReader; using IndexWriter = Lucene.Net.Index.IndexWriter; using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig; + using Int16s = Lucene.Net.Search.FieldCache.Int16s; + using Int32Field = Lucene.Net.Documents.Int32Field; + using Int32s = Lucene.Net.Search.FieldCache.Int32s; + using Int64Field = Lucene.Net.Documents.Int64Field; + using Int64s = Lucene.Net.Search.FieldCache.Int64s; + using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; + using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; + using NumericDocValuesField = Lucene.Net.Documents.NumericDocValuesField; using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter; + using Singles = Lucene.Net.Search.FieldCache.Singles; using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper; using SortedDocValues = Lucene.Net.Index.SortedDocValues; + using SortedDocValuesField = Lucene.Net.Documents.SortedDocValuesField; using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues; + using SortedSetDocValuesField = Lucene.Net.Documents.SortedSetDocValuesField; + using StoredField = Lucene.Net.Documents.StoredField; using TermsEnum = Lucene.Net.Index.TermsEnum; - using Bytes = Lucene.Net.Search.FieldCache.Bytes; - using Doubles = Lucene.Net.Search.FieldCache.Doubles; - using Singles = Lucene.Net.Search.FieldCache.Singles; - using Int32s = Lucene.Net.Search.FieldCache.Int32s; - using Int64s = Lucene.Net.Search.FieldCache.Int64s; - using Int16s = Lucene.Net.Search.FieldCache.Int16s; - using Directory = Lucene.Net.Store.Directory; - using IBits = Lucene.Net.Util.IBits; - using BytesRef = Lucene.Net.Util.BytesRef; - using IOUtils = Lucene.Net.Util.IOUtils; - using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; using TestUtil = Lucene.Net.Util.TestUtil; - + [TestFixture] public class TestFieldCache : LuceneTestCase @@ -374,7 +372,7 @@ public 
virtual void Test() break; } long ord = termOrds.NextOrd(); - Debug.Assert(ord != SortedSetDocValues.NO_MORE_ORDS); + Debugging.Assert(() => ord != SortedSetDocValues.NO_MORE_ORDS); BytesRef scratch = new BytesRef(); termOrds.LookupOrd(ord, scratch); Assert.AreEqual(v, scratch); diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs index 443d366737..6157c9a804 100644 --- a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs +++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs @@ -1,14 +1,14 @@ -using System.Linq; +using J2N.Collections.Generic.Extensions; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Support; using Lucene.Net.Util; using NUnit.Framework; using System.Collections.Generic; -using System.Diagnostics; +using System.Linq; using Assert = Lucene.Net.TestFramework.Assert; using JCG = J2N.Collections.Generic; -using J2N.Collections.Generic.Extensions; namespace Lucene.Net.Search { @@ -365,14 +365,14 @@ internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, Ind this.sims = new SimScorer[(int)dv.ValueCount]; foreach (BooleanClause clause in bq.GetClauses()) { - Debug.Assert(!clause.IsProhibited); - Debug.Assert(!clause.IsRequired); + Debugging.Assert(() => !clause.IsProhibited); + Debugging.Assert(() => !clause.IsRequired); Term term = ((TermQuery)clause.Query).Term; long ord = dv.LookupTerm(term.Bytes); if (ord >= 0) { bool success = ords.Add(ord); - Debug.Assert(success); // no dups + Debugging.Assert(() => success); // no dups TermContext context = TermContext.Build(reader.Context, term); SimWeight w = weight.Similarity.ComputeWeight(1f, searcher.CollectionStatistics("field"), searcher.TermStatistics(term, context)); var dummy = w.GetValueForNormalization(); // ignored @@ -384,7 +384,7 @@ internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, Ind public override float GetScore() { - Debug.Assert(score != 0, currentMatched.ToString()); + Debugging.Assert(() => score != 0, currentMatched.ToString); return (float)score * ((BooleanWeight)m_weight).Coord(currentMatched, ((BooleanWeight)m_weight).MaxCoord); } @@ -394,7 +394,7 @@ public override float GetScore() public override int NextDoc() { - Debug.Assert(currentDoc != NO_MORE_DOCS); + Debugging.Assert(() => currentDoc != NO_MORE_DOCS); for (currentDoc = currentDoc + 1; currentDoc < maxDoc; currentDoc++) { currentMatched = 0; diff --git a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs index 762cc4ed27..f6c0ac7522 100644 --- a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs +++ b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs @@ -1,9 +1,9 @@ using J2N.Threading; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; -using System.Diagnostics; using System.IO; using System.Text; using System.Threading; @@ -223,7 +223,7 @@ private void VerifyVectors(Fields vectors, int num) foreach (string field in vectors) { Terms terms = vectors.GetTerms(field); - Debug.Assert(terms != null); + Debugging.Assert(() => terms != null); VerifyVector(terms.GetIterator(null), num); } } diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs index 16860f2251..a7dc07eed7 100644 --- a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs +++ 
b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -32,7 +32,6 @@ namespace Lucene.Net.Search using Document = Documents.Document; using Field = Field; using FieldType = FieldType; - using SingleField = SingleField; using IndexReader = Lucene.Net.Index.IndexReader; using Int32Field = Int32Field; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; @@ -40,6 +39,7 @@ namespace Lucene.Net.Search using MultiFields = Lucene.Net.Index.MultiFields; using NumericUtils = Lucene.Net.Util.NumericUtils; using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter; + using SingleField = SingleField; using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper; using Terms = Lucene.Net.Index.Terms; using TermsEnum = Lucene.Net.Index.TermsEnum; @@ -474,13 +474,13 @@ public virtual void TestEmptyEnums() int count = 3000; int lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3); // test empty enum - Debug.Assert(lower < upper); + Debugging.Assert(() => lower < upper); Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, lower, upper, true, true))); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, upper, lower, true, true))); // test empty enum outside of bounds lower = distance * noDocs + startOffset; upper = 2 * lower; - Debug.Assert(lower < upper); + Debugging.Assert(() => lower < upper); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, lower, upper, true, true))); } diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs index 0da43cdbde..4e0ad53037 100644 --- a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs +++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -501,13 +501,13 @@ public virtual void TestEmptyEnums() int count = 3000; long lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3); // test empty enum - Debug.Assert(lower < upper); + Debugging.Assert(() => lower < upper); Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, lower, upper, true, true))); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, upper, lower, true, true))); // test empty enum outside of bounds lower = distance * noDocs + startOffset; upper = 2L * lower; - Debug.Assert(lower < upper); + Debugging.Assert(() => lower < upper); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, lower, upper, true, true))); } diff --git a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs index 243145ebe0..6ebd6212d9 100644 --- a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs +++ b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs @@ -1,6 +1,7 @@ using J2N.Text; using J2N.Threading; using Lucene.Net.Analysis; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using 
Lucene.Net.Index.Extensions; @@ -8,7 +9,6 @@ using Lucene.Net.Util; using NUnit.Framework; using System; -using System.Diagnostics; using System.Text.RegularExpressions; using Console = Lucene.Net.Util.SystemConsole; @@ -418,7 +418,7 @@ public virtual void Collect(int doc) //#endif } - Debug.Assert(docId >= 0, " base=" + docBase + " doc=" + doc); + Debugging.Assert(() => docId >= 0, () => " base=" + docBase + " doc=" + doc); bits.Set(docId); lastDocCollected = docId; } diff --git a/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs b/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs index 0f7dffb278..b18aaa8e9f 100644 --- a/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs +++ b/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs @@ -1,8 +1,8 @@ using J2N; using J2N.Text; +using Lucene.Net.Diagnostics; using NUnit.Framework; using System; -using System.Diagnostics; using System.Text; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -76,7 +76,7 @@ private void TestOne(Random r, ByteRunAutomaton a, int startCode, int endCode, i nonSurrogateCount = endCode - startCode + 1 - (UnicodeUtil.UNI_SUR_LOW_END - UnicodeUtil.UNI_SUR_HIGH_START + 1); } - Debug.Assert(nonSurrogateCount > 0); + Debugging.Assert(() => nonSurrogateCount > 0); for (int iter = 0; iter < iters; iter++) { @@ -95,8 +95,8 @@ private void TestOne(Random r, ByteRunAutomaton a, int startCode, int endCode, i } } - Debug.Assert(code >= startCode && code <= endCode, "code=" + code + " start=" + startCode + " end=" + endCode); - Debug.Assert(!IsSurrogate(code)); + Debugging.Assert(() => code >= startCode && code <= endCode, () => "code=" + code + " start=" + startCode + " end=" + endCode); + Debugging.Assert(() => !IsSurrogate(code)); Assert.IsTrue(Matches(a, code), "DFA for range " + startCode + "-" + endCode + " failed to match code=" + code); } diff --git a/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs b/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs index 1912f1f667..df6777489e 100644 --- a/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs +++ b/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs @@ -1,12 +1,12 @@ using J2N.Collections.Generic.Extensions; using J2N.Threading.Atomic; +using Lucene.Net.Diagnostics; using Lucene.Net.Index.Extensions; using Lucene.Net.Support; using Lucene.Net.Util.Automaton; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Linq; @@ -571,7 +571,7 @@ public virtual void Run(int limit, bool verify, bool verifyByOutput) long tMid = Environment.TickCount; Console.WriteLine(((tMid - tStart) / 1000.0) + " sec to add all terms"); - Debug.Assert(builder.TermCount == ord); + Debugging.Assert(() => builder.TermCount == ord); FST fst = builder.Finish(); long tEnd = Environment.TickCount; Console.WriteLine(((tEnd - tMid) / 1000.0) + " sec to finish/pack"); diff --git a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs index 934890a31b..e7cb03c15a 100644 --- a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs +++ b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using Lucene.Net.Diagnostics; using BitSet = J2N.Collections.BitSet; namespace Lucene.Net.Util.Packed @@ -52,7 +52,7 @@ public override int NextDoc() { doc = NO_MORE_DOCS; } - Debug.Assert(doc < numBits); + Debugging.Assert(() => doc < numBits); return doc; } diff --git 
a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs index e207049cbb..35b2633a1e 100644 --- a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs +++ b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs @@ -1,7 +1,7 @@ using Lucene.Net.Attributes; +using Lucene.Net.Diagnostics; using NUnit.Framework; using System; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Util.Packed @@ -91,7 +91,7 @@ private static void TstDecodeAllAdvanceToExpected(long[] values, EliasFanoDecode private static void TstDecodeAdvanceToMultiples(long[] values, EliasFanoDecoder efd, long m) { // test advancing to multiples of m - Debug.Assert(m > 0); + Debugging.Assert(() => m > 0); long previousValue = -1L; long index = 0; long mm = m; @@ -120,7 +120,7 @@ private static void TstDecodeAdvanceToMultiples(long[] values, EliasFanoDecoder private static void TstDecodeBackToMultiples(long[] values, EliasFanoDecoder efd, long m) { // test backing to multiples of m - Debug.Assert(m > 0); + Debugging.Assert(() => m > 0); efd.ToAfterSequence(); int index = values.Length - 1; if (index < 0) @@ -135,7 +135,7 @@ private static void TstDecodeBackToMultiples(long[] values, EliasFanoDecoder efd while (index >= 0) { expValue = values[index]; - Debug.Assert(mm < previousValue); + Debugging.Assert(() => mm < previousValue); if (expValue <= mm) { long backValue_ = efd.BackToValue(mm); diff --git a/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs b/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs index e9da5ad4a4..63a75b9948 100644 --- a/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs +++ b/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs @@ -1,7 +1,7 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Store; using NUnit.Framework; using System; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Util @@ -57,7 +57,7 @@ public virtual void Test() r2.NextBytes(bytes); dataOutput.WriteBytes(bytes, bytes.Length); long fp = dataOutput.GetFilePointer(); - Debug.Assert(fp == lastFP + numBytes); + Debugging.Assert(() => fp == lastFP + numBytes); lastFP = fp; netBytes += numBytes; } diff --git a/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs b/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs index e6fa9d8bb2..f31d3cee08 100644 --- a/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs +++ b/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs @@ -1,8 +1,8 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Search; using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using Assert = Lucene.Net.TestFramework.Assert; using BitSet = J2N.Collections.BitSet; @@ -94,7 +94,7 @@ public virtual void TestUnion() /// Create a random set which has <paramref name="numBitsSet"/> of its bits set. 
protected static OpenBitSet RandomOpenSet(int numBits, int numBitsSet) { - Debug.Assert(numBitsSet <= numBits); + Debugging.Assert(() => numBitsSet <= numBits); OpenBitSet set = new OpenBitSet(numBits); Random random = Random; if (numBitsSet == numBits) diff --git a/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs b/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs index 4a1212c268..29a1360eeb 100644 --- a/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs +++ b/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs @@ -2,6 +2,7 @@ using ICU4N.Text; using Lucene.Net.Analysis; using Lucene.Net.Attributes; +using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; @@ -10,7 +11,6 @@ using NUnit.Framework; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Text; @@ -774,8 +774,8 @@ public LoadFieldValuesPostingsHighlighter(int maxLength, string text) protected override IList LoadFieldValues(IndexSearcher searcher, string[] fields, int[] docids, int maxLength) { - Debug.Assert( fields.Length == 1); - Debug.Assert( docids.Length == 1); + Debugging.Assert(() => fields.Length == 1); + Debugging.Assert(() => docids.Length == 1); String[][] contents = RectangularArrays.ReturnRectangularArray(1, 1); //= new String[1][1]; contents[0][0] = text; return contents; @@ -1178,7 +1178,7 @@ internal class GetMultiValuedSeparatorPostingsHighlighter : ICUPostingsHighlight { protected override char GetMultiValuedSeparator(string field) { - Debug.Assert( field.Equals("body", StringComparison.Ordinal)); + Debugging.Assert(() => field.Equals("body", StringComparison.Ordinal)); return '\u2029'; } } From 4fe64ee40aa6799d267bd9dc2990a0ee66a3557e Mon Sep 17 00:00:00 2001 From: Shad Storhaug Date: Fri, 14 Aug 2020 10:01:41 +0700 Subject: [PATCH 06/13] SWEEP: Reviewed and added missing asserts and moved some assert conditions to be not run when asserts are disabled --- .../Analysis/CharFilter/BaseCharFilter.cs | 3 +- .../Analysis/Pt/RSLPStemmerBase.cs | 3 +- .../SimpleText/SimpleTextDocValuesReader.cs | 6 +- .../SimpleText/SimpleTextFieldsReader.cs | 3 +- .../SimpleText/SimpleTextTermVectorsReader.cs | 14 +---- .../Index/TestFlushByRamOrCountsPolicy.cs | 4 +- src/Lucene.Net/Analysis/Tokenizer.cs | 4 +- src/Lucene.Net/Codecs/BlockTreeTermsReader.cs | 7 +-- src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs | 3 +- .../Codecs/Compressing/CompressionMode.cs | 2 + .../Lucene3x/Lucene3xTermVectorsReader.cs | 11 +--- .../Codecs/Lucene3x/TermInfosReader.cs | 2 +- src/Lucene.Net/Codecs/Lucene40/BitVector.cs | 6 +- .../Lucene40/Lucene40TermVectorsReader.cs | 11 +--- src/Lucene.Net/Index/CheckIndex.cs | 38 +++++++------ .../Index/ConcurrentMergeScheduler.cs | 3 +- src/Lucene.Net/Index/DirectoryReader.cs | 1 - src/Lucene.Net/Index/DocFieldProcessor.cs | 1 - src/Lucene.Net/Index/DocTermOrds.cs | 3 +- src/Lucene.Net/Index/DocValuesFieldUpdates.cs | 4 +- src/Lucene.Net/Index/DocumentsWriter.cs | 22 ++++---- .../Index/DocumentsWriterDeleteQueue.cs | 1 - .../Index/DocumentsWriterFlushControl.cs | 9 ++- .../Index/DocumentsWriterFlushQueue.cs | 12 ++-- .../Index/DocumentsWriterPerThreadPool.cs | 56 +++++++++++++------ .../Index/DocumentsWriterStallControl.cs | 15 +++-- .../Index/FreqProxTermsWriterPerField.cs | 3 +- src/Lucene.Net/Index/IndexFileDeleter.cs | 6 +- 
src/Lucene.Net/Index/IndexWriter.cs | 37 +++--------- src/Lucene.Net/Index/PrefixCodedTerms.cs | 3 + src/Lucene.Net/Index/ReadersAndUpdates.cs | 17 +++--- src/Lucene.Net/Index/SortedDocValuesWriter.cs | 1 + .../Index/SortedSetDocValuesWriter.cs | 2 +- src/Lucene.Net/Index/StoredFieldsProcessor.cs | 11 ++-- src/Lucene.Net/Index/TermVectorsConsumer.cs | 3 +- .../Index/TermVectorsConsumerPerField.cs | 6 +- ...ThreadAffinityDocumentsWriterThreadPool.cs | 6 +- .../Search/FieldCacheRangeFilter.cs | 3 +- src/Lucene.Net/Search/FieldComparator.cs | 4 +- src/Lucene.Net/Search/ReferenceManager.cs | 4 +- src/Lucene.Net/Store/ByteBufferIndexInput.cs | 2 + src/Lucene.Net/Store/NIOFSDirectory.cs | 13 +---- src/Lucene.Net/Store/SimpleFSDirectory.cs | 4 +- src/Lucene.Net/Util/BroadWord.cs | 3 +- src/Lucene.Net/Util/Fst/FST.cs | 5 +- src/Lucene.Net/Util/Fst/NodeHash.cs | 3 +- .../Util/Packed/EliasFanoEncoder.cs | 3 +- src/Lucene.Net/Util/Packed/PackedDataInput.cs | 3 +- src/Lucene.Net/Util/Packed/PackedInts.cs | 11 ++-- src/Lucene.Net/Util/Packed/PackedWriter.cs | 3 +- src/Lucene.Net/Util/PagedBytes.cs | 3 +- src/Lucene.Net/Util/RamUsageEstimator.cs | 2 +- src/Lucene.Net/Util/RollingBuffer.cs | 2 + 53 files changed, 190 insertions(+), 217 deletions(-) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs index 420822b480..bd3fa5beb1 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs @@ -114,7 +114,8 @@ protected virtual void AddOffCorrectMap(int off, int cumulativeDiff) } int offset = offsets[(size == 0) ? 0 : size - 1]; - Debugging.Assert(() => size == 0 || off >= offset, () => "Offset #" + size + "(" + off + ") is less than the last recorded offset " + offset + "\n" + Arrays.ToString(offsets) + "\n" + Arrays.ToString(diffs)); + Debugging.Assert(() => size == 0 || off >= offset, + () => "Offset #" + size + "(" + off + ") is less than the last recorded offset " + offset + "\n" + Arrays.ToString(offsets) + "\n" + Arrays.ToString(diffs)); if (size == 0 || off != offsets[size - 1]) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs index 942e6d5aa6..f1b4d6a795 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs @@ -1,6 +1,7 @@ using J2N.Collections.Generic.Extensions; using J2N.Text; using Lucene.Net.Analysis.Util; +using Lucene.Net.Diagnostics; using Lucene.Net.Util; using System; using System.Collections.Generic; @@ -304,7 +305,7 @@ private static Step ParseStep(TextReader r, string header) { throw new Exception("Illegal Step header specified at line " /*+ r.LineNumber*/); // TODO Line number } - //Debugging.Assert(headerPattern.GetGroupNumbers().Length == 4); + //Debugging.Assert(() => headerPattern.GetGroupNumbers().Length == 4); // Not possible to read the number of groups that matched in .NET string name = matcher.Groups[1].Value; int min = int.Parse(matcher.Groups[2].Value, CultureInfo.InvariantCulture); int type = int.Parse(matcher.Groups[3].Value, CultureInfo.InvariantCulture); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs index 1b53e2dd9b..151afe8876 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs +++ 
b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs @@ -85,10 +85,8 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) ReadLine(); Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.TYPE), () => scratch.Utf8ToString()); - var dvType = - (DocValuesType) - Enum.Parse(typeof (DocValuesType), StripPrefix(SimpleTextDocValuesWriter.TYPE)); - + var dvType = (DocValuesType)Enum.Parse(typeof(DocValuesType), StripPrefix(SimpleTextDocValuesWriter.TYPE)); + // Debugging.Assert(() => dvType != null); // LUCENENET: Not possible for an enum to be null in .NET if (dvType == DocValuesType.NUMERIC) { ReadLine(); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs index 04f652277a..a4432a82cd 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs @@ -159,6 +159,7 @@ public override SeekStatus SeekCeil(BytesRef text) public override BytesRef Next() { + //Debugging.Assert(() => !ended); // LUCENENET: Ended field is never set, so this can never fail var result = _fstEnum.Next(); if (result == null) return null; @@ -317,7 +318,7 @@ public override int NextDoc() Debugging.Assert( () => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || // LUCENENET TODO: This assert fails sometimes, which in turns causes _scratch.Utf8ToString() to throw an index out of range exception - StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END) /*, "scratch=" + _scratch.Utf8ToString()*/); + StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END), () => "scratch=" + _scratch.Utf8ToString()); if (!first && (_liveDocs == null || _liveDocs.Get(_docId))) { diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs index 778219ea01..95abd7f913 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs @@ -540,18 +540,8 @@ public override BytesRef GetPayload() public override int NextPosition() { - //Debugging.Assert((_positions != null && _nextPos < _positions.Length) || - // _startOffsets != null && _nextPos < _startOffsets.Length); - - // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is - // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the - // part that is checking for an error after reading to the end of the enumerator. - - // Since there is no way to turn on assertions in a release build in .NET, we are throwing an InvalidOperationException - // in this case, which matches the behavior of Lucene 8. See #267. 
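// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the applied patch) of the control-flow pattern the
// removed comment describes: with asserts enabled, reading one position past the end
// fails the Debugging.Assert, and the test harness (BaseTermVectorsFormatTestCase in
// the real code) catches that failure to detect the overrun. PositionReader and
// Consume are hypothetical names; the sketch assumes Debugging.AssertsEnabled is
// true, as it would be under the test framework.
using System;
using Lucene.Net.Diagnostics;

internal sealed class PositionReader // hypothetical stand-in for a positions enum
{
    private readonly int[] positions;
    private int nextPos;

    internal PositionReader(int[] positions)
    {
        this.positions = positions;
    }

    internal int NextPosition()
    {
        // Fails (throws AssertionException) once the caller reads too far.
        Debugging.Assert(() => nextPos < positions.Length);
        return positions[nextPos++];
    }
}

internal static class ControlFlowExample
{
    internal static void Consume(PositionReader reader, int freq)
    {
        for (int i = 0; i < freq; i++)
        {
            reader.NextPosition(); // valid reads
        }
        try
        {
            reader.NextPosition(); // one past the end
            throw new InvalidOperationException("expected a failed assert");
        }
        catch (AssertionException)
        {
            // expected: this is how the test detects reading past the last position
        }
    }
}
// ---------------------------------------------------------------------------------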
- - if (((_positions != null && _nextPos < _positions.Length) || _startOffsets != null && _nextPos < _startOffsets.Length) == false) - throw new InvalidOperationException("Read past last position"); + Debugging.Assert(() => (_positions != null && _nextPos < _positions.Length) || + _startOffsets != null && _nextPos < _startOffsets.Length); if (_positions != null) { diff --git a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs index c870dda6ea..4bdfe89a27 100644 --- a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs +++ b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs @@ -305,9 +305,9 @@ internal virtual void AssertActiveBytesAfter(DocumentsWriterFlushControl flushCo while (allActiveThreads.MoveNext()) { ThreadState next = allActiveThreads.Current; - if (next.DocumentsWriterPerThread != null) + if (next.dwpt != null) { - bytesUsed += next.DocumentsWriterPerThread.BytesUsed; + bytesUsed += next.dwpt.BytesUsed; } } Assert.AreEqual(bytesUsed, flushControl.ActiveBytes); diff --git a/src/Lucene.Net/Analysis/Tokenizer.cs b/src/Lucene.Net/Analysis/Tokenizer.cs index 25a136a61f..4edf011b29 100644 --- a/src/Lucene.Net/Analysis/Tokenizer.cs +++ b/src/Lucene.Net/Analysis/Tokenizer.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Analysis @@ -115,7 +115,7 @@ public void SetReader(TextReader input) throw new InvalidOperationException("TokenStream contract violation: Close() call missing"); } this.inputPending = input; - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) SetReaderTestPoint(); + Debugging.Assert(SetReaderTestPoint); } public override void Reset() diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs index 2be06072df..30064e44f5 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs @@ -4,7 +4,6 @@ using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; using JCG = J2N.Collections.Generic; @@ -29,7 +28,6 @@ namespace Lucene.Net.Codecs */ using ArrayUtil = Lucene.Net.Util.ArrayUtil; - using IBits = Lucene.Net.Util.IBits; using ByteArrayDataInput = Lucene.Net.Store.ByteArrayDataInput; using ByteSequenceOutputs = Lucene.Net.Util.Fst.ByteSequenceOutputs; using BytesRef = Lucene.Net.Util.BytesRef; @@ -40,6 +38,7 @@ namespace Lucene.Net.Codecs using DocsEnum = Lucene.Net.Index.DocsEnum; using FieldInfo = Lucene.Net.Index.FieldInfo; using FieldInfos = Lucene.Net.Index.FieldInfos; + using IBits = Lucene.Net.Util.IBits; using IndexFileNames = Lucene.Net.Index.IndexFileNames; using IndexInput = Lucene.Net.Store.IndexInput; using IndexOptions = Lucene.Net.Index.IndexOptions; @@ -2467,7 +2466,7 @@ public override BytesRef Next() if (currentFrame.ord == 0) { //if (DEBUG) System.out.println(" return null"); - Debugging.Assert(() => SetEOF()); + Debugging.Assert(SetEOF); term.Length = 0; validIndexPrefix = 0; currentFrame.Rewind(); @@ -2579,7 +2578,7 @@ public override void SeekExact(BytesRef target, TermState otherState) // if (DEBUG) { // System.out.println("BTTR.seekExact termState seg=" + segment + " target=" + target.utf8ToString() + " " + target + " state=" + otherState); // } - Debugging.Assert(() => ClearEOF()); + Debugging.Assert(ClearEOF); if (target.CompareTo(term) != 0 || !termExists) { Debugging.Assert(() => 
otherState != null && otherState is BlockTermState); diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs index 424bd4ebdd..2e8fc86343 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs @@ -479,8 +479,7 @@ private string ToString(IList blocks) // For assert public void CompileIndex(IList floorBlocks, RAMOutputStream scratchBytes) { // LUCENENET specific - we use a custom wrapper function to display floorBlocks, since - // it might contain garbage that cannot be converted into text. This is compiled out - // of the relese, though. + // it might contain garbage that cannot be converted into text. Debugging.Assert( () => (IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null), () => "isFloor=" + IsFloor + " floorBlocks=" + ToString(floorBlocks)); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs index 5ca2019787..fe3e2267dd 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs @@ -261,6 +261,8 @@ internal DeflateCompressor(CompressionLevel level) public override void Compress(byte[] bytes, int off, int len, DataOutput output) { + // LUCENENET specific - since DeflateStream works a bit differently than Java's Deflate class, + // we are unable to assert the total count byte[] resultArray = null; using (MemoryStream compressionMemoryStream = new MemoryStream()) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs index 7f2f5a65aa..4822cd5958 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs @@ -737,16 +737,7 @@ public override BytesRef GetPayload() public override int NextPosition() { - //Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); - - // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is - // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the - // part that is checking for an error after reading to the end of the enumerator. - - // Since there is no way to turn on assertions in a release build in .NET, we are throwing an InvalidOperationException - // in this case, which matches the behavior of Lucene 8. See #267. 
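// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the applied patch) of the SetEOF/ClearEOF idiom
// seen above: a helper that mutates debug-only state and always returns true, passed
// to Debugging.Assert as a method group so the bookkeeping runs only when asserts
// are enabled. The class and member names below are hypothetical.
using Lucene.Net.Diagnostics;

internal sealed class EnumDebugStateExample
{
    private bool eof; // debug-only bookkeeping, never read on the hot path

    private bool SetEOF()
    {
        eof = true;
        return true; // always "passes" the assert
    }

    private bool ClearEOF()
    {
        eof = false;
        return true;
    }

    internal void OnExhausted() => Debugging.Assert(SetEOF);  // method group as Func<bool>

    internal void OnSeek() => Debugging.Assert(ClearEOF);
}
// ---------------------------------------------------------------------------------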
- if (((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length) == false) - throw new InvalidOperationException("Read past last position"); + Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); if (positions != null) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs index cd77fbc8de..82adf13cdd 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs @@ -294,7 +294,7 @@ internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, TermInfoAndOrd else { Debugging.Assert(() => SameTermInfo(ti, tiOrd, enumerator)); - Debugging.Assert(() => enumerator.position == tiOrd.termOrd); + Debugging.Assert(() => (int)enumerator.position == tiOrd.termOrd); } } } diff --git a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs index e8265ce932..f9409ae88f 100644 --- a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs +++ b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs @@ -275,8 +275,7 @@ public void Write(Directory d, string name, IOContext context) WriteBits(output); } CodecUtil.WriteFooter(output); - bool verified = VerifyCount(); - Debugging.Assert(() => verified); + Debugging.Assert(VerifyCount); } finally { @@ -475,8 +474,7 @@ private bool VerifyCount() Debugging.Assert(() => count != -1); int countSav = count; count = -1; - bool checkCount = countSav == Count(); - Debugging.Assert(() => checkCount, () => "saved count was " + countSav + " but recomputed count is " + count); + Debugging.Assert(() => countSav == Count(), () => "saved count was " + countSav + " but recomputed count is " + count); return true; } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs index ceca2e2029..8251d2e075 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs @@ -730,16 +730,7 @@ public override BytesRef GetPayload() public override int NextPosition() { - //Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); - - // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is - // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the - // part that is checking for an error after reading to the end of the enumerator. - - // Since there is no way to turn on assertions in a release build in .NET, we are throwing an InvalidOperationException - // in this case, which matches the behavior of Lucene 8. See #267. 
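// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the applied patch) of the two-delegate form used
// in BitVector.VerifyCount() above: the second delegate builds the failure message,
// so the string formatting is only paid when the condition has already failed.
// CheckCount is a hypothetical name.
using System.Globalization;
using Lucene.Net.Diagnostics;

internal static class MessageFactoryExample
{
    internal static void CheckCount(int savedCount, int recomputedCount)
    {
        Debugging.Assert(
            () => savedCount == recomputedCount,
            () => string.Format(CultureInfo.InvariantCulture,
                "saved count was {0} but recomputed count is {1}",
                savedCount, recomputedCount));
    }
}
// ---------------------------------------------------------------------------------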
- if (((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length) == false) - throw new InvalidOperationException("Read past last position"); + Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); if (positions != null) { diff --git a/src/Lucene.Net/Index/CheckIndex.cs b/src/Lucene.Net/Index/CheckIndex.cs index 2d5858162c..e2e30fdf5b 100644 --- a/src/Lucene.Net/Index/CheckIndex.cs +++ b/src/Lucene.Net/Index/CheckIndex.cs @@ -1235,13 +1235,14 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs, } lastPos = pos; BytesRef payload = postings.GetPayload(); - if (payload != null) + // LUCENENET specific - restructured to reduce number of checks in production + if (!(payload is null)) { Debugging.Assert(payload.IsValid); - } - if (payload != null && payload.Length < 1) - { - throw new Exception("term " + term + ": doc " + doc + ": pos " + pos + " payload length is out of bounds " + payload.Length); + if (payload.Length < 1) + { + throw new Exception("term " + term + ": doc " + doc + ": pos " + pos + " payload length is out of bounds " + payload.Length); + } } if (hasOffsets) { @@ -2356,19 +2357,20 @@ public virtual void FixIndex(Status result) result.NewSegments.Commit(result.Dir); } - private static bool assertsOn; + // LUCENENET: Not used + //private static bool assertsOn; - private static bool TestAsserts() - { - assertsOn = true; - return true; - } + //private static bool TestAsserts() + //{ + // assertsOn = true; + // return true; + //} - private static bool AssertsOn() - { - Debugging.Assert(TestAsserts); - return assertsOn; - } + //private static bool AssertsOn() + //{ + // Debugging.Assert(TestAsserts); + // return assertsOn; + //} ///// Command-line interface to check and fix an index. ///// @@ -2473,12 +2475,14 @@ public static void Main(string[] args) //Environment.Exit(1); } - // LUCENENET specific - doesn't apply + // LUCENENET specific - rather than having the user specify whether to enable asserts, we always run with them enabled. 
+ Debugging.AssertsEnabled = true; //if (!AssertsOn()) //{ // Console.WriteLine("\nNOTE: testing will be more thorough if you run java with '-ea:org.apache.lucene...', so assertions are enabled"); //} + if (onlySegments.Count == 0) { onlySegments = null; diff --git a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs index f394b4f5ae..c6d0913d06 100644 --- a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs +++ b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs @@ -1,5 +1,6 @@ #if FEATURE_CONCURRENTMERGESCHEDULER using J2N.Threading; +using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; using System.Runtime.CompilerServices; @@ -394,7 +395,7 @@ public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMer { lock (this) { - //Debugging.Assert(!Thread.holdsLock(writer)); + Debugging.Assert(() => !Monitor.IsEntered(writer)); this.m_writer = writer; diff --git a/src/Lucene.Net/Index/DirectoryReader.cs b/src/Lucene.Net/Index/DirectoryReader.cs index f34c2e5d97..4bb5997e4c 100644 --- a/src/Lucene.Net/Index/DirectoryReader.cs +++ b/src/Lucene.Net/Index/DirectoryReader.cs @@ -1,7 +1,6 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Index diff --git a/src/Lucene.Net/Index/DocFieldProcessor.cs b/src/Lucene.Net/Index/DocFieldProcessor.cs index 2cdce5ed63..177b450185 100644 --- a/src/Lucene.Net/Index/DocFieldProcessor.cs +++ b/src/Lucene.Net/Index/DocFieldProcessor.cs @@ -2,7 +2,6 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; diff --git a/src/Lucene.Net/Index/DocTermOrds.cs b/src/Lucene.Net/Index/DocTermOrds.cs index eaa41ca4f5..79121c45d3 100644 --- a/src/Lucene.Net/Index/DocTermOrds.cs +++ b/src/Lucene.Net/Index/DocTermOrds.cs @@ -1,7 +1,6 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Index @@ -23,9 +22,9 @@ namespace Lucene.Net.Index * limitations under the License. 
*/ - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator; + using IBits = Lucene.Net.Util.IBits; using PagedBytes = Lucene.Net.Util.PagedBytes; using PostingsFormat = Lucene.Net.Codecs.PostingsFormat; // javadocs using SeekStatus = Lucene.Net.Index.TermsEnum.SeekStatus; diff --git a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs index b2eaf0a5d7..983914a8fd 100644 --- a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs +++ b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs @@ -114,14 +114,14 @@ internal virtual DocValuesFieldUpdates NewUpdates(string field, DocValuesFieldUp { case DocValuesFieldUpdatesType.NUMERIC: NumericDocValuesFieldUpdates numericUpdates; - Debugging.Assert(() => !numericDVUpdates.TryGetValue(field, out numericUpdates)); + Debugging.Assert(() => !numericDVUpdates.ContainsKey(field)); numericUpdates = new NumericDocValuesFieldUpdates(field, maxDoc); numericDVUpdates[field] = numericUpdates; return numericUpdates; case DocValuesFieldUpdatesType.BINARY: BinaryDocValuesFieldUpdates binaryUpdates; - Debugging.Assert(() => !binaryDVUpdates.TryGetValue(field, out binaryUpdates)); + Debugging.Assert(() => !binaryDVUpdates.ContainsKey(field)); binaryUpdates = new BinaryDocValuesFieldUpdates(field, maxDoc); binaryDVUpdates[field] = binaryUpdates; return binaryUpdates; diff --git a/src/Lucene.Net/Index/DocumentsWriter.cs b/src/Lucene.Net/Index/DocumentsWriter.cs index 72649029da..4707b40453 100644 --- a/src/Lucene.Net/Index/DocumentsWriter.cs +++ b/src/Lucene.Net/Index/DocumentsWriter.cs @@ -3,10 +3,8 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics; -using System.Threading; -using System.Reflection; using System.Runtime.CompilerServices; +using System.Threading; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Index @@ -32,8 +30,8 @@ namespace Lucene.Net.Index using BinaryDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.BinaryDocValuesUpdate; using BytesRef = Lucene.Net.Util.BytesRef; using Directory = Lucene.Net.Store.Directory; - using IEvent = Lucene.Net.Index.IndexWriter.IEvent; using FlushedSegment = Lucene.Net.Index.DocumentsWriterPerThread.FlushedSegment; + using IEvent = Lucene.Net.Index.IndexWriter.IEvent; using InfoStream = Lucene.Net.Util.InfoStream; using NumericDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.NumericDocValuesUpdate; using Query = Lucene.Net.Search.Query; @@ -246,7 +244,7 @@ internal void Abort(IndexWriter writer) { lock (this) { - //Debugging.Assert(!Thread.HoldsLock(writer), "IndexWriter lock should never be hold when aborting"); + Debugging.Assert(() => !Monitor.IsEntered(writer), () => "IndexWriter lock should never be hold when aborting"); bool success = false; JCG.HashSet newFilesSet = new JCG.HashSet(); try @@ -289,7 +287,7 @@ internal void LockAndAbortAll(IndexWriter indexWriter) { lock (this) { - //Debugging.Assert(indexWriter.HoldsFullFlushLock()); + Debugging.Assert(() => indexWriter.HoldsFullFlushLock); if (infoStream.IsEnabled("DW")) { infoStream.Message("DW", "lockAndAbortAll"); @@ -329,7 +327,7 @@ internal void LockAndAbortAll(IndexWriter indexWriter) private void AbortThreadState(ThreadState perThread, ISet newFiles) { - //Debugging.Assert(perThread.HeldByCurrentThread); + Debugging.Assert(() => perThread.IsHeldByCurrentThread); if (perThread.IsActive) // we might be closed { if (perThread.IsInitialized) @@ -360,7 +358,7 @@ internal 
void UnlockAllAfterAbortAll(IndexWriter indexWriter) { lock (this) { - //Debugging.Assert(indexWriter.HoldsFullFlushLock()); + Debugging.Assert(() => indexWriter.HoldsFullFlushLock); if (infoStream.IsEnabled("DW")) { infoStream.Message("DW", "unlockAll"); @@ -371,10 +369,10 @@ internal void UnlockAllAfterAbortAll(IndexWriter indexWriter) try { ThreadState perThread = perThreadPool.GetThreadState(i); - //if (perThread.HeldByCurrentThread) - //{ - perThread.Unlock(); - //} + if (perThread.IsHeldByCurrentThread) + { + perThread.Unlock(); + } } catch (Exception e) { diff --git a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs index 0eee1e087e..b79a7be7e2 100644 --- a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs +++ b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs @@ -2,7 +2,6 @@ using Lucene.Net.Support; using Lucene.Net.Support.Threading; using System; -using System.Diagnostics; using System.Threading; namespace Lucene.Net.Index diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs index 527413ed76..20582bb0a2 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs @@ -3,7 +3,6 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Threading; using JCG = J2N.Collections.Generic; @@ -153,7 +152,11 @@ private bool AssertMemory() * fail. To prevent this we only assert if the the largest document seen * is smaller than the 1/2 of the maxRamBufferMB */ - Debugging.Assert(() => ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes + ", pending DWPT: " + numPending + ", flushing DWPT: " + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes + ", peakDelta mem: " + peakDelta + " byte"); + Debugging.Assert(() => ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected + + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes + + ", pending DWPT: " + numPending + ", flushing DWPT: " + + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes + + ", peakDelta mem: " + peakDelta + " byte"); } } return true; @@ -290,7 +293,7 @@ internal void DoAfterFlush(DocumentsWriterPerThread dwpt) private bool UpdateStallState() { - //Debugging.Assert(Thread.holdsLock(this)); + Debugging.Assert(() => Monitor.IsEntered(this)); long limit = StallLimitBytes; /* * we block indexing threads if net byte grows due to slow flushes diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs index 67350ce826..49f535bb29 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs @@ -2,7 +2,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support.Threading; using System.Collections.Generic; -using System.Diagnostics; +using System.Threading; namespace Lucene.Net.Index { @@ -128,7 +128,7 @@ internal virtual bool HasTickets private int InnerPurge(IndexWriter writer) { - //Debugging.Assert(PurgeLock.HeldByCurrentThread); + Debugging.Assert(() => purgeLock.IsHeldByCurrentThread); int numPurged = 0; while (true) { @@ -173,8 +173,8 @@ private int InnerPurge(IndexWriter writer) internal virtual int ForcePurge(IndexWriter writer) { - //Debugging.Assert(!Thread.HoldsLock(this)); - //Debugging.Assert(!Thread.holdsLock(writer)); + 
Debugging.Assert(() => !Monitor.IsEntered(this)); + Debugging.Assert(() => !Monitor.IsEntered(writer)); purgeLock.@Lock(); try { @@ -188,8 +188,8 @@ internal virtual int ForcePurge(IndexWriter writer) internal virtual int TryPurge(IndexWriter writer) { - //Debugging.Assert(!Thread.holdsLock(this)); - //Debugging.Assert(!Thread.holdsLock(writer)); + Debugging.Assert(() => !Monitor.IsEntered(this)); + Debugging.Assert(() => !Monitor.IsEntered(writer)); if (purgeLock.TryLock()) { try diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs index b96f06354d..ef4deb7e0d 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs @@ -80,14 +80,14 @@ internal ThreadState(DocumentsWriterPerThread dpwt) /// internal void Deactivate() // LUCENENET NOTE: Made internal because it is called outside of this context { - //Debugging.Assert(this.HeldByCurrentThread); + Debugging.Assert(() => this.IsHeldByCurrentThread); isActive = false; Reset(); } internal void Reset() // LUCENENET NOTE: Made internal because it is called outside of this context { - //Debugging.Assert(this.HeldByCurrentThread); + Debugging.Assert(() => this.IsHeldByCurrentThread); this.dwpt = null; this.bytesUsed = 0; this.flushPending = false; @@ -98,30 +98,52 @@ internal void Reset() // LUCENENET NOTE: Made internal because it is called outs /// only return false iff the DW has been disposed and this /// is already checked out for flush. /// - internal bool IsActive => - //Debugging.Assert(this.HeldByCurrentThread); - isActive; + internal bool IsActive + { + get + { + Debugging.Assert(() => this.IsHeldByCurrentThread); + return isActive; + } + + } - internal bool IsInitialized => - //Debugging.Assert(this.HeldByCurrentThread); - IsActive && dwpt != null; + internal bool IsInitialized + { + get + { + Debugging.Assert(() => this.IsHeldByCurrentThread); + return IsActive && dwpt != null; + } + } + /// /// Returns the number of currently active bytes in this ThreadState's /// /// - public long BytesUsedPerThread => - //Debugging.Assert(this.HeldByCurrentThread); - // public for FlushPolicy - bytesUsed; + public long BytesUsedPerThread + { + get + { + Debugging.Assert(() => this.IsHeldByCurrentThread); + // public for FlushPolicy + return bytesUsed; + } + } /// /// Returns this s /// - public DocumentsWriterPerThread DocumentsWriterPerThread => - //Debugging.Assert(this.HeldByCurrentThread); - // public for FlushPolicy - dwpt; + public DocumentsWriterPerThread DocumentsWriterPerThread + { + get + { + Debugging.Assert(() => this.IsHeldByCurrentThread); + // public for FlushPolicy + return dwpt; + } + } /// /// Returns true iff this is marked as flush @@ -270,7 +292,7 @@ internal virtual void DeactivateUnreleasedStates() internal virtual DocumentsWriterPerThread Reset(ThreadState threadState, bool closed) { - //Debugging.Assert(threadState.HeldByCurrentThread); + Debugging.Assert(() => threadState.IsHeldByCurrentThread); DocumentsWriterPerThread dwpt = threadState.dwpt; if (!closed) { diff --git a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs index 11c1f1b115..ab3bb0dd4a 100644 --- a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs @@ -85,13 +85,13 @@ internal void WaitIfStalled() // try // { //#endif - // make sure not to run IncWaiters / DecrWaiters in Debug.Assert as that gets - // removed at 
compile time if built in Release mode + // LUCENENET: make sure not to run IncWaiters / DecrWaiters in Debugging.Assert as that gets + // disabled in production var result = IncWaiters(); - Debugging.Assert(() => result); - Monitor.Wait(this); - result = DecrWaiters(); - Debugging.Assert(() => result); + Debugging.Assert(() => result); + Monitor.Wait(this); + result = DecrWaiters(); + Debugging.Assert(() => result); //#if !NETSTANDARD1_6 // LUCENENET NOTE: Senseless to catch and rethrow the same exception type // } // catch (ThreadInterruptedException e) @@ -112,8 +112,7 @@ internal bool AnyStalledThreads() private bool IncWaiters() { numWaiting++; - bool existed = waiting.ContainsKey(ThreadJob.CurrentThread); - Debugging.Assert(() => !existed); + Debugging.Assert(() => !waiting.ContainsKey(ThreadJob.CurrentThread)); waiting[ThreadJob.CurrentThread] = true; return numWaiting > 0; diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs index 8e961a7c60..e5757ea3be 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs @@ -197,8 +197,7 @@ internal override void NewTerm(int termID) { // First time we're seeing this term since the last // flush - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("FreqProxTermsWriterPerField.newTerm start"); + Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.newTerm start")); FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; postings.lastDocIDs[termID] = docState.docID; diff --git a/src/Lucene.Net/Index/IndexFileDeleter.cs b/src/Lucene.Net/Index/IndexFileDeleter.cs index 6b38589558..df29f181fa 100644 --- a/src/Lucene.Net/Index/IndexFileDeleter.cs +++ b/src/Lucene.Net/Index/IndexFileDeleter.cs @@ -532,7 +532,7 @@ public void Checkpoint(SegmentInfos segmentInfos, bool isCommit) { Debugging.Assert(() => IsLocked); - //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Monitor.IsEntered(writer)); long t0 = 0; if (infoStream.IsEnabled("IFD")) { @@ -650,7 +650,7 @@ public bool Exists(string fileName) { Debugging.Assert(() => IsLocked); // LUCENENET: Using TryGetValue to eliminate extra lookup - return refCounts.TryGetValue(fileName, out RefCount value) ? value.count > 0 : false; + return refCounts.TryGetValue(fileName, out RefCount value) && value.count > 0; } private RefCount GetRefCount(string fileName) @@ -724,7 +724,7 @@ internal void DeleteFile(string fileName) // the file is open in another process, and queue // the file for subsequent deletion. 
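// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the applied patch) of the lock-invariant idiom
// used throughout this commit: Monitor.IsEntered(obj) is the .NET analogue of Java's
// Thread.holdsLock(obj), so a method can assert that it is (or is not) being called
// while the caller holds lock(obj). The names below are hypothetical.
using System.Threading;
using Lucene.Net.Diagnostics;

internal sealed class LockInvariantExample
{
    private readonly object syncRoot = new object();

    private void MutateSharedState()
    {
        // Caller must already be inside lock (syncRoot).
        Debugging.Assert(() => Monitor.IsEntered(syncRoot));
        // ... mutate state guarded by syncRoot ...
    }

    internal void DoWork()
    {
        lock (syncRoot)
        {
            MutateSharedState();
        }
    }
}
// ---------------------------------------------------------------------------------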
- //Debugging.Assert(e.Message.Contains("cannot delete")); + //Debugging.Assert(() => e.Message.Contains("cannot delete")); if (infoStream.IsEnabled("IFD")) { diff --git a/src/Lucene.Net/Index/IndexWriter.cs b/src/Lucene.Net/Index/IndexWriter.cs index 0c23c0c41b..8d3d47fd07 100644 --- a/src/Lucene.Net/Index/IndexWriter.cs +++ b/src/Lucene.Net/Index/IndexWriter.cs @@ -6,7 +6,6 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; using System.IO; using System.Runtime.CompilerServices; @@ -708,7 +707,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) } else { - Debugging.Assert(() => rld.Info == info, () => "Infos are not equal");//, "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info)); + Debugging.Assert(() => rld.Info == info, () => "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info)); } if (create) @@ -2570,8 +2569,7 @@ private void RollbackInternal() infoStream.Message("IW", "rollback: infos=" + SegString(segmentInfos.Segments)); } - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("rollback before checkpoint"); + Debugging.Assert(() => TestPoint("rollback before checkpoint")); // Ask deleter to locate unreferenced files & remove // them: @@ -3816,12 +3814,8 @@ private void FinishCommit() /// private readonly object fullFlushLock = new object(); - // LUCENENET NOTE: Not possible in .NET - //// for assert - //internal virtual bool HoldsFullFlushLock() - //{ - // return Thread.holdsLock(FullFlushLock); - //} + // for assert + internal virtual bool HoldsFullFlushLock => Monitor.IsEntered(fullFlushLock); /// /// Flush all in-memory buffered updates (adds and deletes) @@ -3978,16 +3972,7 @@ public long RamSizeInBytes() // for testing only internal virtual DocumentsWriter DocsWriter - { - get - { - bool test = false; - // LUCENENET NOTE: Must set test outside of Debug.Assert!! - bool isTest = test = true; - Debugging.Assert(() => isTest); - return test ? docWriter : null; - } - } + => Debugging.AssertsEnabled ? 
docWriter : null; // LUCENENET specific - just read the status, simpler than using Assert() to set a local variable /// /// Expert: Return the number of documents currently @@ -4098,8 +4083,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer { lock (this) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("startCommitMergeDeletes"); + Debugging.Assert(() => TestPoint("startCommitMergeDeletes")); IList sourceSegments = merge.Segments; @@ -4335,8 +4319,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) { lock (this) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("startCommitMerge"); + Debugging.Assert(() => TestPoint("startCommitMerge")); if (hitOOM) { @@ -5511,8 +5494,7 @@ private void StartCommit(SegmentInfos toSync) infoStream.Message("IW", "done all syncs: " + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", filesToSync)); } - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("midStartCommitSuccess"); + Debugging.Assert(() => TestPoint("midStartCommitSuccess")); } finally { @@ -5542,8 +5524,7 @@ private void StartCommit(SegmentInfos toSync) { HandleOOM(oom, "startCommit"); } - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) TestPoint("finishStartCommit"); + Debugging.Assert(() => TestPoint("finishStartCommit")); } /// diff --git a/src/Lucene.Net/Index/PrefixCodedTerms.cs b/src/Lucene.Net/Index/PrefixCodedTerms.cs index b914b5fd4b..a0f03b9f79 100644 --- a/src/Lucene.Net/Index/PrefixCodedTerms.cs +++ b/src/Lucene.Net/Index/PrefixCodedTerms.cs @@ -92,6 +92,9 @@ public virtual void Dispose() public virtual bool MoveNext() { + // LUCENENET specific - Since there is no way to check for a next element + // without calling this method in .NET, the assert is redundant and ineffective. 
+ //Debugging.Assert(() => input.GetFilePointer() < input.Length); // Has next if (input.GetFilePointer() < input.Length) { try diff --git a/src/Lucene.Net/Index/ReadersAndUpdates.cs b/src/Lucene.Net/Index/ReadersAndUpdates.cs index 976d412950..7f8b360bed 100644 --- a/src/Lucene.Net/Index/ReadersAndUpdates.cs +++ b/src/Lucene.Net/Index/ReadersAndUpdates.cs @@ -6,6 +6,7 @@ using System.Globalization; using System.Runtime.CompilerServices; using System.Text; +using System.Threading; namespace Lucene.Net.Index { @@ -230,7 +231,7 @@ public virtual bool Delete(int docID) lock (this) { Debugging.Assert(() => liveDocs != null); - //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Monitor.IsEntered(writer)); Debugging.Assert(() => docID >= 0 && docID < liveDocs.Length, () => "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount); Debugging.Assert(() => !liveDocsShared); bool didDelete = liveDocs.Get(docID); @@ -318,7 +319,7 @@ public virtual void InitWritableLiveDocs() { lock (this) { - //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Monitor.IsEntered(writer)); Debugging.Assert(() => Info.Info.DocCount > 0); //System.out.println("initWritableLivedocs seg=" + info + " liveDocs=" + liveDocs + " shared=" + shared); if (liveDocsShared) @@ -348,7 +349,7 @@ public virtual IBits LiveDocs { lock (this) { - //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Monitor.IsEntered(writer)); return liveDocs; } } @@ -359,7 +360,7 @@ public virtual IBits GetReadOnlyLiveDocs() lock (this) { //System.out.println("getROLiveDocs seg=" + info); - //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Monitor.IsEntered(writer)); liveDocsShared = true; //if (liveDocs != null) { //System.out.println(" liveCount=" + liveDocs.count()); @@ -393,7 +394,7 @@ public virtual bool WriteLiveDocs(Directory dir) { lock (this) { - //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Monitor.IsEntered(writer)); //System.out.println("rld.writeLiveDocs seg=" + info + " pendingDelCount=" + pendingDeleteCount + " numericUpdates=" + numericUpdates); if (pendingDeleteCount == 0) { @@ -458,7 +459,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta { lock (this) { - //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Monitor.IsEntered(writer)); //System.out.println("rld.writeFieldUpdates: seg=" + info + " numericFieldUpdates=" + numericFieldUpdates); Debugging.Assert(dvUpdates.Any); @@ -692,6 +693,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta } else { // no update for this document + Debugging.Assert(() => curDoc < updateDoc); if (currentValues != null && DocsWithField.Get(curDoc)) { // only read the current value if the document had a value before @@ -724,6 +726,7 @@ private IEnumerable GetBytesRefEnumerable(SegmentReader reader, string } else { // no update for this document + Debugging.Assert(() => curDoc < updateDoc); if (currentValues != null && DocsWithField.Get(curDoc)) { // only read the current value if the document had a value before @@ -746,7 +749,7 @@ internal virtual SegmentReader GetReaderForMerge(IOContext context) { lock (this) { - //Debugging.Assert(Thread.holdsLock(Writer)); + Debugging.Assert(() => Monitor.IsEntered(writer)); // must execute these two statements as atomic operation, otherwise we // could lose updates if e.g. 
another thread calls writeFieldUpdates in // between, or the updates are applied to the obtained reader, but then diff --git a/src/Lucene.Net/Index/SortedDocValuesWriter.cs b/src/Lucene.Net/Index/SortedDocValuesWriter.cs index 17072c52e6..08c7985932 100644 --- a/src/Lucene.Net/Index/SortedDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedDocValuesWriter.cs @@ -148,6 +148,7 @@ private IEnumerable GetBytesRefEnumberable(int valueCount, int[] sorte private IEnumerable GetOrdsEnumberable(int maxDoc, int[] ordMap) { AppendingDeltaPackedInt64Buffer.Iterator iter = pending.GetIterator(); + Debugging.Assert(() => pending.Count == maxDoc); for (int i = 0; i < maxDoc; ++i) { diff --git a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs index a85d348043..3a4460aa7f 100644 --- a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs @@ -203,7 +203,7 @@ private IEnumerable GetBytesRefEnumberable(int valueCount, int[] sorte { AppendingDeltaPackedInt64Buffer.Iterator iter = pendingCounts.GetIterator(); - Debugging.Assert(() => maxDoc == pendingCounts.Count, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count); + Debugging.Assert(() => pendingCounts.Count == maxDoc, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count); for (int i = 0; i < maxDoc; ++i) { diff --git a/src/Lucene.Net/Index/StoredFieldsProcessor.cs b/src/Lucene.Net/Index/StoredFieldsProcessor.cs index aac5b12ae2..fb75321d3b 100644 --- a/src/Lucene.Net/Index/StoredFieldsProcessor.cs +++ b/src/Lucene.Net/Index/StoredFieldsProcessor.cs @@ -1,6 +1,6 @@ +using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index @@ -139,8 +139,7 @@ internal void Fill(int docID) [MethodImpl(MethodImplOptions.NoInlining)] internal override void FinishDocument() { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docWriter.TestPoint("StoredFieldsWriter.finishDocument start"); + Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument start")); InitFieldsWriter(IOContext.DEFAULT); Fill(docState.docID); @@ -157,8 +156,7 @@ internal override void FinishDocument() } Reset(); - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docWriter.TestPoint("StoredFieldsWriter.finishDocument end"); + Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument end")); } public override void AddField(int docID, IIndexableField field, FieldInfo fieldInfo) @@ -181,8 +179,7 @@ public override void AddField(int docID, IIndexableField field, FieldInfo fieldI fieldInfos[numStoredFields] = fieldInfo; numStoredFields++; - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField"); + Debugging.Assert(() => docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField")); } } } diff --git a/src/Lucene.Net/Index/TermVectorsConsumer.cs b/src/Lucene.Net/Index/TermVectorsConsumer.cs index 94c48e7048..30fb1dab10 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumer.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumer.cs @@ -114,8 +114,7 @@ private void InitTermVectorsWriter() [MethodImpl(MethodImplOptions.NoInlining)] internal override void FinishDocument(TermsHash 
termsHash) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start"); + Debugging.Assert(() => docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start")); if (!hasVectors) { diff --git a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs index f8de489682..032bb04b4f 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs @@ -293,8 +293,7 @@ internal void WriteProx(TermVectorsPostingsArray postings, int termID) internal override void NewTerm(int termID) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start"); + Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start")); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; postings.freqs[termID] = 1; @@ -306,8 +305,7 @@ internal override void NewTerm(int termID) internal override void AddTerm(int termID) { - // LUCENENET: .NET doesn't support asserts in release mode - if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start"); + Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start")); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; postings.freqs[termID]++; diff --git a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs index 3327ee9672..5b46c72597 100644 --- a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs +++ b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs @@ -50,9 +50,7 @@ public ThreadAffinityDocumentsWriterThreadPool(int maxNumPerThreads) public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter documentsWriter) { - ThreadState threadState; - threadBindings.TryGetValue(requestingThread, out threadState); - if (threadState != null && threadState.TryLock()) + if (threadBindings.TryGetValue(requestingThread, out ThreadState threadState) && threadState.TryLock()) { return threadState; } @@ -67,7 +65,7 @@ we should somehow prevent this. 
*/ ThreadState newState = NewThreadState(); // state is already locked if non-null if (newState != null) { - //Debugging.Assert(newState.HeldByCurrentThread); + Debugging.Assert(() => newState.IsHeldByCurrentThread); threadBindings[requestingThread] = newState; return newState; } diff --git a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs index 13438e032e..de70b6f85b 100644 --- a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs +++ b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs @@ -230,7 +230,8 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; ; } - //assert inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0; + Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + return new AnonymousClassFieldCacheDocIdSet(fcsi, inclusiveLowerPoint, inclusiveUpperPoint, context.AtomicReader.MaxDoc, acceptDocs); } } diff --git a/src/Lucene.Net/Search/FieldComparator.cs b/src/Lucene.Net/Search/FieldComparator.cs index ae30170032..9555bee4e0 100644 --- a/src/Lucene.Net/Search/FieldComparator.cs +++ b/src/Lucene.Net/Search/FieldComparator.cs @@ -981,7 +981,7 @@ public override int CompareValues(float first, float second) // LUCENENET specific special case: // In case of zero, we may have a "positive 0" or "negative 0" - // to tie-break. So, we use JCG.Comparer to do the comparison. + // to tie-break. So, we use JCG.Comparer to do the comparison. return JCG.Comparer.Default.Compare(second, first); } @@ -992,7 +992,7 @@ public override int CompareTop(int doc) // LUCENENET specific special case: // In case of zero, we may have a "positive 0" or "negative 0" - // to tie-break. So, we use JCG.Comparer to do the comparison. + // to tie-break. So, we use JCG.Comparer to do the comparison. 
return JCG.Comparer.Default.Compare(docValue, topValue); } } diff --git a/src/Lucene.Net/Search/ReferenceManager.cs b/src/Lucene.Net/Search/ReferenceManager.cs index 06f62928fa..d9ab9c87ec 100644 --- a/src/Lucene.Net/Search/ReferenceManager.cs +++ b/src/Lucene.Net/Search/ReferenceManager.cs @@ -200,7 +200,7 @@ private void DoMaybeRefresh() G newReference = RefreshIfNeeded(reference); if (newReference != null) { - Debugging.Assert(() => (object)newReference != (object)reference, () => "refreshIfNeeded should return null if refresh wasn't needed"); + Debugging.Assert(() => !ReferenceEquals(newReference, reference), () => "refreshIfNeeded should return null if refresh wasn't needed"); try { SwapReference(newReference); @@ -311,7 +311,7 @@ protected virtual void AfterMaybeRefresh() /// If the release operation on the given resource throws an public void Release(G reference) { - Debugging.Assert(() => reference != null); + Debugging.Assert(() => !(reference is null)); DecRef(reference); } diff --git a/src/Lucene.Net/Store/ByteBufferIndexInput.cs b/src/Lucene.Net/Store/ByteBufferIndexInput.cs index 821ce705a7..8402ad9ff3 100644 --- a/src/Lucene.Net/Store/ByteBufferIndexInput.cs +++ b/src/Lucene.Net/Store/ByteBufferIndexInput.cs @@ -1,5 +1,6 @@ using J2N.IO; using Lucene.Net.Diagnostics; +using Lucene.Net.Util.Fst; using System; using System.IO; using System.Runtime.CompilerServices; @@ -390,6 +391,7 @@ protected override void Dispose(bool disposing) #if FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR foreach (var pair in clones) { + Debugging.Assert(() => pair.Key.isClone); pair.Key.UnsetBuffers(); } this.clones.Clear(); diff --git a/src/Lucene.Net/Store/NIOFSDirectory.cs b/src/Lucene.Net/Store/NIOFSDirectory.cs index bf26d21140..8eefa140eb 100644 --- a/src/Lucene.Net/Store/NIOFSDirectory.cs +++ b/src/Lucene.Net/Store/NIOFSDirectory.cs @@ -257,16 +257,9 @@ protected override void ReadInternal(byte[] b, int offset, int len) { while (readLength > 0) { - int limit; - if (readLength > CHUNK_SIZE) - { - limit = readOffset + CHUNK_SIZE; - } - else - { - limit = readOffset + readLength; - } - bb.Limit = limit; + int toRead = Math.Min(CHUNK_SIZE, readLength); + bb.Limit = readOffset + toRead; + Debugging.Assert(() => bb.Remaining == toRead); int i = m_channel.Read(bb, pos); if (i <= 0) // be defensive here, even though we checked before hand, something could have changed { diff --git a/src/Lucene.Net/Store/SimpleFSDirectory.cs b/src/Lucene.Net/Store/SimpleFSDirectory.cs index 8006f06434..35167d9954 100644 --- a/src/Lucene.Net/Store/SimpleFSDirectory.cs +++ b/src/Lucene.Net/Store/SimpleFSDirectory.cs @@ -1,5 +1,5 @@ +using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Store @@ -235,7 +235,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) // all we need to do is Read(). 
total = m_file.Read(b, offset, len); - //Debugging.Assert(total == len); + Debugging.Assert(() => total == len); } catch (IOException ioe) { diff --git a/src/Lucene.Net/Util/BroadWord.cs b/src/Lucene.Net/Util/BroadWord.cs index c70029ce5c..db7241caef 100644 --- a/src/Lucene.Net/Util/BroadWord.cs +++ b/src/Lucene.Net/Util/BroadWord.cs @@ -1,5 +1,6 @@ using J2N.Numerics; using Lucene.Net.Diagnostics; +using System.Globalization; namespace Lucene.Net.Util { @@ -70,7 +71,7 @@ public static int Select(long x, int r) long b = (long)((ulong)(((long)((ulong)SmallerUpTo7_8(s, (r * L8_L)) >> 7)) * L8_L) >> 53); // & (~7L); // Step 3, side ways addition for byte number times 8 long l = r - (((long)((ulong)(s << 8) >> (int)b)) & 0xFFL); // Step 4, byte wise rank, subtract the rank with byte at b-8, or zero for b=0; - Debugging.Assert(() => 0L <= 1); + Debugging.Assert(() => 0L <= 1, () => l.ToString(CultureInfo.InvariantCulture)); //assert l < 8 : l; //fails when bit r is not available. // Select bit l from byte (x >>> b): diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs index 2eec5f70b0..86bd5283dd 100644 --- a/src/Lucene.Net/Util/Fst/FST.cs +++ b/src/Lucene.Net/Util/Fst/FST.cs @@ -376,8 +376,7 @@ private void CacheRootArcs() cachedRootArcs = (FST.Arc[])new FST.Arc[0x80]; ReadRootArcs(cachedRootArcs); - bool set = SetAssertingRootArcs(cachedRootArcs); - Debugging.Assert(() => set); + Debugging.Assert(() => SetAssertingRootArcs(cachedRootArcs)); Debugging.Assert(AssertRootArcs); } @@ -409,7 +408,7 @@ public void ReadRootArcs(FST.Arc[] arcs) } } - private bool SetAssertingRootArcs(FST.Arc[] arcs) + private bool SetAssertingRootArcs(FST.Arc[] arcs) // Only called from assert { assertingCachedRootArcs = (FST.Arc[])new FST.Arc[arcs.Length]; ReadRootArcs(assertingCachedRootArcs); diff --git a/src/Lucene.Net/Util/Fst/NodeHash.cs b/src/Lucene.Net/Util/Fst/NodeHash.cs index ad402d0ff0..8e6460f557 100644 --- a/src/Lucene.Net/Util/Fst/NodeHash.cs +++ b/src/Lucene.Net/Util/Fst/NodeHash.cs @@ -162,8 +162,7 @@ public long Add(Builder.UnCompiledNode nodeIn) // freeze & add long node = fst.AddNode(nodeIn); //System.out.println(" now freeze node=" + node); - long hashNode = Hash(node); - Debugging.Assert(() => hashNode == h, () => "frozenHash=" + hashNode + " vs h=" + h); + Debugging.Assert(() => Hash(node) == h, () => "frozenHash=" + Hash(node) + " vs h=" + h); count++; table.Set(pos, node); // Rehash at 2/3 occupancy: diff --git a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs index 1fb241caae..470b76ee1a 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs @@ -3,6 +3,7 @@ using Lucene.Net.Support; using System; using System.Diagnostics.CodeAnalysis; +using System.Globalization; using System.Text; namespace Lucene.Net.Util.Packed @@ -219,7 +220,7 @@ public EliasFanoEncoder(long numValues, long upperBound) /// private static long NumInt64sForBits(long numBits) // Note: int version in FixedBitSet.bits2words() { - Debugging.Assert(() => numBits >= 0, numBits.ToString); + Debugging.Assert(() => numBits >= 0, () => numBits.ToString(CultureInfo.InvariantCulture)); return (long)((ulong)(numBits + (sizeof(long) * 8 - 1)) >> LOG2_INT64_SIZE); } diff --git a/src/Lucene.Net/Util/Packed/PackedDataInput.cs b/src/Lucene.Net/Util/Packed/PackedDataInput.cs index bf0921353f..89e266ac31 100644 --- a/src/Lucene.Net/Util/Packed/PackedDataInput.cs +++ b/src/Lucene.Net/Util/Packed/PackedDataInput.cs @@ 
-1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Globalization; namespace Lucene.Net.Util.Packed { @@ -52,7 +53,7 @@ public PackedDataInput(DataInput @in) /// public long ReadInt64(int bitsPerValue) { - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, bitsPerValue.ToString); + Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); long r = 0; while (bitsPerValue > 0) { diff --git a/src/Lucene.Net/Util/Packed/PackedInts.cs b/src/Lucene.Net/Util/Packed/PackedInts.cs index c9da5f8d8b..fac1a53af2 100644 --- a/src/Lucene.Net/Util/Packed/PackedInts.cs +++ b/src/Lucene.Net/Util/Packed/PackedInts.cs @@ -3,6 +3,7 @@ using Lucene.Net.Support; using System; using System.Collections.Generic; +using System.Globalization; using System.IO; namespace Lucene.Net.Util.Packed @@ -141,8 +142,8 @@ public override bool IsSupported(int bitsPerValue) /// public override float OverheadPerValue(int bitsPerValue) { + Debugging.Assert(() => IsSupported(bitsPerValue)); int valuesPerBlock = 64 / bitsPerValue; - int overhead = 64 % bitsPerValue; return (float)overhead / valuesPerBlock; } @@ -204,6 +205,7 @@ internal Format(int id) /// public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPerValue) { + Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); // assume long-aligned return 8L * Int64Count(packedIntsVersion, valueCount, bitsPerValue); } @@ -216,16 +218,13 @@ public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPer /// public virtual int Int64Count(int packedIntsVersion, int valueCount, int bitsPerValue) { + Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); long byteCount = ByteCount(packedIntsVersion, valueCount, bitsPerValue); - + Debugging.Assert(() => byteCount < 8L * int.MaxValue); if ((byteCount % 8) == 0) - { return (int)(byteCount / 8); - } else - { return (int)(byteCount / 8 + 1); - } } /// diff --git a/src/Lucene.Net/Util/Packed/PackedWriter.cs b/src/Lucene.Net/Util/Packed/PackedWriter.cs index aaea2434c9..2df619f068 100644 --- a/src/Lucene.Net/Util/Packed/PackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/PackedWriter.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; +using System.Globalization; using System.IO; using System.Runtime.CompilerServices; @@ -55,7 +56,7 @@ internal PackedWriter(PackedInt32s.Format format, DataOutput @out, int valueCoun public override void Add(long v) { - Debugging.Assert(() => m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), m_bitsPerValue.ToString); + Debugging.Assert(() => m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), () => m_bitsPerValue.ToString(CultureInfo.InvariantCulture)); Debugging.Assert(() => !finished); if (m_valueCount != -1 && written >= m_valueCount) { diff --git a/src/Lucene.Net/Util/PagedBytes.cs b/src/Lucene.Net/Util/PagedBytes.cs index e80ab68336..dd392cb285 100644 --- a/src/Lucene.Net/Util/PagedBytes.cs +++ b/src/Lucene.Net/Util/PagedBytes.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.Collections.Generic; +using System.Globalization; namespace Lucene.Net.Util { @@ -162,7 +163,7 @@ public long RamBytesUsed() /// public PagedBytes(int blockBits) { - Debugging.Assert(() => blockBits > 0 && blockBits <= 31, blockBits.ToString); + Debugging.Assert(() => 
blockBits > 0 && blockBits <= 31, () => blockBits.ToString(CultureInfo.InvariantCulture)); this.blockSize = 1 << blockBits; this.blockBits = blockBits; blockMask = blockSize - 1; diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs index 493104b172..298009bcab 100644 --- a/src/Lucene.Net/Util/RamUsageEstimator.cs +++ b/src/Lucene.Net/Util/RamUsageEstimator.cs @@ -507,7 +507,7 @@ private static long MeasureObjectSize(object root) seen.Add(ob); Type obClazz = ob.GetType(); - + // LUCENENET specific - .NET cannot return a null type for an object, so no need to assert it if (obClazz.Equals(typeof(string))) { // LUCENENET specific - we can get a closer estimate of a string diff --git a/src/Lucene.Net/Util/RollingBuffer.cs b/src/Lucene.Net/Util/RollingBuffer.cs index c6479cab2f..7558150343 100644 --- a/src/Lucene.Net/Util/RollingBuffer.cs +++ b/src/Lucene.Net/Util/RollingBuffer.cs @@ -140,6 +140,8 @@ public virtual T Get(int pos) } Debugging.Assert(() => InBounds(pos)); int index = GetIndex(pos); + //System.out.println(" pos=" + pos + " nextPos=" + nextPos + " -> index=" + index); + //assert buffer[index].pos == pos; return buffer[index]; } From 2f352bae8afaac2d8b6c1e09825e39fb6b8d41a6 Mon Sep 17 00:00:00 2001 From: Shad Storhaug Date: Tue, 18 Aug 2020 19:31:28 +0700 Subject: [PATCH 07/13] Added if (Debugging.AssertsEnabled) blocks to improve performance when asserts are disabled --- .../Analysis/CharFilter/BaseCharFilter.cs | 2 +- .../CharFilter/HTMLStripCharFilter.cs | 22 +- .../Analysis/CharFilter/MappingCharFilter.cs | 2 +- .../Analysis/CharFilter/NormalizeCharMap.cs | 2 +- .../Compound/CompoundWordTokenFilterBase.cs | 2 +- .../Analysis/Gl/GalicianStemmer.cs | 2 +- .../Analysis/Hunspell/Dictionary.cs | 4 +- .../Analysis/Hunspell/Stemmer.cs | 4 +- .../Miscellaneous/ASCIIFoldingFilter.cs | 2 +- .../Miscellaneous/SingleTokenTokenStream.cs | 4 +- .../Analysis/NGram/NGramTokenizer.cs | 4 +- .../Pattern/PatternCaptureGroupTokenFilter.cs | 2 +- .../Analysis/Pt/PortugueseStemmer.cs | 2 +- .../Analysis/Pt/RSLPStemmerBase.cs | 2 +- .../Analysis/Synonym/SynonymFilter.cs | 18 +- .../Analysis/Synonym/SynonymMap.cs | 9 +- .../Analysis/Util/CharArrayMap.cs | 2 +- .../Analysis/Util/CharTokenizer.cs | 4 +- .../Analysis/Util/CharacterUtils.cs | 32 ++- .../Analysis/Util/RollingCharBuffer.cs | 27 ++- .../Analysis/Util/SegmentingTokenizerBase.cs | 2 +- .../Analysis/Util/StemmerUtil.cs | 4 +- .../Analysis/Icu/ICUNormalizer2CharFilter.cs | 2 +- .../Analysis/Icu/Segmentation/ICUTokenizer.cs | 2 +- .../Icu/Segmentation/ICUTokenizerFactory.cs | 4 +- .../Dict/TokenInfoFST.cs | 2 +- .../GraphvizFormatter.cs | 7 +- .../JapaneseIterationMarkCharFilter.cs | 2 +- .../JapaneseTokenizer.cs | 39 +-- .../Tools/BinaryDictionaryWriter.cs | 27 ++- .../Tools/ConnectionCostsBuilder.cs | 6 +- .../Tools/ConnectionCostsWriter.cs | 4 +- .../BeiderMorseFilter.cs | 2 +- .../ByTask/Utils/AnalyzerFactory.cs | 2 +- .../Quality/QualityStats.cs | 2 +- .../Quality/Trec/TrecJudge.cs | 2 +- .../BlockTerms/BlockTermsReader.cs | 35 +-- .../BlockTerms/BlockTermsWriter.cs | 15 +- .../BlockTerms/FixedGapTermsIndexReader.cs | 27 ++- .../BlockTerms/FixedGapTermsIndexWriter.cs | 2 +- .../BlockTerms/VariableGapTermsIndexReader.cs | 2 +- .../BlockTerms/VariableGapTermsIndexWriter.cs | 2 +- .../Bloom/BloomFilteringPostingsFormat.cs | 2 +- src/Lucene.Net.Codecs/Bloom/FuzzySet.cs | 2 +- .../IntBlock/FixedIntBlockIndexInput.cs | 4 +- .../IntBlock/FixedIntBlockIndexOutput.cs | 2 +- 
.../IntBlock/VariableIntBlockIndexInput.cs | 2 +- .../IntBlock/VariableIntBlockIndexOutput.cs | 8 +- .../Memory/DirectDocValuesConsumer.cs | 2 +- .../Memory/DirectDocValuesProducer.cs | 2 +- .../Memory/DirectPostingsFormat.cs | 58 +++-- .../Memory/FSTOrdTermsReader.cs | 12 +- .../Memory/FSTTermOutputs.cs | 6 +- .../Memory/FSTTermsReader.cs | 8 +- .../Memory/MemoryDocValuesConsumer.cs | 2 +- .../Memory/MemoryDocValuesProducer.cs | 2 +- .../Memory/MemoryPostingsFormat.cs | 24 +- .../Pulsing/PulsingPostingsFormat.cs | 2 +- .../Pulsing/PulsingPostingsReader.cs | 10 +- .../Pulsing/PulsingPostingsWriter.cs | 20 +- .../Sep/SepPostingsReader.cs | 10 +- .../Sep/SepPostingsWriter.cs | 11 +- .../Sep/SepSkipListReader.cs | 2 +- .../Sep/SepSkipListWriter.cs | 2 +- .../SimpleText/SimpleTextDocValuesReader.cs | 47 ++-- .../SimpleText/SimpleTextDocValuesWriter.cs | 52 ++-- .../SimpleText/SimpleTextFieldInfosReader.cs | 28 +-- .../SimpleText/SimpleTextFieldInfosWriter.cs | 2 +- .../SimpleText/SimpleTextFieldsReader.cs | 12 +- .../SimpleText/SimpleTextFieldsWriter.cs | 11 +- .../SimpleText/SimpleTextLiveDocsFormat.cs | 6 +- .../SimpleText/SimpleTextSegmentInfoReader.cs | 16 +- .../SimpleTextStoredFieldsReader.cs | 14 +- .../SimpleText/SimpleTextTermVectorsReader.cs | 34 +-- .../SimpleText/SimpleTextTermVectorsWriter.cs | 4 +- .../ExpressionComparator.cs | 4 +- .../ScoreFunctionValues.cs | 2 +- src/Lucene.Net.Facet/DrillDownQuery.cs | 2 +- src/Lucene.Net.Facet/DrillSideways.cs | 2 +- src/Lucene.Net.Facet/DrillSidewaysScorer.cs | 6 +- src/Lucene.Net.Facet/FacetsConfig.cs | 2 +- .../Range/LongRangeCounter.cs | 10 +- src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs | 4 +- .../Directory/DirectoryTaxonomyWriter.cs | 6 +- .../Taxonomy/Directory/TaxonomyIndexArrays.cs | 2 +- src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs | 2 +- .../Taxonomy/FloatTaxonomyFacets.cs | 2 +- .../Taxonomy/TaxonomyReader.cs | 2 +- .../AbstractFirstPassGroupingCollector.cs | 10 +- .../BlockGroupingCollector.cs | 11 +- src/Lucene.Net.Grouping/SearchGroup.cs | 14 +- .../Term/TermGroupFacetCollector.cs | 4 +- .../MultiTermHighlighting.cs | 4 +- .../PostingsHighlight/Passage.cs | 4 +- .../PostingsHighlight/PostingsHighlighter.cs | 16 +- .../VectorHighlight/BaseFragListBuilder.cs | 2 +- .../VectorHighlight/FieldTermStack.cs | 2 +- src/Lucene.Net.Join/ToChildBlockJoinQuery.cs | 10 +- .../ToParentBlockJoinCollector.cs | 4 +- src/Lucene.Net.Join/ToParentBlockJoinQuery.cs | 4 +- .../MemoryIndex.MemoryIndexReader.cs | 13 +- src/Lucene.Net.Memory/MemoryIndex.cs | 20 +- src/Lucene.Net.Misc/Document/LazyDocument.cs | 9 +- .../Index/MultiPassIndexSplitter.cs | 2 +- src/Lucene.Net.Misc/Index/PKIndexSplitter.cs | 2 +- src/Lucene.Net.Misc/Index/Sorter/Sorter.cs | 7 +- .../Index/Sorter/SortingAtomicReader.cs | 2 +- .../Index/Sorter/SortingMergePolicy.cs | 2 +- src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs | 4 +- .../Util/Fst/UpToTwoPositiveIntOutputs.cs | 43 ++-- src/Lucene.Net.Queries/BooleanFilter.cs | 2 +- src/Lucene.Net.Queries/CommonTermsQuery.cs | 2 +- .../Processors/AnalyzerQueryNodeProcessor.cs | 10 +- .../Simple/SimpleQueryParser.cs | 4 +- .../IndexAndTaxonomyRevision.cs | 2 +- src/Lucene.Net.Replicator/IndexRevision.cs | 2 +- .../ReplicationClient.cs | 4 +- .../Queries/SortedSetSortField.cs | 2 +- .../Prefix/AbstractPrefixTreeFilter.cs | 2 +- .../AbstractVisitingPrefixTreeFilter.cs | 18 +- .../Prefix/ContainsPrefixTreeFilter.cs | 15 +- src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs | 4 +- .../Prefix/Tree/QuadPrefixTree.cs | 4 +- 
.../Prefix/Tree/SpatialPrefixTree.cs | 4 +- .../Prefix/WithinPrefixTreeFilter.cs | 7 +- .../Vector/DistanceValueSource.cs | 2 +- .../Suggest/Analyzing/AnalyzingSuggester.cs | 23 +- .../Analyzing/BlendedInfixSuggester.cs | 2 +- .../Suggest/Analyzing/FSTUtil.cs | 8 +- .../Suggest/Analyzing/FreeTextSuggester.cs | 14 +- .../Suggest/Analyzing/SuggestStopFilter.cs | 2 +- .../Suggest/Fst/FSTCompletion.cs | 2 +- .../Suggest/Fst/WFSTCompletionLookup.cs | 8 +- .../Suggest/UnsortedInputIterator.cs | 6 +- .../Analysis/LookaheadTokenFilter.cs | 22 +- .../Analysis/MockCharFilter.cs | 2 +- .../Analysis/MockReaderWrapper.cs | 6 +- .../Analysis/MockTokenizer.cs | 16 +- .../Asserting/AssertingDocValuesFormat.cs | 82 +++---- .../Codecs/Asserting/AssertingNormsFormat.cs | 6 +- .../Asserting/AssertingPostingsFormat.cs | 68 +++--- .../Asserting/AssertingStoredFieldsFormat.cs | 20 +- .../Asserting/AssertingTermVectorsFormat.cs | 46 ++-- .../Dummy/DummyCompressingCodec.cs | 2 +- .../Lucene3x/PreFlexRWFieldInfosWriter.cs | 4 +- .../Codecs/Lucene3x/PreFlexRWFieldsWriter.cs | 10 +- .../Codecs/Lucene3x/PreFlexRWNormsConsumer.cs | 2 +- .../Lucene3x/PreFlexRWStoredFieldsWriter.cs | 4 +- .../Lucene3x/PreFlexRWTermVectorsWriter.cs | 6 +- .../Codecs/Lucene3x/TermInfosWriter.cs | 12 +- .../Lucene40/Lucene40DocValuesWriter.cs | 16 +- .../Lucene40/Lucene40FieldInfosWriter.cs | 8 +- .../Codecs/Lucene40/Lucene40PostingsWriter.cs | 16 +- .../Codecs/Lucene40/Lucene40SkipListWriter.cs | 8 +- .../Lucene42/Lucene42DocValuesConsumer.cs | 2 +- .../Lucene42/Lucene42FieldInfosWriter.cs | 4 +- .../MockVariableIntBlockPostingsFormat.cs | 2 +- .../MockRandom/MockRandomPostingsFormat.cs | 2 +- .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs | 10 +- .../Index/AllDeletedFilterReader.cs | 2 +- .../Index/AssertingAtomicReader.cs | 222 +++++++++--------- .../Index/BaseDocValuesFormatTestCase.cs | 64 ++--- .../Index/BasePostingsFormatTestCase.cs | 2 +- .../Index/MockRandomMergePolicy.cs | 2 +- .../Index/RandomCodec.cs | 4 +- .../RandomDocumentsWriterPerThreadPool.cs | 10 +- .../Index/RandomIndexWriter.cs | 2 +- .../ThreadedIndexingAndSearchingTestCase.cs | 4 +- .../Search/AssertingBulkScorer.cs | 2 +- .../Search/AssertingCollector.cs | 2 +- .../Search/AssertingScorer.cs | 8 +- .../Search/QueryUtils.cs | 4 +- .../Search/RandomSimilarityProvider.cs | 2 +- .../Search/ShardSearchingTestBase.cs | 20 +- .../Store/MockDirectoryWrapper.cs | 6 +- .../JavaCompatibility/LuceneTestCase.cs | 2 +- .../Util/Automaton/AutomatonTestUtil.cs | 4 +- .../Util/BaseDocIdSetTestCase.cs | 2 +- .../Util/FailOnNonBulkMergesInfoStream.cs | 2 +- .../Util/Fst/FSTTester.cs | 8 +- .../Util/LuceneTestCase.cs | 14 +- .../Util/NullInfoStream.cs | 6 +- .../Util/TestRuleAssertionsRequired.cs | 2 +- .../Util/TestRuleSetupAndRestoreClassEnv.cs | 14 +- .../Util/ThrottledIndexOutput.cs | 2 +- .../CharFilters/TestMappingCharFilter.cs | 2 +- .../Analysis/Core/TestFactories.cs | 2 +- .../Analysis/Core/TestRandomChains.cs | 4 +- .../Analysis/Hunspell/TestAllDictionaries.cs | 12 +- .../Analysis/Hunspell/TestAllDictionaries2.cs | 12 +- .../Analysis/Synonym/TestSynonymMapFilter.cs | 2 +- src/Lucene.Net.Tests.Facet/FacetTestCase.cs | 2 +- .../Range/TestRangeFacetCounts.cs | 2 +- .../TestTaxonomyFacetSumValueSource.cs | 2 +- .../TestDrillSideways.cs | 6 +- .../GroupFacetCollectorTest.cs | 4 +- .../TestPostingsHighlighter.cs | 6 +- src/Lucene.Net.Tests.Join/TestJoinUtil.cs | 2 +- .../Classic/TestQueryParser.cs | 14 +- .../Flexible/Standard/TestStandardQP.cs | 10 +- 
.../IndexAndTaxonomyReplicationClientTest.cs | 2 +- .../IndexReplicationClientTest.cs | 2 +- .../SpatialTestCase.cs | 6 +- .../Analyzing/AnalyzingSuggesterTest.cs | 2 +- .../Suggest/Analyzing/FuzzySuggesterTest.cs | 4 +- .../Analyzing/TestFreeTextSuggester.cs | 2 +- .../Suggest/LookupBenchmarkTest.cs | 4 +- .../Analysis/TestGraphTokenizers.cs | 2 +- .../Lucene41/TestBlockPostingsFormat3.cs | 2 +- .../PerField/TestPerFieldDocValuesFormat.cs | 2 +- .../Index/TestBackwardsCompatibility.cs | 2 +- .../Index/TestBackwardsCompatibility3x.cs | 2 +- src/Lucene.Net.Tests/Index/TestCodecs.cs | 6 +- src/Lucene.Net.Tests/Index/TestIndexWriter.cs | 4 +- .../Index/TestIndexWriterMerging.cs | 2 +- .../Index/TestIndexableField.cs | 4 +- .../Index/TestLongPostings.cs | 4 +- src/Lucene.Net.Tests/Index/TestNRTThreads.cs | 2 +- src/Lucene.Net.Tests/Index/TestPayloads.cs | 2 +- .../Index/TestPayloadsOnVectors.cs | 4 +- .../Index/TestPostingsOffsets.cs | 8 +- .../Index/TestStressIndexing2.cs | 4 +- src/Lucene.Net.Tests/Index/TestStressNRT.cs | 4 +- src/Lucene.Net.Tests/Index/TestTermsEnum.cs | 2 +- .../Search/Spans/MultiSpansWrapper.cs | 2 +- .../Search/TestBooleanScorer.cs | 2 +- .../Search/TestConstantScoreQuery.cs | 2 +- src/Lucene.Net.Tests/Search/TestFieldCache.cs | 2 +- .../Search/TestMinShouldMatch2.cs | 10 +- .../Search/TestMultiThreadTermVectors.cs | 2 +- .../Search/TestNumericRangeQuery32.cs | 4 +- .../Search/TestNumericRangeQuery64.cs | 4 +- .../Search/TestTimeLimitingCollector.cs | 2 +- .../Util/Automaton/TestUTF32ToUTF8.cs | 6 +- src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs | 2 +- .../Util/Packed/TestEliasFanoDocIdSet.cs | 2 +- .../Util/Packed/TestEliasFanoSequence.cs | 6 +- src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs | 2 +- src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs | 2 +- src/Lucene.Net/Analysis/NumericTokenStream.cs | 2 +- .../Analysis/TokenStreamToAutomaton.cs | 4 +- src/Lucene.Net/Analysis/Tokenizer.cs | 2 +- src/Lucene.Net/Codecs/BlockTermState.cs | 2 +- src/Lucene.Net/Codecs/BlockTreeTermsReader.cs | 175 +++++++------- src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs | 78 +++--- src/Lucene.Net/Codecs/CodecUtil.cs | 2 +- .../CompressingStoredFieldsIndexWriter.cs | 8 +- .../CompressingStoredFieldsReader.cs | 21 +- .../CompressingStoredFieldsWriter.cs | 19 +- .../CompressingTermVectorsReader.cs | 20 +- .../CompressingTermVectorsWriter.cs | 44 ++-- .../Codecs/Compressing/CompressionMode.cs | 6 +- src/Lucene.Net/Codecs/Compressing/LZ4.cs | 18 +- src/Lucene.Net/Codecs/DocValuesConsumer.cs | 4 +- src/Lucene.Net/Codecs/FieldsConsumer.cs | 2 +- .../Codecs/Lucene3x/Lucene3xFields.cs | 46 ++-- .../Codecs/Lucene3x/Lucene3xNormsProducer.cs | 6 +- .../Lucene3x/Lucene3xSegmentInfoReader.cs | 6 +- .../Lucene3x/Lucene3xStoredFieldsReader.cs | 4 +- .../Lucene3x/Lucene3xTermVectorsReader.cs | 17 +- .../Codecs/Lucene3x/SegmentTermDocs.cs | 2 +- .../Codecs/Lucene3x/SegmentTermEnum.cs | 7 +- .../Codecs/Lucene3x/SegmentTermPositions.cs | 2 +- src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs | 6 +- .../Codecs/Lucene3x/TermInfosReader.cs | 6 +- src/Lucene.Net/Codecs/Lucene40/BitVector.cs | 24 +- .../Codecs/Lucene40/Lucene40LiveDocsFormat.cs | 14 +- .../Codecs/Lucene40/Lucene40PostingsFormat.cs | 2 +- .../Codecs/Lucene40/Lucene40PostingsReader.cs | 45 ++-- .../Lucene40/Lucene40StoredFieldsReader.cs | 11 +- .../Lucene40/Lucene40StoredFieldsWriter.cs | 13 +- .../Lucene40/Lucene40TermVectorsReader.cs | 27 ++- .../Lucene40/Lucene40TermVectorsWriter.cs | 27 ++- src/Lucene.Net/Codecs/Lucene41/ForUtil.cs | 25 +- 
.../Codecs/Lucene41/Lucene41PostingsFormat.cs | 4 +- .../Codecs/Lucene41/Lucene41PostingsReader.cs | 22 +- .../Codecs/Lucene41/Lucene41PostingsWriter.cs | 15 +- .../Codecs/Lucene41/Lucene41SkipReader.cs | 2 +- .../Codecs/Lucene42/Lucene42NormsConsumer.cs | 4 +- .../Lucene45/Lucene45DocValuesConsumer.cs | 4 +- .../Lucene46/Lucene46FieldInfosWriter.cs | 4 +- src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs | 2 +- .../Codecs/MultiLevelSkipListReader.cs | 2 +- .../Codecs/MultiLevelSkipListWriter.cs | 2 +- .../PerField/PerFieldDocValuesFormat.cs | 10 +- .../Codecs/PerField/PerFieldPostingsFormat.cs | 8 +- src/Lucene.Net/Codecs/PostingsConsumer.cs | 2 +- src/Lucene.Net/Codecs/TermVectorsWriter.cs | 19 +- src/Lucene.Net/Codecs/TermsConsumer.cs | 10 +- src/Lucene.Net/Index/AtomicReader.cs | 11 +- src/Lucene.Net/Index/AtomicReaderContext.cs | 2 +- src/Lucene.Net/Index/AutomatonTermsEnum.cs | 8 +- src/Lucene.Net/Index/BitsSlice.cs | 4 +- src/Lucene.Net/Index/BufferedUpdatesStream.cs | 57 +++-- src/Lucene.Net/Index/ByteSliceReader.cs | 22 +- src/Lucene.Net/Index/ByteSliceWriter.cs | 12 +- src/Lucene.Net/Index/CheckIndex.cs | 30 +-- src/Lucene.Net/Index/CompositeReader.cs | 4 +- .../Index/CompositeReaderContext.cs | 4 +- .../Index/ConcurrentMergeScheduler.cs | 2 +- src/Lucene.Net/Index/DirectoryReader.cs | 6 +- src/Lucene.Net/Index/DocFieldProcessor.cs | 8 +- src/Lucene.Net/Index/DocTermOrds.cs | 20 +- src/Lucene.Net/Index/DocValuesFieldUpdates.cs | 4 +- src/Lucene.Net/Index/DocValuesProcessor.cs | 4 +- src/Lucene.Net/Index/DocumentsWriter.cs | 41 ++-- .../Index/DocumentsWriterDeleteQueue.cs | 6 +- .../Index/DocumentsWriterFlushControl.cs | 92 +++++--- .../Index/DocumentsWriterFlushQueue.cs | 43 ++-- .../Index/DocumentsWriterPerThread.cs | 39 +-- .../Index/DocumentsWriterPerThreadPool.cs | 24 +- .../Index/DocumentsWriterStallControl.cs | 8 +- src/Lucene.Net/Index/FieldInfo.cs | 31 +-- src/Lucene.Net/Index/FieldInfos.cs | 13 +- src/Lucene.Net/Index/FilteredTermsEnum.cs | 6 +- src/Lucene.Net/Index/FlushPolicy.cs | 6 +- src/Lucene.Net/Index/FreqProxTermsWriter.cs | 2 +- .../Index/FreqProxTermsWriterPerField.cs | 51 ++-- src/Lucene.Net/Index/FrozenBufferedUpdates.cs | 6 +- src/Lucene.Net/Index/IndexFileDeleter.cs | 42 ++-- src/Lucene.Net/Index/IndexFileNames.cs | 4 +- .../Index/IndexFormatTooNewException.cs | 2 +- .../Index/IndexFormatTooOldException.cs | 4 +- src/Lucene.Net/Index/IndexWriter.cs | 165 +++++++------ src/Lucene.Net/Index/LogMergePolicy.cs | 8 +- src/Lucene.Net/Index/MergePolicy.cs | 8 +- src/Lucene.Net/Index/MergeState.cs | 4 +- src/Lucene.Net/Index/MultiBits.cs | 15 +- src/Lucene.Net/Index/MultiDocValues.cs | 16 +- .../Index/MultiDocsAndPositionsEnum.cs | 4 +- src/Lucene.Net/Index/MultiDocsEnum.cs | 2 +- src/Lucene.Net/Index/MultiFields.cs | 16 +- src/Lucene.Net/Index/MultiTerms.cs | 2 +- src/Lucene.Net/Index/MultiTermsEnum.cs | 20 +- src/Lucene.Net/Index/NormsConsumer.cs | 6 +- .../Index/NumericDocValuesFieldUpdates.cs | 2 +- src/Lucene.Net/Index/OrdTermState.cs | 2 +- .../Index/ParallelCompositeReader.cs | 2 +- src/Lucene.Net/Index/PrefixCodedTerms.cs | 4 +- src/Lucene.Net/Index/ReadersAndUpdates.cs | 54 +++-- src/Lucene.Net/Index/SegmentCoreReaders.cs | 6 +- src/Lucene.Net/Index/SegmentDocValues.cs | 4 +- src/Lucene.Net/Index/SegmentInfo.cs | 4 +- src/Lucene.Net/Index/SegmentInfos.cs | 10 +- src/Lucene.Net/Index/SegmentMerger.cs | 4 +- src/Lucene.Net/Index/SegmentReader.cs | 12 +- .../Index/SimpleMergedSegmentWarmer.cs | 2 +- .../Index/SingletonSortedSetDocValues.cs | 2 +- 
.../Index/SlowCompositeReaderWrapper.cs | 4 +- .../Index/SnapshotDeletionPolicy.cs | 2 +- .../Index/SortedDocValuesTermsEnum.cs | 4 +- src/Lucene.Net/Index/SortedDocValuesWriter.cs | 4 +- .../Index/SortedSetDocValuesTermsEnum.cs | 4 +- .../Index/SortedSetDocValuesWriter.cs | 4 +- .../Index/StandardDirectoryReader.cs | 9 +- src/Lucene.Net/Index/StoredFieldsProcessor.cs | 6 +- src/Lucene.Net/Index/TermContext.cs | 15 +- src/Lucene.Net/Index/TermVectorsConsumer.cs | 12 +- .../Index/TermVectorsConsumerPerField.cs | 12 +- src/Lucene.Net/Index/TermsHashPerField.cs | 6 +- ...ThreadAffinityDocumentsWriterThreadPool.cs | 6 +- src/Lucene.Net/Search/CachingWrapperFilter.cs | 2 +- src/Lucene.Net/Search/CollectionStatistics.cs | 11 +- .../Search/ConstantScoreAutoRewrite.cs | 6 +- src/Lucene.Net/Search/ConstantScoreQuery.cs | 12 +- src/Lucene.Net/Search/DisjunctionScorer.cs | 4 +- src/Lucene.Net/Search/DocIdSetIterator.cs | 11 +- .../Search/DocTermOrdsRangeFilter.cs | 2 +- .../Search/DocTermOrdsRewriteMethod.cs | 2 +- src/Lucene.Net/Search/ExactPhraseScorer.cs | 2 +- src/Lucene.Net/Search/FieldCacheImpl.cs | 12 +- .../Search/FieldCacheRangeFilter.cs | 4 +- .../Search/FieldCacheRewriteMethod.cs | 2 +- src/Lucene.Net/Search/FieldComparator.cs | 12 +- src/Lucene.Net/Search/FieldValueHitQueue.cs | 14 +- src/Lucene.Net/Search/FilteredQuery.cs | 8 +- src/Lucene.Net/Search/FuzzyTermsEnum.cs | 2 +- src/Lucene.Net/Search/IndexSearcher.cs | 6 +- .../Search/MinShouldMatchSumScorer.cs | 4 +- src/Lucene.Net/Search/MultiPhraseQuery.cs | 4 +- .../Search/MultiTermQueryWrapperFilter.cs | 2 +- src/Lucene.Net/Search/NumericRangeQuery.cs | 14 +- src/Lucene.Net/Search/PhraseQuery.cs | 6 +- src/Lucene.Net/Search/QueryRescorer.cs | 2 +- src/Lucene.Net/Search/ReferenceManager.cs | 6 +- src/Lucene.Net/Search/ReqOptSumScorer.cs | 7 +- src/Lucene.Net/Search/ScoringRewrite.cs | 10 +- src/Lucene.Net/Search/SearcherManager.cs | 4 +- .../Search/Similarities/SimilarityBase.cs | 2 +- src/Lucene.Net/Search/SloppyPhraseScorer.cs | 4 +- src/Lucene.Net/Search/SortField.cs | 2 +- src/Lucene.Net/Search/SortRescorer.cs | 2 +- .../Search/Spans/NearSpansOrdered.cs | 6 +- src/Lucene.Net/Search/Spans/SpanFirstQuery.cs | 2 +- .../Search/Spans/SpanPositionRangeQuery.cs | 2 +- src/Lucene.Net/Search/Spans/TermSpans.cs | 2 +- .../Search/TermCollectingRewrite.cs | 2 +- src/Lucene.Net/Search/TermQuery.cs | 10 +- src/Lucene.Net/Search/TermScorer.cs | 2 +- src/Lucene.Net/Search/TermStatistics.cs | 7 +- src/Lucene.Net/Search/TopDocs.cs | 10 +- src/Lucene.Net/Search/TopScoreDocCollector.cs | 20 +- src/Lucene.Net/Search/TopTermsRewrite.cs | 18 +- src/Lucene.Net/Store/BaseDirectory.cs | 2 +- src/Lucene.Net/Store/BufferedIndexInput.cs | 2 +- src/Lucene.Net/Store/ByteArrayDataOutput.cs | 4 +- src/Lucene.Net/Store/ByteBufferIndexInput.cs | 11 +- src/Lucene.Net/Store/CompoundFileDirectory.cs | 8 +- src/Lucene.Net/Store/CompoundFileWriter.cs | 18 +- src/Lucene.Net/Store/DataInput.cs | 2 +- src/Lucene.Net/Store/DataOutput.cs | 4 +- src/Lucene.Net/Store/IOContext.cs | 9 +- src/Lucene.Net/Store/MMapDirectory.cs | 2 +- src/Lucene.Net/Store/NIOFSDirectory.cs | 6 +- src/Lucene.Net/Store/RAMOutputStream.cs | 2 +- .../Store/RateLimitedDirectoryWrapper.cs | 2 +- src/Lucene.Net/Store/SimpleFSDirectory.cs | 4 +- src/Lucene.Net/Support/BitArrayExtensions.cs | 2 +- src/Lucene.Net/Support/Collections.cs | 2 +- src/Lucene.Net/Util/ArrayUtil.cs | 40 ++-- src/Lucene.Net/Util/AttributeSource.cs | 2 +- src/Lucene.Net/Util/Automaton/Automaton.cs | 4 +- 
.../Util/Automaton/BasicOperations.cs | 8 +- .../Util/Automaton/CompiledAutomaton.cs | 8 +- .../Automaton/DaciukMihovAutomatonBuilder.cs | 17 +- .../Automaton/Lev1ParametricDescription.cs | 4 +- .../Automaton/Lev1TParametricDescription.cs | 4 +- .../Automaton/Lev2ParametricDescription.cs | 4 +- .../Automaton/Lev2TParametricDescription.cs | 4 +- .../Util/Automaton/LevenshteinAutomata.cs | 2 +- src/Lucene.Net/Util/Automaton/SortedIntSet.cs | 2 +- src/Lucene.Net/Util/Automaton/State.cs | 2 +- src/Lucene.Net/Util/Automaton/Transition.cs | 9 +- src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs | 7 +- src/Lucene.Net/Util/BroadWord.cs | 4 +- src/Lucene.Net/Util/ByteBlockPool.cs | 2 +- src/Lucene.Net/Util/BytesRef.cs | 12 +- src/Lucene.Net/Util/BytesRefArray.cs | 2 +- src/Lucene.Net/Util/BytesRefHash.cs | 44 ++-- src/Lucene.Net/Util/CharsRef.cs | 4 +- src/Lucene.Net/Util/FilterIterator.cs | 2 +- src/Lucene.Net/Util/FixedBitSet.cs | 39 +-- src/Lucene.Net/Util/Fst/Builder.cs | 56 +++-- .../Util/Fst/ByteSequenceOutputs.cs | 34 ++- src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs | 2 +- src/Lucene.Net/Util/Fst/BytesStore.cs | 24 +- .../Util/Fst/CharSequenceOutputs.cs | 37 ++- src/Lucene.Net/Util/Fst/FST.cs | 54 +++-- src/Lucene.Net/Util/Fst/FSTEnum.cs | 29 ++- src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs | 37 ++- src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs | 2 +- src/Lucene.Net/Util/Fst/NoOutputs.cs | 28 ++- src/Lucene.Net/Util/Fst/NodeHash.cs | 2 +- src/Lucene.Net/Util/Fst/PairOutputs.cs | 27 ++- src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs | 32 ++- src/Lucene.Net/Util/Fst/Util.cs | 12 +- .../Util/IndexableBinaryStringTools.cs | 4 +- src/Lucene.Net/Util/InfoStream.cs | 2 +- src/Lucene.Net/Util/IntBlockPool.cs | 17 +- src/Lucene.Net/Util/IntsRef.cs | 4 +- src/Lucene.Net/Util/LongBitSet.cs | 39 +-- src/Lucene.Net/Util/LongsRef.cs | 4 +- src/Lucene.Net/Util/MergedIterator.cs | 2 +- src/Lucene.Net/Util/OfflineSorter.cs | 15 +- src/Lucene.Net/Util/OpenBitSet.cs | 34 +-- src/Lucene.Net/Util/PForDeltaDocIdSet.cs | 34 +-- .../Packed/AbstractAppendingLongBuffer.cs | 13 +- .../Util/Packed/AbstractBlockPackedWriter.cs | 2 +- .../Util/Packed/AbstractPagedMutable.cs | 6 +- .../Util/Packed/BlockPackedReader.cs | 2 +- .../Util/Packed/BlockPackedReaderIterator.cs | 12 +- .../Util/Packed/BlockPackedWriter.cs | 2 +- src/Lucene.Net/Util/Packed/BulkOperation.cs | 4 +- .../Util/Packed/BulkOperationPacked.cs | 16 +- src/Lucene.Net/Util/Packed/Direct16.cs | 20 +- src/Lucene.Net/Util/Packed/Direct32.cs | 20 +- src/Lucene.Net/Util/Packed/Direct64.cs | 18 +- src/Lucene.Net/Util/Packed/Direct8.cs | 20 +- .../Util/Packed/EliasFanoDecoder.cs | 28 +-- .../Util/Packed/EliasFanoEncoder.cs | 4 +- src/Lucene.Net/Util/Packed/GrowableWriter.cs | 2 +- .../Packed/MonotonicAppendingLongBuffer.cs | 2 +- .../Util/Packed/MonotonicBlockPackedReader.cs | 2 +- .../Util/Packed/MonotonicBlockPackedWriter.cs | 4 +- .../Util/Packed/Packed16ThreeBlocks.cs | 18 +- src/Lucene.Net/Util/Packed/Packed64.cs | 47 ++-- .../Util/Packed/Packed64SingleBlock.cs | 52 ++-- .../Util/Packed/Packed8ThreeBlocks.cs | 18 +- src/Lucene.Net/Util/Packed/PackedDataInput.cs | 2 +- .../Util/Packed/PackedDataOutput.cs | 2 +- src/Lucene.Net/Util/Packed/PackedInts.cs | 88 ++++--- .../Util/Packed/PackedReaderIterator.cs | 11 +- src/Lucene.Net/Util/Packed/PackedWriter.cs | 9 +- src/Lucene.Net/Util/Packed/PagedMutable.cs | 2 +- src/Lucene.Net/Util/PagedBytes.cs | 17 +- src/Lucene.Net/Util/QueryBuilder.cs | 12 +- src/Lucene.Net/Util/RamUsageEstimator.cs | 18 +- 
.../Util/RecyclingByteBlockAllocator.cs | 6 +- .../Util/RecyclingIntBlockAllocator.cs | 6 +- src/Lucene.Net/Util/RollingBuffer.cs | 9 +- src/Lucene.Net/Util/SentinelIntSet.cs | 4 +- src/Lucene.Net/Util/Sorter.cs | 2 +- src/Lucene.Net/Util/TimSorter.cs | 18 +- src/Lucene.Net/Util/UnicodeUtil.cs | 4 +- src/Lucene.Net/Util/WAH8DocIdSet.cs | 70 ++++-- .../TestICUPostingsHighlighter.cs | 9 +- 506 files changed, 3167 insertions(+), 2561 deletions(-) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs index bd3fa5beb1..f490dc0f37 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs @@ -114,7 +114,7 @@ protected virtual void AddOffCorrectMap(int off, int cumulativeDiff) } int offset = offsets[(size == 0) ? 0 : size - 1]; - Debugging.Assert(() => size == 0 || off >= offset, + if (Debugging.AssertsEnabled) Debugging.Assert(() => size == 0 || off >= offset, () => "Offset #" + size + "(" + off + ") is less than the last recorded offset " + offset + "\n" + Arrays.ToString(offsets) + "\n" + Arrays.ToString(diffs)); if (size == 0 || off != offsets[size - 1]) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs index eb28c1f57f..611e72b08c 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs @@ -30953,7 +30953,7 @@ internal void Restart() /// internal int NextChar() { - Debugging.Assert(() => !IsRead, () => "Attempting to read past the end of a segment."); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !IsRead, () => "Attempting to read past the end of a segment."); return m_buf[pos++]; } @@ -31378,7 +31378,7 @@ private int NextChar() } catch (Exception /*e*/) { - Debugging.Assert(() => false, () => "Exception parsing code point '" + decimalCharRef + "'"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing code point '" + decimalCharRef + "'"); } if (codePoint <= 0x10FFFF) { @@ -31638,7 +31638,7 @@ string hexCharRef } catch (Exception /*e*/) { - Debugging.Assert(() => false, () => "Exception parsing hex code point '" + hexCharRef + "'"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing hex code point '" + hexCharRef + "'"); } if (codePoint <= 0x10FFFF) { @@ -31901,7 +31901,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(2, 6 - 2) + "'"); } try @@ -31910,7 +31910,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); } // add (previously matched input length) + (this match length) - (substitution length) cumulativeDiff += inputSegment.Length + YyLength - 2; @@ -31932,7 +31932,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debugging.Assert(() => false, () => 
"Exception parsing high surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(2, 6 - 2) + "'"); } try @@ -31941,7 +31941,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(9, 14 - 9) + "'"); } if (char.IsLowSurrogate(lowSurrogate)) @@ -31973,7 +31973,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(1, 6 - 1) + "'"); } if (char.IsHighSurrogate(highSurrogate)) @@ -31986,7 +31986,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); } // add (previously matched input length) + (this match length) - (substitution length) @@ -32013,7 +32013,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(1, 6 - 1) + "'"); } if (char.IsHighSurrogate(highSurrogate)) @@ -32025,7 +32025,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(9, 14 - 9) + "'"); } if (char.IsLowSurrogate(lowSurrogate)) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs index 42d03df016..a921887db2 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs @@ -136,7 +136,7 @@ public override int Read() if (!FST.TargetHasArcs(arc)) { // Fast pass for single character match: - Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); lastMatchLen = 1; lastMatch = arc.Output; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs index 3858445026..7186c94660 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs @@ -54,7 +54,7 @@ private NormalizeCharMap(FST map) map.ReadFirstRealTargetArc(scratchArc.Target, scratchArc, fstReader); while (true) { - Debugging.Assert(() => scratchArc.Label != FST.END_LABEL); + if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchArc.Label != FST.END_LABEL); cachedRootArcs[Convert.ToChar((char)scratchArc.Label)] = (new FST.Arc()).CopyFrom(scratchArc); if (scratchArc.IsLast) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs 
b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs index 518651ba14..b60791e833 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs @@ -110,7 +110,7 @@ public override sealed bool IncrementToken() { if (m_tokens.Count > 0) { - Debugging.Assert(() => current != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => current != null); CompoundToken token = m_tokens.Dequeue(); RestoreState(current); // keep all other attributes untouched m_termAtt.SetEmpty().Append(token.Text); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs index 7aa674ebd8..bf49cde4a2 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs @@ -47,7 +47,7 @@ static GalicianStemmer() /// new valid length, stemmed public virtual int Stem(char[] s, int len) { - Debugging.Assert(() => s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); len = plural.Apply(s, len); len = unification.Apply(s, len); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs index f770e91df6..305f0d1e13 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs @@ -375,7 +375,7 @@ private void ReadAffixFile(Stream affixStream, Encoding decoder) strip.CopyTo(0, stripData, currentOffset, strip.Length - 0); currentOffset += strip.Length; } - Debugging.Assert(() => currentIndex == seenStrips.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentIndex == seenStrips.Count); stripOffsets[currentIndex] = currentOffset; } @@ -424,7 +424,7 @@ private void ParseAffix(JCG.SortedDictionary> affixes, stri for (int i = 0; i < numLines; i++) { - Debugging.Assert(() => affixWriter.Position == currentAffix << 3); + if (Debugging.AssertsEnabled) Debugging.Assert(() => affixWriter.Position == currentAffix << 3); string line = reader.ReadLine(); string[] ruleArgs = whitespacePattern.Split(line).TrimEnd(); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs index 0b40b56818..4ad0ee10a8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs @@ -210,7 +210,7 @@ private IList Stem(char[] word, int length, int previous, int prevFlag // cross check incoming continuation class (flag of previous affix) against list. dictionary.flagLookup.Get(append, scratch); char[] appendFlags = Dictionary.DecodeFlags(scratch); - Debugging.Assert(() => prevFlag >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => prevFlag >= 0); compatible = HasCrossCheckedFlag((char)prevFlag, appendFlags, false); } else @@ -279,7 +279,7 @@ private IList Stem(char[] word, int length, int previous, int prevFlag // cross check incoming continuation class (flag of previous affix) against list. 
dictionary.flagLookup.Get(append, scratch); char[] appendFlags = Dictionary.DecodeFlags(scratch); - Debugging.Assert(() => prevFlag >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => prevFlag >= 0); compatible = HasCrossCheckedFlag((char)prevFlag, appendFlags, previousWasPrefix); } else diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs index d47ed475ab..aa76247b45 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs @@ -92,7 +92,7 @@ public override bool IncrementToken() { if (state != null) { - Debugging.Assert(() => preserveOriginal, () => "state should only be captured if preserveOriginal is true"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => preserveOriginal, () => "state should only be captured if preserveOriginal is true"); RestoreState(state); posIncAttr.PositionIncrement = 0; state = null; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs index 4855e8dcc7..225fae8a66 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs @@ -35,11 +35,11 @@ public sealed class SingleTokenTokenStream : TokenStream public SingleTokenTokenStream(Token token) : base(Token.TOKEN_ATTRIBUTE_FACTORY) { - Debugging.Assert(() => token != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => token != null); this.singleToken = (Token)token.Clone(); tokenAtt = AddAttribute(); - Debugging.Assert(() => tokenAtt is Token); + if (Debugging.AssertsEnabled) Debugging.Assert(() => tokenAtt is Token); } public override sealed bool IncrementToken() diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs index 132720008c..60bc879315 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs @@ -232,7 +232,7 @@ public override sealed bool IncrementToken() { if (bufferStart + 1 + minGram > bufferEnd) { - Debugging.Assert(() => exhausted); + if (Debugging.AssertsEnabled) Debugging.Assert(() => exhausted); return false; } Consume(); @@ -295,7 +295,7 @@ protected virtual bool IsTokenChar(int chr) public override sealed void End() { base.End(); - Debugging.Assert(() => bufferStart <= bufferEnd); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferStart <= bufferEnd); int endOffset = offset; for (int i = bufferStart; i < bufferEnd; ++i) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs index 8ca48d4c30..e901768536 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs @@ -155,7 +155,7 @@ public override bool IncrementToken() { if (currentMatcher != -1 && NextCapture()) { - Debugging.Assert(() => state != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null); ClearAttributes(); RestoreState(state); int start = 
matchers[currentMatcher].Groups[currentGroup[currentMatcher]].Index; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs index cdd1eced4d..8943de0589 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs @@ -46,7 +46,7 @@ static PortugueseStemmer() /// new valid length, stemmed public virtual int Stem(char[] s, int len) { - Debugging.Assert(() => s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); len = plural.Apply(s, len); len = adverb.Apply(s, len); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs index f1b4d6a795..594c88bf68 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs @@ -305,7 +305,7 @@ private static Step ParseStep(TextReader r, string header) { throw new Exception("Illegal Step header specified at line " /*+ r.LineNumber*/); // TODO Line number } - //Debugging.Assert(() => headerPattern.GetGroupNumbers().Length == 4); // Not possible to read the number of groups that matched in .NET + //if (Debugging.AssertsEnabled) Debugging.Assert(() => headerPattern.GetGroupNumbers().Length == 4); // Not possible to read the number of groups that matched in .NET string name = matcher.Groups[1].Value; int min = int.Parse(matcher.Groups[2].Value, CultureInfo.InvariantCulture); int type = int.Parse(matcher.Groups[3].Value, CultureInfo.InvariantCulture); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs index 52adde9099..a0c0786d5b 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs @@ -176,7 +176,7 @@ public virtual void Reset() public virtual CharsRef PullNext() { - Debugging.Assert(() => upto < count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto < count); lastEndOffset = endOffsets[upto]; lastPosLength = posLengths[upto]; CharsRef result = outputs[upto++]; @@ -306,7 +306,7 @@ private void Capture() nextWrite = RollIncr(nextWrite); // Buffer head should never catch up to tail: - Debugging.Assert(() => nextWrite != nextRead); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextWrite != nextRead); } /* @@ -325,7 +325,7 @@ private void Parse() { //System.out.println("\nS: parse"); - Debugging.Assert(() => inputSkipCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => inputSkipCount == 0); int curNextRead = nextRead; @@ -337,7 +337,7 @@ private void Parse() BytesRef pendingOutput = fst.Outputs.NoOutput; fst.GetFirstArc(scratchArc); - Debugging.Assert(() => scratchArc.Output == fst.Outputs.NoOutput); + if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchArc.Output == fst.Outputs.NoOutput); int tokenCount = 0; @@ -364,7 +364,7 @@ private void Parse() else { //System.out.println(" input.incrToken"); - Debugging.Assert(() => futureInputs[nextWrite].consumed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => futureInputs[nextWrite].consumed); // Not correct: a syn match whose output is longer // than its input can set future inputs 
keepOrig // to true: @@ -480,7 +480,7 @@ private void Parse() } else { - Debugging.Assert(() => finished); + if (Debugging.AssertsEnabled) Debugging.Assert(() => finished); } //System.out.println(" parse done inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead + " nextWrite=" + nextWrite); @@ -510,7 +510,7 @@ private void AddOutput(BytesRef bytes, int matchInputLength, int matchEndOffset) int outputLen = chIDX - lastStart; // Caller is not allowed to have empty string in // the output: - Debugging.Assert(() => outputLen > 0, () => "output contains empty string: " + scratchChars); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outputLen > 0, () => "output contains empty string: " + scratchChars); int endOffset; int posLen; if (chIDX == chEnd && lastStart == scratchChars.Offset) @@ -536,7 +536,7 @@ private void AddOutput(BytesRef bytes, int matchInputLength, int matchEndOffset) lastStart = 1 + chIDX; //System.out.println(" slot=" + outputUpto + " keepOrig=" + keepOrig); outputUpto = RollIncr(outputUpto); - Debugging.Assert(() => futureOutputs[outputUpto].posIncr == 1, () => "outputUpto=" + outputUpto + " vs nextWrite=" + nextWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(() => futureOutputs[outputUpto].posIncr == 1, () => "outputUpto=" + outputUpto + " vs nextWrite=" + nextWrite); } } } @@ -602,7 +602,7 @@ public override bool IncrementToken() { // Pass-through case: return token we just pulled // but didn't capture: - Debugging.Assert(() => inputSkipCount == 1, () => "inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead); + if (Debugging.AssertsEnabled) Debugging.Assert(() => inputSkipCount == 1, () => "inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead); } input.Reset(); if (outputs.count > 0) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs index 99c743a35a..539233ee90 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs @@ -173,8 +173,11 @@ internal virtual void Add(CharsRef input, int numInputWords, CharsRef output, in throw new ArgumentException("output.length must be > 0 (got " + output.Length + ")"); } - Debugging.Assert(() => !HasHoles(input), () => "input has holes: " + input); - Debugging.Assert(() => !HasHoles(output), () => "output has holes: " + output); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !HasHoles(input), () => "input has holes: " + input); + Debugging.Assert(() => !HasHoles(output), () => "output has holes: " + output); + } //System.out.println("fmap.add input=" + input + " numInputWords=" + numInputWords + " output=" + output + " numOutputWords=" + numOutputWords); UnicodeUtil.UTF16toUTF8(output.Chars, output.Offset, output.Length, utf8Scratch); @@ -281,7 +284,7 @@ public virtual SynonymMap Build() scratch.Grow(estimatedSize); scratchOutput.Reset(scratch.Bytes, scratch.Offset, scratch.Bytes.Length); - Debugging.Assert(() => scratch.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => scratch.Offset == 0); // now write our output data: int count = 0; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs index 434d2c420a..a4be3e6b85 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs @@ -632,7 +632,7 @@ public virtual void PutAll(IEnumerable> 
collection) private void Rehash() { - Debugging.Assert(() => keys.Length == values.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => keys.Length == values.Length); int newSize = 2 * keys.Length; char[][] oldkeys = keys; MapValue[] oldvalues = values; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs index 9669dd5522..9eda6068b5 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs @@ -169,7 +169,7 @@ public override sealed bool IncrementToken() { if (length == 0) // start of token { - Debugging.Assert(() => start == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => start == -1); start = offset + bufferIndex - charCount; end = start; } // check if a supplementary could run out of bounds @@ -191,7 +191,7 @@ public override sealed bool IncrementToken() } termAtt.Length = length; - Debugging.Assert(() => start != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => start != -1); offsetAtt.SetOffset(CorrectOffset(start), finalOffset = CorrectOffset(end)); return true; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs index 9c2f5054b1..38229512cd 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs @@ -176,8 +176,11 @@ public static CharacterBuffer NewCharacterBuffer(int bufferSize) /// the number of characters in the buffer to lower case public virtual void ToLower(char[] buffer, int offset, int length) // LUCENENET specific - marked virtual so we can override the default { - Debugging.Assert(() => buffer.Length >= length); - Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => buffer.Length >= length); + Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); + } // Slight optimization, eliminating a few method calls internally CultureInfo.InvariantCulture.TextInfo @@ -207,8 +210,11 @@ public static CharacterBuffer NewCharacterBuffer(int bufferSize) /// the number of characters in the buffer to lower case public virtual void ToUpper(char[] buffer, int offset, int length) // LUCENENET specific - marked virtual so we can override the default { - Debugging.Assert(() => buffer.Length >= length); - Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => buffer.Length >= length); + Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); + } // Slight optimization, eliminating a few method calls internally CultureInfo.InvariantCulture.TextInfo @@ -349,7 +355,7 @@ public override int CodePointAt(char[] chars, int offset, int limit) public override bool Fill(CharacterBuffer buffer, TextReader reader, int numChars) { - Debugging.Assert(() => buffer.Buffer.Length >= 2); + if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer.Buffer.Length >= 2); if (numChars < 2 || numChars > buffer.Buffer.Length) { throw new ArgumentException("numChars must be >= 2 and <= the buffer size"); @@ -470,7 +476,7 @@ public override int CodePointAt(char[] chars, int offset, int limit) public override bool Fill(CharacterBuffer buffer, TextReader reader, int numChars) { - Debugging.Assert(() => buffer.Buffer.Length >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 
buffer.Buffer.Length >= 1); if (numChars < 1 || numChars > buffer.Buffer.Length) { throw new ArgumentException("numChars must be >= 1 and <= the buffer size"); @@ -533,8 +539,11 @@ private class Java4CharacterUtilsBWCompatibility : Java4CharacterUtils { public override void ToLower(char[] buffer, int offset, int limit) { - Debugging.Assert(() => buffer.Length >= limit); - Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => buffer.Length >= limit); + Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); + } for (int i = offset; i < limit;) { @@ -546,8 +555,11 @@ public override void ToLower(char[] buffer, int offset, int limit) public override void ToUpper(char[] buffer, int offset, int limit) { - Debugging.Assert(() => buffer.Length >= limit); - Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => buffer.Length >= limit); + Debugging.Assert(() => offset <= 0 && offset <= buffer.Length); + } for (int i = offset; i < limit;) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs index e4a708698d..49d2d81e2f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs @@ -108,10 +108,10 @@ public int Get(int pos) else { // Cannot read from future (except by 1): - Debugging.Assert(() => pos < nextPos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < nextPos); // Cannot read from already freed past: - Debugging.Assert(() => nextPos - pos <= count, () => "nextPos=" + nextPos + " pos=" + pos + " count=" + count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextPos - pos <= count, () => "nextPos=" + nextPos + " pos=" + pos + " count=" + count); return buffer[GetIndex(pos)]; } @@ -130,15 +130,18 @@ private int GetIndex(int pos) { // Wrap: index += buffer.Length; - Debugging.Assert(() => index >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0); } return index; } public char[] Get(int posStart, int length) { - Debugging.Assert(() => length > 0); - Debugging.Assert(() => InBounds(posStart), () => "posStart=" + posStart + " length=" + length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => length > 0); + Debugging.Assert(() => InBounds(posStart), () => "posStart=" + posStart + " length=" + length); + } //System.out.println(" buffer.Get posStart=" + posStart + " len=" + length); int startIndex = GetIndex(posStart); @@ -166,11 +169,17 @@ public char[] Get(int posStart, int length) /// public void FreeBefore(int pos) { - Debugging.Assert(() => pos >= 0); - Debugging.Assert(() => pos <= nextPos); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => pos >= 0); + Debugging.Assert(() => pos <= nextPos); + } int newCount = nextPos - pos; - Debugging.Assert(() => newCount <= count, () => "newCount=" + newCount + " count=" + count); - Debugging.Assert(() => newCount <= buffer.Length, () => "newCount=" + newCount + " buf.length=" + buffer.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => newCount <= count, () => "newCount=" + newCount + " count=" + count); + Debugging.Assert(() => newCount <= buffer.Length, () => "newCount=" + newCount + " buf.length=" + buffer.Length); + } count = newCount; } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs 
b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs index 23cbfae930..39c6594569 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs @@ -184,7 +184,7 @@ private void Refill() /// commons-io's readFully, but without bugs if offset != 0 private static int Read(TextReader input, char[] buffer, int offset, int length) { - Debugging.Assert(() => length >= 0, () => "length must not be negative: " + length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "length must not be negative: " + length); int remaining = length; while (remaining > 0) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs index 9c8d61cddc..d4eb3e7612 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs @@ -117,7 +117,7 @@ public static bool EndsWith(char[] s, int len, char[] suffix) /// length of input buffer after deletion public static int Delete(char[] s, int pos, int len) { - Debugging.Assert(() => pos < len); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < len); if (pos < len - 1) // don't arraycopy if asked to delete last character { Array.Copy(s, pos + 1, s, pos, len - pos - 1); @@ -135,7 +135,7 @@ public static int Delete(char[] s, int pos, int len) /// length of input buffer after deletion public static int DeleteN(char[] s, int pos, int len, int nChars) { - Debugging.Assert(() => pos + nChars <= len); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos + nChars <= len); if (pos + nChars < len) // don't arraycopy if asked to delete the last characters { Array.Copy(s, pos + nChars, s, pos, len - pos - nChars); diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs index 981e073186..fcd4bb03ff 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs @@ -128,7 +128,7 @@ private void ReadInputToBuffer() bool hasRemainingChars = CharacterUtils.GetInstance(LuceneVersion.LUCENE_CURRENT).Fill(tmpBuffer, m_input); #pragma warning restore 612, 618 - Debugging.Assert(() => tmpBuffer.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => tmpBuffer.Offset == 0); inputBuffer.Append(tmpBuffer.Buffer, 0, tmpBuffer.Length); if (hasRemainingChars == false) diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs index 213fa2f1ce..9eb4d514db 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs @@ -194,7 +194,7 @@ private void Refill() /// commons-io's readFully, but without bugs if offset != 0 private static int Read(TextReader input, char[] buffer, int offset, int length) { - Debugging.Assert(() => length >= 0, () => "length must not be negative: " + length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "length must not be negative: " + length); int remaining = length; while (remaining > 0) diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs 
index c4870896d1..8629acd1f8 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs @@ -102,7 +102,7 @@ public ICUTokenizerFactory(IDictionary args) public virtual void Inform(IResourceLoader loader) { - Debugging.Assert(() => tailored != null, () => "init must be called first!"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => tailored != null, () => "init must be called first!"); if (tailored.Count == 0) { config = new DefaultICUTokenizerConfig(cjkAsWords, myanmarAsWords); @@ -162,7 +162,7 @@ private BreakIterator ParseRules(string filename, IResourceLoader loader) public override Tokenizer Create(AttributeSource.AttributeFactory factory, TextReader input) { - Debugging.Assert(() => config != null, () => "inform must be called first!"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => config != null, () => "inform must be called first!"); return new ICUTokenizer(factory, input, config); } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs index 04ca42272e..354c377f02 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs @@ -75,7 +75,7 @@ public TokenInfoFST(FST fst, bool fasterButMoreRam) { if (useCache && ch >= 0x3040 && ch <= cacheCeiling) { - Debugging.Assert(() => ch != FST.END_LABEL); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ch != FST.END_LABEL); FST.Arc result = rootCache[ch - 0x3040]; if (result == null) { diff --git a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs index aca253ce4a..617f748c5a 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs @@ -89,8 +89,11 @@ private void SetBestPathMap(WrappedPositionArray positions, int startPos, Positi string toNodeID = GetNodeID(pos, bestIDX); string fromNodeID = GetNodeID(backPos, backIDX); - Debugging.Assert(() => !bestPathMap.ContainsKey(fromNodeID)); - Debugging.Assert(() => !bestPathMap.Values.Contains(toNodeID)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !bestPathMap.ContainsKey(fromNodeID)); + Debugging.Assert(() => !bestPathMap.Values.Contains(toNodeID)); + } bestPathMap[fromNodeID] = toNodeID; pos = backPos; bestIDX = backIDX; diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs index 931d70ecce..fb899c70f2 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs @@ -133,7 +133,7 @@ static JapaneseIterationMarkCharFilter() // Make katakana dakuten map from hiragana map char codePointDifference = (char)('\u30ab' - '\u304b'); // カ - か - Debugging.Assert(() => h2d.Length == k2d.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => h2d.Length == k2d.Length); for (int i = 0; i < k2d.Length; i++) { k2d[i] = (char)(h2d[i] + codePointDifference); diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs index 2d3c99321e..8210652f07 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs @@ -314,7 +314,7 @@ private void Add(IDictionary dict, Position fromPosData, 
int endPos, int wordID, int leftID = dict.GetLeftId(wordID); int leastCost = int.MaxValue; int leastIDX = -1; - Debugging.Assert(() => fromPosData.count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fromPosData.count > 0); for (int idx = 0; idx < fromPosData.count; idx++) { // Cost is path cost so far, plus word cost (added at @@ -356,7 +356,7 @@ private void Add(IDictionary dict, Position fromPosData, int endPos, int wordID, } //positions.get(endPos).add(leastCost, dict.getRightId(wordID), fromPosData.pos, leastIDX, wordID, type); - Debugging.Assert(() => leftID == dict.GetRightId(wordID)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leftID == dict.GetRightId(wordID)); positions.Get(endPos).Add(leastCost, leftID, fromPosData.pos, leastIDX, wordID, type); } @@ -387,7 +387,7 @@ public override bool IncrementToken() int position = token.Position; int length = token.Length; ClearAttributes(); - Debugging.Assert(() => length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length > 0); //System.out.println("off=" + token.getOffset() + " len=" + length + " vs " + token.getSurfaceForm().length); termAtt.CopyBuffer(token.SurfaceForm, token.Offset, length); offsetAtt.SetOffset(CorrectOffset(position), CorrectOffset(position + length)); @@ -402,7 +402,7 @@ public override bool IncrementToken() } else { - Debugging.Assert(() => token.Position > lastTokenPos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => token.Position > lastTokenPos); posIncAtt.PositionIncrement = 1; posLengthAtt.PositionLength = 1; } @@ -511,7 +511,7 @@ private void Parse() } // We will always have at least one live path: - Debugging.Assert(() => leastIDX != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leastIDX != -1); // Second pass: prune all but the best path: for (int pos2 = pos; pos2 < positions.GetNextPos(); pos2++) @@ -544,7 +544,7 @@ private void Parse() if (pos != leastPosData.pos) { // We jumped into a future position: - Debugging.Assert(() => pos < leastPosData.pos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < leastPosData.pos); pos = leastPosData.pos; } @@ -913,10 +913,10 @@ private void Backtrace(Position endPosData, int fromIDX) { //System.out.println("BT: back pos=" + pos + " bestIDX=" + bestIDX); Position posData = positions.Get(pos); - Debugging.Assert(() => bestIDX < posData.count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bestIDX < posData.count); int backPos = posData.backPos[bestIDX]; - Debugging.Assert(() => backPos >= lastBackTracePos, () => "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => backPos >= lastBackTracePos, () => "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos); int length = pos - backPos; JapaneseTokenizerType backType = posData.backType[bestIDX]; int backID = posData.backID[bestIDX]; @@ -989,7 +989,7 @@ private void Backtrace(Position endPosData, int fromIDX) if (leastIDX != -1 && leastCost <= maxCost && posData.backPos[leastIDX] != backPos) { // We should have pruned the altToken from the graph: - Debugging.Assert(() => posData.backPos[leastIDX] != backPos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => posData.backPos[leastIDX] != backPos); // Save the current compound token, to output when // this alternate path joins back: @@ -1024,7 +1024,7 @@ private void Backtrace(Position endPosData, int fromIDX) } int offset = backPos - lastBackTracePos; - Debugging.Assert(() => offset >= 0); + if 
(Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); if (altToken != null && altToken.Position >= backPos) { @@ -1035,7 +1035,7 @@ private void Backtrace(Position endPosData, int fromIDX) // The pruning we did when we created the altToken // ensures that the back trace will align back with // the start of the altToken: - Debugging.Assert(() => altToken.Position == backPos, () => altToken.Position + " vs " + backPos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => altToken.Position == backPos, () => altToken.Position + " vs " + backPos); // NOTE: not quite right: the compound token may // have had all punctuation back traced so far, but @@ -1060,7 +1060,7 @@ private void Backtrace(Position endPosData, int fromIDX) { Console.WriteLine(" discard all-punctuation altToken=" + altToken); } - Debugging.Assert(() => discardPunctuation); + if (Debugging.AssertsEnabled) Debugging.Assert(() => discardPunctuation); } altToken = null; } @@ -1355,7 +1355,7 @@ public void Reset() { count = 0; // forwardCount naturally resets after it runs: - Debugging.Assert(() => forwardCount == 0, () => "pos=" + pos + " forwardCount=" + forwardCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => forwardCount == 0, () => "pos=" + pos + " forwardCount=" + forwardCount); } } @@ -1432,13 +1432,13 @@ public Position Get(int pos) nextWrite = 0; } // Should have already been reset: - Debugging.Assert(() => positions[nextWrite].count == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => positions[nextWrite].count == 0); positions[nextWrite++].pos = nextPos++; count++; } - Debugging.Assert(() => InBounds(pos)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => InBounds(pos)); int index = GetIndex(pos); - Debugging.Assert(() => positions[index].pos == pos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => positions[index].pos == pos); return positions[index]; } @@ -1466,8 +1466,11 @@ private int GetIndex(int pos) public void FreeBefore(int pos) { int toFree = count - (nextPos - pos); - Debugging.Assert(() => toFree >= 0); - Debugging.Assert(() => toFree <= count); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => toFree >= 0); + Debugging.Assert(() => toFree <= count); + } int index = nextWrite - count; if (index < 0) { diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs index ecb270d6c8..7df6295001 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs @@ -61,7 +61,7 @@ public virtual int Put(string[] entry) for (int i = 4; i < 8; i++) { string part = entry[i]; - Debugging.Assert(() => part.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => part.Length > 0); if (!"*".Equals(part, StringComparison.Ordinal)) { if (sb.Length > 0) @@ -118,8 +118,11 @@ public virtual int Put(string[] entry) flags |= BinaryDictionary.HAS_PRONUNCIATION; } - Debugging.Assert(() => leftId == rightId); - Debugging.Assert(() => leftId < 4096); // there are still unused bits + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => leftId == rightId); + Debugging.Assert(() => leftId < 4096); // there are still unused bits + } // add pos mapping int toFill = 1 + leftId - posDict.Count; for (int i = 0; i < toFill; i++) @@ -128,7 +131,7 @@ public virtual int Put(string[] entry) } string existing = posDict[leftId]; - Debugging.Assert(() => existing == null || existing.Equals(fullPOSData, 
StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => existing == null || existing.Equals(fullPOSData, StringComparison.Ordinal)); posDict[leftId] = fullPOSData; m_buffer.PutInt16((short)(leftId << 3 | flags)); @@ -136,7 +139,7 @@ public virtual int Put(string[] entry) if ((flags & BinaryDictionary.HAS_BASEFORM) != 0) { - Debugging.Assert(() => baseForm.Length < 16); + if (Debugging.AssertsEnabled) Debugging.Assert(() => baseForm.Length < 16); int shared = SharedPrefix(entry[0], baseForm); int suffix = baseForm.Length - shared; m_buffer.Put((byte)(shared << 4 | suffix)); @@ -237,11 +240,11 @@ public static int SharedPrefix(string left, string right) public virtual void AddMapping(int sourceId, int wordId) { - Debugging.Assert(() => wordId > lastWordId, () => "words out of order: " + wordId + " vs lastID: " + lastWordId); + if (Debugging.AssertsEnabled) Debugging.Assert(() => wordId > lastWordId, () => "words out of order: " + wordId + " vs lastID: " + lastWordId); if (sourceId > lastSourceId) { - Debugging.Assert(() => sourceId > lastSourceId, () => "source ids out of order: lastSourceId=" + lastSourceId + " vs sourceId=" + sourceId); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sourceId > lastSourceId, () => "source ids out of order: lastSourceId=" + lastSourceId + " vs sourceId=" + sourceId); targetMapOffsets = ArrayUtil.Grow(targetMapOffsets, sourceId + 1); for (int i = lastSourceId + 1; i <= sourceId; i++) { @@ -250,7 +253,7 @@ public virtual void AddMapping(int sourceId, int wordId) } else { - Debugging.Assert(() => sourceId == lastSourceId); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sourceId == lastSourceId); } targetMap = ArrayUtil.Grow(targetMap, targetMapEndOffset + 1); @@ -305,7 +308,7 @@ protected virtual void WriteTargetMap(string filename) for (int ofs = 0; ofs < targetMapEndOffset; ofs++) { int val = targetMap[ofs], delta = val - prev; - Debugging.Assert(() => delta >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delta >= 0); if (ofs == targetMapOffsets[sourceId]) { @out.WriteVInt32((delta << 1) | 0x01); @@ -317,7 +320,7 @@ protected virtual void WriteTargetMap(string filename) } prev += delta; } - Debugging.Assert(() => sourceId == numSourceIds, () => "sourceId:" + sourceId + " != numSourceIds:" + numSourceIds); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sourceId == numSourceIds, () => "sourceId:" + sourceId + " != numSourceIds:" + numSourceIds); } } @@ -341,7 +344,7 @@ protected virtual void WritePosDict(string filename) else { string[] data = CSVUtil.Parse(s); - Debugging.Assert(() => data.Length == 3, () => "malformed pos/inflection: " + s); + if (Debugging.AssertsEnabled) Debugging.Assert(() => data.Length == 3, () => "malformed pos/inflection: " + s); @out.WriteString(data[0]); @out.WriteString(data[1]); @out.WriteString(data[2]); @@ -370,7 +373,7 @@ protected virtual void WriteDictionary(string filename) @out.WriteByte(m_buffer.Get()); } - Debugging.Assert(() => m_buffer.Remaining == 0L); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_buffer.Remaining == 0L); } } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs index a653cdc134..3e66621dbb 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs @@ -41,12 +41,12 @@ public static ConnectionCostsWriter Build(string filename) string line = 
streamReader.ReadLine(); string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd(); - Debugging.Assert(() => dimensions.Length == 2); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dimensions.Length == 2); int forwardSize = int.Parse(dimensions[0], CultureInfo.InvariantCulture); int backwardSize = int.Parse(dimensions[1], CultureInfo.InvariantCulture); - Debugging.Assert(() => forwardSize > 0 && backwardSize > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => forwardSize > 0 && backwardSize > 0); ConnectionCostsWriter costs = new ConnectionCostsWriter(forwardSize, backwardSize); @@ -54,7 +54,7 @@ public static ConnectionCostsWriter Build(string filename) { string[] fields = whiteSpaceRegex.Split(line).TrimEnd(); - Debugging.Assert(() => fields.Length == 3); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Length == 3); int forwardId = int.Parse(fields[0], CultureInfo.InvariantCulture); int backwardId = int.Parse(fields[1], CultureInfo.InvariantCulture); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs index 9eea786bc0..e49253e2cd 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs @@ -63,10 +63,10 @@ public void Write(string baseDir) @out.WriteVInt32(forwardSize); @out.WriteVInt32(backwardSize); int last = 0; - Debugging.Assert(() => costs.Length == backwardSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => costs.Length == backwardSize); foreach (short[] a in costs) { - Debugging.Assert(() => a.Length == forwardSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => a.Length == forwardSize); for (int i = 0; i < a.Length; i++) { int delta = (int)a[i] - last; diff --git a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs index 4a17a012ea..641a400582 100644 --- a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs +++ b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs @@ -85,7 +85,7 @@ public override bool IncrementToken() if (matcher.Success) { - Debugging.Assert(() => state != null && encoded != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null && encoded != null); RestoreState(state); int start = matcher.Index; diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs index d962a24b08..d7ea2535fb 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs @@ -42,7 +42,7 @@ public AnalyzerFactory(IList charFilterFactories, IList tokenFilterFactories) { this.charFilterFactories = charFilterFactories; - Debugging.Assert(() => null != tokenizerFactory); + if (Debugging.AssertsEnabled) Debugging.Assert(() => null != tokenizerFactory); this.tokenizerFactory = tokenizerFactory; this.tokenFilterFactories = tokenFilterFactories; } diff --git a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs index 5a357c8953..633599fc22 100644 --- a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs +++ b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs @@ -242,7 +242,7 @@ public static QualityStats Average(QualityStats[] stats) } } } - Debugging.Assert(() => m > 0, () => "Fishy: no \"good\" queries!"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m > 0, () => "Fishy: no \"good\" queries!"); 
// take average: times go by all queries, other measures go by "good" queries only. avg.searchTime /= stats.Length; avg.docNamesExtractTime /= stats.Length; diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs index 2f8ba27a26..4f51d9ae98 100644 --- a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs +++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs @@ -73,7 +73,7 @@ public TrecJudge(TextReader reader) st.MoveNext(); bool relevant = !zero.Equals(st.Current, StringComparison.Ordinal); // LUCENENET: don't call st.NextToken() unless the condition fails. - Debugging.Assert(() => st.RemainingTokens == 0, () => "wrong format: " + line + " next: " + (st.MoveNext() ? st.Current : "")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => st.RemainingTokens == 0, () => "wrong format: " + line + " next: " + (st.MoveNext() ? st.Current : "")); if (relevant) { // only keep relevant docs if (curr == null || !curr.queryID.Equals(queryID, StringComparison.Ordinal)) diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs index e33c4a9ddc..d9c8ccfeb9 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs @@ -127,7 +127,7 @@ public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldIn { int field = input.ReadVInt32(); long numTerms = input.ReadVInt64(); - Debugging.Assert(() => numTerms >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms >= 0); long termsStartPointer = input.ReadVInt64(); FieldInfo fieldInfo = fieldInfos.FieldInfo(field); long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : input.ReadVInt64(); @@ -234,7 +234,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); FieldReader result; fields.TryGetValue(field, out result); @@ -258,7 +258,7 @@ private class FieldReader : Terms public FieldReader(BlockTermsReader outerInstance, FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize) { - Debugging.Assert(() => numTerms > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms > 0); this.outerInstance = outerInstance; @@ -449,7 +449,7 @@ public override SeekStatus SeekCeil(BytesRef target) // Block must exist since, at least, the indexed term // is in the block: - Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(() => result); indexIsCurrent = true; didIndexNext = false; @@ -537,7 +537,7 @@ public override SeekStatus SeekCeil(BytesRef target) // Target's prefix is before the common prefix // of this block, so we position to start of // block and return NOT_FOUND: - Debugging.Assert(() => state.TermBlockOrd == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state.TermBlockOrd == 0); int suffix = termSuffixesReader.ReadVInt32(); term.Length = termBlockPrefix + suffix; @@ -642,7 +642,7 @@ public override SeekStatus SeekCeil(BytesRef target) // cross another index term (besides the first // one) while we are scanning: - Debugging.Assert(() => indexIsCurrent); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexIsCurrent); if (!NextBlock()) { @@ -665,7 +665,7 @@ public override BytesRef Next() // works properly: if (seekPending) { - 
Debugging.Assert(() => !indexIsCurrent); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !indexIsCurrent); input.Seek(state.BlockFilePointer); int pendingSeekCount = state.TermBlockOrd; bool result = NextBlock(); @@ -675,12 +675,12 @@ public override BytesRef Next() // Block must exist since seek(TermState) was called w/ a // TermState previously returned by this enum when positioned // on a real term: - Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(() => result); while (state.TermBlockOrd < pendingSeekCount) { BytesRef nextResult = _next(); - Debugging.Assert(() => nextResult != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextResult != null); } seekPending = false; state.Ord = savOrd; @@ -769,8 +769,11 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef target, TermState otherState) { //System.out.println("BTR.seekExact termState target=" + target.utf8ToString() + " " + target + " this=" + this); - Debugging.Assert(() => otherState != null && otherState is BlockTermState); - Debugging.Assert(() => !doOrd || ((BlockTermState)otherState).Ord < outerInstance.numTerms); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => otherState != null && otherState is BlockTermState); + Debugging.Assert(() => !doOrd || ((BlockTermState)otherState).Ord < outerInstance.numTerms); + } state.CopyFrom(otherState); seekPending = true; indexIsCurrent = false; @@ -794,7 +797,7 @@ public override void SeekExact(long ord) throw new InvalidOperationException("terms index was not loaded"); } - Debugging.Assert(() => ord < outerInstance.numTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord < outerInstance.numTerms); // TODO: if ord is in same terms block and // after current ord, we should avoid this seek just @@ -803,7 +806,7 @@ public override void SeekExact(long ord) bool result = NextBlock(); // Block must exist since ord < numTerms: - Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(() => result); indexIsCurrent = true; didIndexNext = false; @@ -811,7 +814,7 @@ public override void SeekExact(long ord) seekPending = false; state.Ord = indexEnum.Ord - 1; - Debugging.Assert(() => state.Ord >= -1, () => "Ord=" + state.Ord); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state.Ord >= -1, () => "Ord=" + state.Ord); term.CopyBytes(indexEnum.Term); // Now, scan: @@ -819,9 +822,9 @@ public override void SeekExact(long ord) while (left > 0) { BytesRef term = _next(); - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); left--; - Debugging.Assert(() => indexIsCurrent); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexIsCurrent); } } diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs index 63a82a984b..18eb752372 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs @@ -70,7 +70,7 @@ private class FieldMetaData public FieldMetaData(FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int int64sSize) { - Debugging.Assert(() => numTerms > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms > 0); FieldInfo = fieldInfo; TermsStartPointer = termsStartPointer; @@ -123,7 +123,7 @@ private void WriteHeader(IndexOutput output) public override TermsConsumer 
AddField(FieldInfo field) { //System.out.println("\nBTW.addField seg=" + segment + " field=" + field.name); - Debugging.Assert(() => currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); currentField = field; TermsIndexWriterBase.FieldWriter fieldIndexWriter = termsIndexWriter.AddField(field, m_output.GetFilePointer()); return new TermsWriter(this, fieldIndexWriter, field, postingsWriter); @@ -235,7 +235,7 @@ public override PostingsConsumer StartTerm(BytesRef text) public override void FinishTerm(BytesRef text, TermStats stats) { - Debugging.Assert(() => stats.DocFreq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq > 0); //System.out.println("BTW: finishTerm term=" + fieldInfo.name + ":" + text.utf8ToString() + " " + text + " seg=" + segment + " df=" + stats.docFreq); bool isIndexTerm = fieldIndexWriter.CheckIndexTerm(text, stats); @@ -302,8 +302,11 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount private int SharedPrefix(BytesRef term1, BytesRef term2) { - Debugging.Assert(() => term1.Offset == 0); - Debugging.Assert(() => term2.Offset == 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => term1.Offset == 0); + Debugging.Assert(() => term2.Offset == 0); + } int pos1 = 0; int pos1End = pos1 + Math.Min(term1.Length, term2.Length); int pos2 = 0; @@ -359,7 +362,7 @@ private void FlushBlock() for (int termCount = 0; termCount < pendingCount; termCount++) { BlockTermState state = pendingTerms[termCount].State; - Debugging.Assert(() => state != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null); bytesWriter.WriteVInt32(state.DocFreq); if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY) { diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs index bf5ba28a1f..ca21abb680 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs @@ -70,7 +70,7 @@ public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string seg { this.termComp = termComp; - Debugging.Assert(() => indexDivisor == -1 || indexDivisor > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexDivisor == -1 || indexDivisor > 0); input = dir.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION), context); @@ -101,7 +101,7 @@ public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string seg // In case terms index gets loaded, later, on demand totalIndexInterval = indexInterval * indexDivisor; } - Debugging.Assert(() => totalIndexInterval > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => totalIndexInterval > 0); SeekDir(input, dirOffset); @@ -190,7 +190,7 @@ public override long Seek(BytesRef target) { int lo = 0; // binary search int hi = fieldIndex.numIndexTerms - 1; - Debugging.Assert(() => outerInstance.totalIndexInterval > 0, () => "totalIndexInterval=" + outerInstance.totalIndexInterval); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.totalIndexInterval > 0, () => "totalIndexInterval=" + outerInstance.totalIndexInterval); while (hi >= lo) { @@ -211,7 +211,7 @@ public override long Seek(BytesRef target) } else { - Debugging.Assert(() => mid >= 0); + if (Debugging.AssertsEnabled) 
Debugging.Assert(() => mid >= 0); ord = mid * outerInstance.totalIndexInterval; return fieldIndex.termsStart + fieldIndex.termsDictOffsets.Get(mid); } @@ -219,7 +219,7 @@ public override long Seek(BytesRef target) if (hi < 0) { - Debugging.Assert(() => hi == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hi == -1); hi = 0; } @@ -252,7 +252,7 @@ public override long Seek(long ord) { int idx = (int)(ord / outerInstance.totalIndexInterval); // caller must ensure ord is in bounds - Debugging.Assert(() => idx < fieldIndex.numIndexTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => idx < fieldIndex.numIndexTerms); long offset = fieldIndex.termOffsets.Get(idx); int length = (int)(fieldIndex.termOffsets.Get(1 + idx) - offset); outerInstance.termBytesReader.FillSlice(term, fieldIndex.termBytesStart + offset, length); @@ -328,11 +328,11 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS // -1 is passed to mean "don't load term index", but // if we are then later loaded it's overwritten with // a real value - Debugging.Assert(() => outerInstance.outerInstance.indexDivisor > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.outerInstance.indexDivisor > 0); this.numIndexTerms = 1 + (numIndexTerms - 1) / outerInstance.outerInstance.indexDivisor; - Debugging.Assert(() => this.numIndexTerms > 0, () => "numIndexTerms=" + numIndexTerms + " indexDivisor=" + outerInstance.outerInstance.indexDivisor); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.numIndexTerms > 0, () => "numIndexTerms=" + numIndexTerms + " indexDivisor=" + outerInstance.outerInstance.indexDivisor); if (outerInstance.outerInstance.indexDivisor == 1) { @@ -345,11 +345,11 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS // records offsets into main terms dict file termsDictOffsets = PackedInt32s.GetReader(clone); - Debugging.Assert(() => termsDictOffsets.Count == numIndexTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsDictOffsets.Count == numIndexTerms); // records offsets into byte[] term data termOffsets = PackedInt32s.GetReader(clone); - Debugging.Assert(() => termOffsets.Count == 1 + numIndexTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termOffsets.Count == 1 + numIndexTerms); } finally { @@ -398,8 +398,11 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS int numTermBytes = (int)(nextTermOffset - termOffset); clone.Seek(indexStart + termOffset); - Debugging.Assert(() => indexStart + termOffset < clone.Length, () => "indexStart=" + indexStart + " termOffset=" + termOffset + " len=" + clone.Length); - Debugging.Assert(() => indexStart + termOffset + numTermBytes < clone.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => indexStart + termOffset < clone.Length, () => "indexStart=" + indexStart + " termOffset=" + termOffset + " len=" + clone.Length); + Debugging.Assert(() => indexStart + termOffset + numTermBytes < clone.Length); + } outerInstance.outerInstance.termBytes.Copy(clone, numTermBytes); termOffsetUpto += numTermBytes; diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs index d0f8f5d3a0..ac25b8d583 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs @@ -187,7 +187,7 @@ public override void Add(BytesRef text, TermStats stats, long termsFilePointer) 
lastTermsPointer = termsFilePointer; // save term length (in bytes) - Debugging.Assert(() => indexedTermLength <= short.MaxValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexedTermLength <= short.MaxValue); termLengths[numIndexTerms] = (short)indexedTermLength; totTermLength += indexedTermLength; diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs index 5fcfd0ee24..d6caa49d21 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs @@ -55,7 +55,7 @@ public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string input = dir.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION), new IOContext(context, true)); //this.segment = segment; // LUCENENET: Not used bool success = false; - Debugging.Assert(() => indexDivisor == -1 || indexDivisor > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexDivisor == -1 || indexDivisor > 0); try { diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs index 58122454c7..db5c9a539e 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs @@ -292,7 +292,7 @@ public override void Add(BytesRef text, TermStats stats, long termsFilePointer) if (text.Length == 0) { // We already added empty string in ctor - Debugging.Assert(() => termsFilePointer == startTermsFilePointer); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsFilePointer == startTermsFilePointer); return; } int lengthSave = text.Length; diff --git a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs index c6b18d313c..b2aaf1d23c 100644 --- a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs @@ -380,7 +380,7 @@ public override TermsConsumer AddField(FieldInfo field) var bloomFilter = outerInstance._bloomFilterFactory.GetSetForField(_state, field); if (bloomFilter != null) { - Debugging.Assert(() => (_bloomFilters.ContainsKey(field) == false)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (_bloomFilters.ContainsKey(field) == false)); _bloomFilters.Add(field, bloomFilter); return new WrappedTermsConsumer(_delegateFieldsConsumer.AddField(field), bloomFilter); diff --git a/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs b/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs index 5b7e8046c6..76c007a625 100644 --- a/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs +++ b/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs @@ -228,7 +228,7 @@ public static FuzzySet Deserialize(DataInput input) private ContainsResult MayContainValue(int positiveHash) { - Debugging.Assert(() => (positiveHash >= 0)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (positiveHash >= 0)); // Bloom sizes are always base 2 and so can be ANDed for a fast modulo var pos = positiveHash & _bloomSize; diff --git a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs index 400de2e714..f0ce63ccd0 100644 --- a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs @@ -109,7 +109,7 @@ public Reader(IndexInput input, int[] 
pending, IBlockReader blockReader) internal virtual void Seek(long fp, int upto) { - Debugging.Assert(() => upto < blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto < blockSize); if (seekPending || fp != lastBlockFP) { pendingFP = fp; @@ -173,7 +173,7 @@ public override void Read(DataInput indexIn, bool absolute) fp += indexIn.ReadVInt64(); } } - Debugging.Assert(() => upto < outerInstance.m_blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto < outerInstance.m_blockSize); } public override void Seek(Int32IndexInput.Reader other) diff --git a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs index feb98e0ec4..869b93a199 100644 --- a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs @@ -101,7 +101,7 @@ public override void Write(DataOutput indexOut, bool absolute) else if (fp == lastFP) { // same block - Debugging.Assert(() => upto >= lastUpto); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= lastUpto); int uptoDelta = upto - lastUpto; indexOut.WriteVInt32(uptoDelta << 1 | 1); } diff --git a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs index 89c956df2a..3c90d8c25f 100644 --- a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs @@ -118,7 +118,7 @@ internal virtual void Seek(long fp, int upto) // TODO: should we do this in real-time, not lazy? pendingFP = fp; pendingUpto = upto; - Debugging.Assert(() => pendingUpto >= 0, () => "pendingUpto=" + pendingUpto); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingUpto >= 0, () => "pendingUpto=" + pendingUpto); seekPending = true; } diff --git a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs index 1ef5b83eeb..db3af9dc58 100644 --- a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs @@ -107,7 +107,7 @@ public override void CopyFrom(Int32IndexOutput.Index other, bool copyLast) public override void Write(DataOutput indexOut, bool absolute) { - Debugging.Assert(() => upto >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= 0); if (absolute) { indexOut.WriteVInt32(upto); @@ -116,7 +116,7 @@ public override void Write(DataOutput indexOut, bool absolute) else if (fp == lastFP) { // same block - Debugging.Assert(() => upto >= lastUpto); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= lastUpto); int uptoDelta = upto - lastUpto; indexOut.WriteVInt32(uptoDelta << 1 | 1); } @@ -136,7 +136,7 @@ public override void Write(int v) hitExcDuringWrite = true; upto -= Add(v) - 1; hitExcDuringWrite = false; - Debugging.Assert(() => upto >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= 0); } protected override void Dispose(bool disposing) @@ -152,7 +152,7 @@ protected override void Dispose(bool disposing) while (upto > stuffed) { upto -= Add(0) - 1; - Debugging.Assert(() => upto >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= 0); stuffed += 1; } } diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs index 51e8238c23..2d92f3f991 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs +++ 
b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs @@ -372,7 +372,7 @@ public bool MoveNext() } else { - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); return false; } } diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs index 4cd0dda152..330237a53d 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs @@ -552,7 +552,7 @@ private IBits GetMissingBits(int fieldNumber, long offset, long length) { var data = (IndexInput)this.data.Clone(); data.Seek(offset); - Debugging.Assert(() => length % 8 == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length % 8 == 0); var bits = new long[(int)length >> 3]; for (var i = 0; i < bits.Length; i++) { diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs index 16a0e4d9f7..63c9a80e2c 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs @@ -589,7 +589,7 @@ public DirectField(SegmentReadState state, string field, Terms termsIn, int minS upto++; } - Debugging.Assert(() => upto == docFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == docFreq); ent = new HighFreqTerm(docs, freqs, positions, payloads, totalTermFreq); } @@ -625,7 +625,7 @@ public DirectField(SegmentReadState state, string field, Terms termsIn, int minS } } this.skipOffsets[numTerms] = skipOffset; - Debugging.Assert(() => skipOffset == skipCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => skipOffset == skipCount); } /// Returns approximate RAM bytes used. @@ -738,7 +738,7 @@ private void SetSkips(int termOrd, byte[] termBytes) private void FinishSkips() { - Debugging.Assert(() => count == terms.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count == terms.Length); int lastTermOffset = termOffsets[count - 1]; int lastTermLength = termOffsets[count] - lastTermOffset; @@ -972,7 +972,7 @@ public override void SeekExact(BytesRef term, TermState state) { termOrd = (int) ((OrdTermState) state).Ord; SetTerm(); - Debugging.Assert(() => term.Equals(scratch)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Equals(scratch)); } public override BytesRef Term => scratch; @@ -1207,13 +1207,16 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, while (label > states[i].transitionMax) { states[i].transitionUpto++; - Debugging.Assert(() => states[i].transitionUpto < states[i].transitions.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => states[i].transitionUpto < states[i].transitions.Length); states[i].transitionMin = states[i].transitions[states[i].transitionUpto].Min; states[i].transitionMax = states[i].transitions[states[i].transitionUpto].Max; - Debugging.Assert(() => states[i].transitionMin >= 0); - Debugging.Assert(() => states[i].transitionMin <= 255); - Debugging.Assert(() => states[i].transitionMax >= 0); - Debugging.Assert(() => states[i].transitionMax <= 255); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => states[i].transitionMin >= 0); + Debugging.Assert(() => states[i].transitionMin <= 255); + Debugging.Assert(() => states[i].transitionMax >= 0); + Debugging.Assert(() => states[i].transitionMax <= 255); + } } // Skip forwards until we find a term matching @@ -1254,7 +1257,7 @@ public 
DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, // System.out.println(" no match; already beyond; return termOrd=" + termOrd); // } stateUpto -= skipUpto; - Debugging.Assert(() => stateUpto >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stateUpto >= 0); return; } else if (label == (outerInstance.termBytes[termOffset_i + i] & 0xFF)) @@ -1269,7 +1272,7 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, int nextState = runAutomaton.Step(states[stateUpto].state, label); // Automaton is required to accept startTerm: - Debugging.Assert(() => nextState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextState != -1); stateUpto++; states[stateUpto].changeOrd = outerInstance.skips[skipOffset + skipUpto++]; @@ -1300,12 +1303,12 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, while (termOrd < outerInstance.terms.Length && outerInstance.Compare(termOrd, startTerm) <= 0) { - Debugging.Assert(() => termOrd == startTermOrd || + if (Debugging.AssertsEnabled) Debugging.Assert(() => termOrd == startTermOrd || outerInstance.skipOffsets[termOrd] == outerInstance.skipOffsets[termOrd + 1]); termOrd++; } - Debugging.Assert(() => termOrd - startTermOrd < outerInstance.minSkipCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termOrd - startTermOrd < outerInstance.minSkipCount); termOrd--; stateUpto -= skipUpto; // if (DEBUG) { @@ -1386,7 +1389,7 @@ public override BytesRef Next() if (termOrd == 0 && outerInstance.termOffsets[1] == 0) { // Special-case empty string: - Debugging.Assert(() => stateUpto == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stateUpto == 0); // if (DEBUG) { // System.out.println(" visit empty string"); // } @@ -1435,9 +1438,9 @@ public override BytesRef Next() // System.out.println(" term=" + new BytesRef(termBytes, termOffset, termLength).utf8ToString() + " skips=" + Arrays.toString(skips)); // } - Debugging.Assert(() => termOrd < state.changeOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termOrd < state.changeOrd); - Debugging.Assert(() => stateUpto <= termLength, () => "term.length=" + termLength + "; stateUpto=" + stateUpto); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stateUpto <= termLength, () => "term.length=" + termLength + "; stateUpto=" + stateUpto); int label = outerInstance.termBytes[termOffset + stateUpto] & 0xFF; while (label > state.transitionMax) @@ -1456,7 +1459,7 @@ public override BytesRef Next() } else { - Debugging.Assert(() => state.changeOrd > termOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state.changeOrd > termOrd); // if (DEBUG) { // System.out.println(" jumpend " + (state.changeOrd - termOrd)); // } @@ -1467,14 +1470,17 @@ public override BytesRef Next() } goto nextTermContinue; } - Debugging.Assert(() => state.transitionUpto < state.transitions.Length, + if (Debugging.AssertsEnabled) Debugging.Assert(() => state.transitionUpto < state.transitions.Length, () => " state.transitionUpto=" + state.transitionUpto + " vs " + state.transitions.Length); state.transitionMin = state.transitions[state.transitionUpto].Min; state.transitionMax = state.transitions[state.transitionUpto].Max; - Debugging.Assert(() => state.transitionMin >= 0); - Debugging.Assert(() => state.transitionMin <= 255); - Debugging.Assert(() => state.transitionMax >= 0); - Debugging.Assert(() => state.transitionMax <= 255); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => state.transitionMin >= 0); + 
Debugging.Assert(() => state.transitionMin <= 255); + Debugging.Assert(() => state.transitionMax >= 0); + Debugging.Assert(() => state.transitionMax <= 255); + } } int targetLabel = state.transitionMin; @@ -1603,7 +1609,7 @@ public override BytesRef Next() if (compiledAutomaton.CommonSuffixRef != null) { //System.out.println("suffix " + compiledAutomaton.commonSuffixRef.utf8ToString()); - Debugging.Assert(() => compiledAutomaton.CommonSuffixRef.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => compiledAutomaton.CommonSuffixRef.Offset == 0); if (termLength < compiledAutomaton.CommonSuffixRef.Length) { termOrd++; @@ -2002,7 +2008,7 @@ public override int NextDoc() if (upto < postings.Length) { freq = postings[upto + 1]; - Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); return postings[upto]; } } @@ -2011,7 +2017,7 @@ public override int NextDoc() while (upto < postings.Length) { freq = postings[upto + 1]; - Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); if (liveDocs.Get(postings[upto])) { return postings[upto]; @@ -2186,7 +2192,7 @@ public override int NextDoc() public override int NextPosition() { - Debugging.Assert(() => skipPositions > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => skipPositions > 0); skipPositions--; int pos = postings[upto++]; diff --git a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs index 55d733e6cd..6d79270d49 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs @@ -166,7 +166,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); TermsReader result; fields.TryGetValue(field, out result); return result; @@ -218,7 +218,7 @@ internal TermsReader(FSTOrdTermsReader outerInstance, FieldInfo fieldInfo, Index this.longsSize = longsSize; this.index = index; - Debugging.Assert(() => (numTerms & (~0xffffffffL)) == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (numTerms & (~0xffffffffL)) == 0); int numBlocks = (int)(numTerms + INTERVAL - 1) / INTERVAL; this.numSkipInfo = longsSize + 3; this.skipInfo = new long[numBlocks * numSkipInfo]; @@ -500,7 +500,7 @@ public override BytesRef Next() { seekPending = false; var status = SeekCeil(term); - Debugging.Assert(() => status == SeekStatus.FOUND); // must positioned on valid term + if (Debugging.AssertsEnabled) Debugging.Assert(() => status == SeekStatus.FOUND); // must positioned on valid term } UpdateEnum(fstEnum.Next()); return term; @@ -631,7 +631,7 @@ internal override void DecodeMetaData() internal override void DecodeStats() { var arc = TopFrame().arc; - Debugging.Assert(() => arc.NextFinalOutput == fstOutputs.NoOutput); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.NextFinalOutput == fstOutputs.NoOutput); ord = arc.Output.Value; base.DecodeStats(); } @@ -699,7 +699,7 @@ private BytesRef DoSeekCeil(BytesRef target) { break; } - Debugging.Assert(() => IsValid(frame)); // target must be fetched from automaton + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsValid(frame)); // target must be fetched from automaton PushFrame(frame); upto++; } @@ -837,7 +837,7 @@ private void PushFrame(Frame frame) arc.Output = fstOutputs.Add(TopFrame().arc.Output, arc.Output); term = Grow(arc.Label); level++; - 
Debugging.Assert(() => frame == stack[level]); + if (Debugging.AssertsEnabled) Debugging.Assert(() => frame == stack[level]); } private Frame PopFrame() diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs b/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs index 93c61d11f4..16d2d0c265 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs @@ -128,7 +128,7 @@ public override TermData Common(TermData t1, TermData t2) if (Equals(t1, NO_OUTPUT) || Equals(t2, NO_OUTPUT)) return NO_OUTPUT; - Debugging.Assert(() => t1.longs.Length == t2.longs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => t1.longs.Length == t2.longs.Length); long[] min = t1.longs, max = t2.longs; int pos = 0; @@ -183,7 +183,7 @@ public override TermData Subtract(TermData t1, TermData t2) if (Equals(t2, NO_OUTPUT)) return t1; - Debugging.Assert(() => t1.longs.Length == t2.longs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => t1.longs.Length == t2.longs.Length); int pos = 0; long diff = 0; @@ -220,7 +220,7 @@ public override TermData Add(TermData t1, TermData t2) if (Equals(t2, NO_OUTPUT)) return t1; - Debugging.Assert(() => t1.longs.Length == t2.longs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => t1.longs.Length == t2.longs.Length); var pos = 0; var accum = new long[_longsSize]; diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs index 4eb4a3220a..e12eb5adcb 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs @@ -166,7 +166,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); TermsReader result; fields.TryGetValue(field, out result); return result; @@ -369,7 +369,7 @@ public override BytesRef Next() { seekPending = false; SeekStatus status = SeekCeil(term); - Debugging.Assert(() => status == SeekStatus.FOUND); // must positioned on valid term + if (Debugging.AssertsEnabled) Debugging.Assert(() => status == SeekStatus.FOUND); // must positioned on valid term } UpdateEnum(fstEnum.Next()); return term; @@ -500,7 +500,7 @@ internal IntersectTermsEnum(FSTTermsReader.TermsReader outerInstance, CompiledAu internal override void DecodeMetaData() { - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); if (!decoded) { if (meta.bytes != null) @@ -611,7 +611,7 @@ private BytesRef DoSeekCeil(BytesRef target) { break; } - Debugging.Assert(() => IsValid(frame)); // target must be fetched from automaton + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsValid(frame)); // target must be fetched from automaton PushFrame(frame); upto++; } diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs index b7c595cc40..2176e99f92 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs @@ -144,7 +144,7 @@ internal virtual void AddNumericField(FieldInfo field, IEnumerable values ++count; } - Debugging.Assert(() => count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); } if (missing) diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs index de1b5fadeb..802ca19189 
100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs @@ -675,7 +675,7 @@ private IBits GetMissingBits(int fieldNumber, long offset, long length) { var data = (IndexInput)this.data.Clone(); data.Seek(offset); - Debugging.Assert(() => length % 8 == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length % 8 == 0); var bits = new long[(int) length >> 3]; for (var i = 0; i < bits.Length; i++) { diff --git a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs index 8df0e00536..7fa27bfcdb 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs @@ -146,7 +146,7 @@ public PostingsWriter(MemoryPostingsFormat.TermsWriter outerInstance) public override void StartDoc(int docID, int termDocFreq) { int delta = docID - lastDocID; - Debugging.Assert(() => docID == 0 || delta > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID == 0 || delta > 0); lastDocID = docID; docCount++; @@ -161,7 +161,7 @@ public override void StartDoc(int docID, int termDocFreq) else { buffer.WriteVInt32(delta << 1); - Debugging.Assert(() => termDocFreq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termDocFreq > 0); buffer.WriteVInt32(termDocFreq); } @@ -171,12 +171,12 @@ public override void StartDoc(int docID, int termDocFreq) public override void AddPosition(int pos, BytesRef payload, int startOffset, int endOffset) { - Debugging.Assert(() => payload == null || outerInstance.field.HasPayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(() => payload == null || outerInstance.field.HasPayloads); //System.out.println(" addPos pos=" + pos + " payload=" + payload); int delta = pos - lastPos; - Debugging.Assert(() => delta >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delta >= 0); lastPos = pos; int payloadLen = 0; @@ -231,7 +231,7 @@ public override void FinishDoc() public virtual PostingsWriter Reset() { - Debugging.Assert(() => buffer.GetFilePointer() == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer.GetFilePointer() == 0); lastDocID = 0; docCount = 0; lastPayloadLen = 0; @@ -255,9 +255,9 @@ public override PostingsConsumer StartTerm(BytesRef text) public override void FinishTerm(BytesRef text, TermStats stats) { - Debugging.Assert(() => postingsWriter.docCount == stats.DocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => postingsWriter.docCount == stats.DocFreq); - Debugging.Assert(() => buffer2.GetFilePointer() == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer2.GetFilePointer() == 0); buffer2.WriteVInt32(stats.DocFreq); if (field.IndexOptions != IndexOptions.DOCS_ONLY) @@ -402,7 +402,7 @@ public bool CanReuse(IndexOptions indexOptions, bool storePayloads) public FSTDocsEnum Reset(BytesRef bufferIn, IBits liveDocs, int numDocs) { - Debugging.Assert(() => numDocs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 0); if (buffer.Length < bufferIn.Length) { buffer = ArrayUtil.Grow(buffer, bufferIn.Length); @@ -446,7 +446,7 @@ public override int NextDoc() else { freq = @in.ReadVInt32(); - Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); } if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) @@ -555,7 +555,7 @@ public bool CanReuse(bool storePayloads, bool storeOffsets) public FSTDocsAndPositionsEnum Reset(BytesRef bufferIn, IBits liveDocs, int 
numDocs) { - Debugging.Assert(() => numDocs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 0); // System.out.println("D&P reset bytes this=" + this); // for(int i=bufferIn.offset;i<bufferIn.offset+bufferIn.length;i++) { - Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); } if (liveDocs == null || liveDocs.Get(accum)) @@ -655,7 +655,7 @@ public override int NextDoc() public override int NextPosition() { //System.out.println(" nextPos storePayloads=" + storePayloads + " this=" + this); - Debugging.Assert(() => posPending > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => posPending > 0); posPending--; if (!storePayloads) { diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs index 17ed5fb1a9..8f61c25a83 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs @@ -47,7 +47,7 @@ public PulsingPostingsFormat(PostingsBaseFormat wrappedPostingsBaseFormat, int f int minBlockSize, int maxBlockSize) : base() { - Debugging.Assert(() => minBlockSize > 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minBlockSize > 1); _freqCutoff = freqCutoff; _minBlockSize = minBlockSize; diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs index d43a4f7e48..b7568b2725 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs @@ -122,7 +122,7 @@ public override object Clone() } else { - Debugging.Assert(() => WrappedTermState != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => WrappedTermState != null); clone.WrappedTermState = (BlockTermState)WrappedTermState.Clone(); clone.Absolute = Absolute; @@ -172,7 +172,7 @@ public override void DecodeTerm(long[] empty, DataInput input, FieldInfo fieldIn { var termState2 = (PulsingTermState) termState; - Debugging.Assert(() => empty.Length == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => empty.Length == 0); termState2.Absolute = termState2.Absolute || absolute; // if we have positions, its total TF, otherwise its computed based on docFreq. 
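Many call sites in these hunks use the two-argument form, e.g. Debugging.Assert(() => backPos >= lastBackTracePos, () => "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos). Passing the message as a Func<string> rather than a string means the concatenation and formatting work happen only when the assertion actually fails; a plain string argument would be built on every call even when the condition holds. The following is a minimal stand-alone sketch of that difference, not code from this patch; Check() is a hypothetical helper, and offset/limit are made-up locals.

using System;

internal static class MessageFactorySketch
{
    // Eager overload: the caller builds the message string before Check()
    // runs, whether or not it is ever used.
    public static void Check(bool condition, string message)
    {
        if (!condition) throw new InvalidOperationException(message);
    }

    // Deferred overload: the factory runs only on the failure path, so the
    // string is never built for a passing assertion.
    public static void Check(bool condition, Func<string> messageFactory)
    {
        if (!condition) throw new InvalidOperationException(messageFactory());
    }

    public static void Main()
    {
        int offset = 5, limit = 8;

        // Builds "offset=5 limit=8" on every call, pass or fail:
        Check(offset <= limit, "offset=" + offset + " limit=" + limit);

        // Builds the string only if the condition is false (the lambda itself
        // still costs a closure for the captured locals; see the note at the
        // end of this patch):
        Check(offset <= limit, () => "offset=" + offset + " limit=" + limit);
    }
}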
@@ -340,7 +340,7 @@ public PulsingDocsEnum(FieldInfo fieldInfo) public virtual PulsingDocsEnum Reset(IBits liveDocs, PulsingTermState termState) { - Debugging.Assert(() => termState.PostingsSize != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.PostingsSize != -1); // Must make a copy of termState's byte[] so that if // app does TermsEnum.next(), this DocsEnum is not affected @@ -482,7 +482,7 @@ internal bool CanReuse(FieldInfo fieldInfo) public virtual PulsingDocsAndPositionsEnum Reset(IBits liveDocs, PulsingTermState termState) { - Debugging.Assert(() => termState.PostingsSize != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.PostingsSize != -1); if (_postingsBytes == null) { @@ -542,7 +542,7 @@ public override int Advance(int target) public override int NextPosition() { - Debugging.Assert(() => _posPending > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _posPending > 0); _posPending--; diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs index 7179fd5c7d..f1b0ec5770 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs @@ -150,7 +150,7 @@ public override BlockTermState NewTermState() public override void StartTerm() { - Debugging.Assert(() => _pendingCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _pendingCount == 0); } // TODO: -- should we NOT reuse across fields? would @@ -175,7 +175,7 @@ public override int SetField(FieldInfo fieldInfo) public override void StartDoc(int docId, int termDocFreq) { - Debugging.Assert(() => docId >= 0, () => "Got DocID=" + docId); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docId >= 0, () => "Got DocID=" + docId); if (_pendingCount == _pending.Length) { @@ -185,7 +185,7 @@ public override void StartDoc(int docId, int termDocFreq) if (_pendingCount != -1) { - Debugging.Assert(() => _pendingCount < _pending.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _pendingCount < _pending.Length); _currentDoc = _pending[_pendingCount]; _currentDoc.docID = docId; if (_indexOptions == IndexOptions.DOCS_ONLY) @@ -267,7 +267,7 @@ public override void FinishTerm(BlockTermState state) { var state2 = (PulsingTermState)state; - Debugging.Assert(() => _pendingCount > 0 || _pendingCount == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _pendingCount > 0 || _pendingCount == -1); if (_pendingCount == -1) { @@ -318,7 +318,7 @@ public override void FinishTerm(BlockTermState state) for (var posIDX = 0; posIDX < doc.termFreq; posIDX++) { var pos = _pending[pendingIDX++]; - Debugging.Assert(() => pos.docID == doc.docID); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos.docID == doc.docID); var posDelta = pos.pos - lastPos; lastPos = pos.pos; @@ -361,7 +361,7 @@ public override void FinishTerm(BlockTermState state) if (payloadLength > 0) { - Debugging.Assert(() => _storePayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _storePayloads); _buffer.WriteBytes(pos.payload.Bytes, 0, pos.payload.Length); } } @@ -375,7 +375,7 @@ public override void FinishTerm(BlockTermState state) Position doc = _pending[posIdx]; int delta = doc.docID - lastDocId; - Debugging.Assert(() => doc.termFreq != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => doc.termFreq != 0); if (doc.termFreq == 1) { @@ -411,7 +411,7 @@ public override void EncodeTerm(long[] empty, DataOutput output, FieldInfo field bool abs) { var 
_state = (PulsingTermState)state; - Debugging.Assert(() => empty.Length == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => empty.Length == 0); _absolute = _absolute || abs; if (_state.bytes == null) { @@ -469,7 +469,7 @@ protected override void Dispose(bool disposing) /// private void Push() { - Debugging.Assert(() => _pendingCount == _pending.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _pendingCount == _pending.Length); _wrappedPostingsWriter.StartTerm(); @@ -487,7 +487,7 @@ private void Push() } else if (doc.docID != pos.docID) { - Debugging.Assert(() => pos.docID > doc.docID); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos.docID > doc.docID); _wrappedPostingsWriter.FinishDoc(); doc = pos; _wrappedPostingsWriter.StartDoc(doc.docID, doc.termFreq); diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs index 5943d169ea..dad545a78a 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs @@ -263,7 +263,7 @@ public override DocsEnum Docs(FieldInfo fieldInfo, BlockTermState termState, IBi public override DocsAndPositionsEnum DocsAndPositions(FieldInfo fieldInfo, BlockTermState termState, IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { - Debugging.Assert(() => fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); SepTermState termState_ = (SepTermState)termState; SepDocsAndPositionsEnum postingsEnum; if (reuse == null || !(reuse is SepDocsAndPositionsEnum)) @@ -692,7 +692,7 @@ public override int NextPosition() { // Payload length has changed payloadLength = posReader.Next(); - Debugging.Assert(() => payloadLength >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength >= 0); } pendingPosCount--; position = 0; @@ -707,7 +707,7 @@ public override int NextPosition() { // Payload length has changed payloadLength = posReader.Next(); - Debugging.Assert(() => payloadLength >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength >= 0); } position += (int)(((uint)code) >> 1); pendingPayloadBytes += payloadLength; @@ -719,7 +719,7 @@ public override int NextPosition() } pendingPosCount--; - Debugging.Assert(() => pendingPosCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingPosCount >= 0); return position; } @@ -741,7 +741,7 @@ public override BytesRef GetPayload() return payload; } - Debugging.Assert(() => pendingPayloadBytes >= payloadLength); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingPayloadBytes >= payloadLength); if (pendingPayloadBytes > payloadLength) { diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs index 5f75ab4067..e460fde487 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs @@ -262,10 +262,10 @@ public override void StartDoc(int docID, int termDocFreq) /// Add a new position & payload. 
         public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
         {
-            Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
             int delta = position - lastPosition;
-            Debugging.Assert(() => delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it)
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it)
             lastPosition = position;
 
             if (storePayloads)
@@ -317,8 +317,11 @@ public override void FinishTerm(BlockTermState state)
         {
             SepTermState state_ = (SepTermState)state;
             // TODO: -- wasteful we are counting this in two places?
-            Debugging.Assert(() => state_.DocFreq > 0);
-            Debugging.Assert(() => state_.DocFreq == df);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => state_.DocFreq > 0);
+                Debugging.Assert(() => state_.DocFreq == df);
+            }
 
             state_.DocIndex = docOut.GetIndex();
             state_.DocIndex.CopyFrom(docIndex, false);
diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs
index 54528deae6..b7c50ef01a 100644
--- a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs
+++ b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs
@@ -195,7 +195,7 @@ protected override void SetLastSkipData(int level)
         protected override int ReadSkipData(int level, IndexInput skipStream)
         {
             int delta;
-            Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !currentFieldStoresPayloads);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !currentFieldStoresPayloads);
             if (currentFieldStoresPayloads)
             {
                 // the current field stores payloads. 
diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs b/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs
index 3cd61c1673..2ef7094542 100644
--- a/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs
+++ b/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs
@@ -178,7 +178,7 @@ protected override void WriteSkipData(int level, IndexOutput skipBuffer)
             // current payload length equals the length at the previous
             // skip point
 
-            Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !curStorePayloads);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !curStorePayloads);
 
             if (curStorePayloads)
             {
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
index 151afe8876..2123bd430c 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
@@ -76,25 +76,25 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext)
                 {
                     break;
                 }
-                Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.FIELD), () => scratch.Utf8ToString());
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.FIELD), () => scratch.Utf8ToString());
                 var fieldName = StripPrefix(SimpleTextDocValuesWriter.FIELD);
                 var field = new OneField();
                 fields[fieldName] = field;
 
                 ReadLine();
-                Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.TYPE), () => scratch.Utf8ToString());
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.TYPE), () => scratch.Utf8ToString());
 
                 var dvType = (DocValuesType)Enum.Parse(typeof(DocValuesType), StripPrefix(SimpleTextDocValuesWriter.TYPE));
-                // Debugging.Assert(() => dvType != null); // LUCENENET: Not possible for an enum to be null in .NET
+                // if (Debugging.AssertsEnabled) Debugging.Assert(() => dvType != null); // LUCENENET: Not possible for an enum to be null in .NET
                 if (dvType == DocValuesType.NUMERIC)
                 {
                     ReadLine();
-                    Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MINVALUE),
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MINVALUE),
                         () => "got " + scratch.Utf8ToString() + " field=" + fieldName + " ext=" + ext);
                     field.MinValue = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.MINVALUE), CultureInfo.InvariantCulture);
                     ReadLine();
-                    Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN));
                     field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN);
                     field.DataStartFilePointer = data.GetFilePointer();
                     data.Seek(data.GetFilePointer() + (1 + field.Pattern.Length + 2)*maxDoc);
@@ -102,10 +102,10 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext)
                 else if (dvType == DocValuesType.BINARY)
                 {
                     ReadLine();
-                    Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MAXLENGTH));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MAXLENGTH));
                     field.MaxLength = Convert.ToInt32(StripPrefix(SimpleTextDocValuesWriter.MAXLENGTH), CultureInfo.InvariantCulture);
                     ReadLine();
-                    Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN));
                     field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN);
                     field.DataStartFilePointer = data.GetFilePointer();
                     data.Seek(data.GetFilePointer() + (9 + field.Pattern.Length + field.MaxLength + 2)*maxDoc);
@@ -113,16 +113,16 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext)
                 else if (dvType == DocValuesType.SORTED || dvType == DocValuesType.SORTED_SET)
                 {
                     ReadLine();
-                    Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.NUMVALUES));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.NUMVALUES));
                     field.NumValues = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.NUMVALUES), CultureInfo.InvariantCulture);
                     ReadLine();
-                    Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MAXLENGTH));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MAXLENGTH));
                     field.MaxLength = Convert.ToInt32(StripPrefix(SimpleTextDocValuesWriter.MAXLENGTH), CultureInfo.InvariantCulture);
                     ReadLine();
-                    Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN));
                     field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN);
                     ReadLine();
-                    Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.ORDPATTERN));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.ORDPATTERN));
                     field.OrdPattern = StripPrefix(SimpleTextDocValuesWriter.ORDPATTERN);
                     field.DataStartFilePointer = data.GetFilePointer();
                     data.Seek(data.GetFilePointer() + (9 + field.Pattern.Length + field.MaxLength)*field.NumValues +
@@ -136,16 +136,19 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext)
 
             // We should only be called from above if at least one
             // field has DVs:
-            Debugging.Assert(() => fields.Count > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Count > 0);
         }
 
         public override NumericDocValues GetNumeric(FieldInfo fieldInfo)
         {
             var field = fields[fieldInfo.Name];
-            Debugging.Assert(() => field != null);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => field != null);
 
-            // SegmentCoreReaders already verifies this field is valid:
-            Debugging.Assert(() => field != null, () => "field=" + fieldInfo.Name + " fields=" + fields);
+                // SegmentCoreReaders already verifies this field is valid:
+                Debugging.Assert(() => field != null, () => "field=" + fieldInfo.Name + " fields=" + fields);
+            }
 
             var @in = (IndexInput)data.Clone();
             var scratch = new BytesRef();
@@ -241,7 +244,7 @@ public bool Get(int index)
         public override BinaryDocValues GetBinary(FieldInfo fieldInfo)
         {
             var field = fields[fieldInfo.Name];
-            Debugging.Assert(() => field != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null);
 
             var input = (IndexInput)data.Clone();
             var scratch = new BytesRef();
@@ -275,7 +278,7 @@ public override void Get(int docId, BytesRef result)
                     _input.Seek(_field.DataStartFilePointer + (9 + _field.Pattern.Length + _field.MaxLength + 2) * docId);
                     SimpleTextUtil.ReadLine(_input, _scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH));
                     int len;
                     try
                     {
@@ -332,7 +335,7 @@ public bool Get(int index)
                 {
                     _input.Seek(_field.DataStartFilePointer + (9 + _field.Pattern.Length + _field.MaxLength + 2) * index);
                     SimpleTextUtil.ReadLine(_input, _scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH));
                     int len;
                     try
                     {
@@ -365,7 +368,7 @@ public override SortedDocValues GetSorted(FieldInfo fieldInfo)
             var field = fields[fieldInfo.Name];
 
             // SegmentCoreReaders already verifies this field is valid:
-            Debugging.Assert(() => field != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null);
 
             var input = (IndexInput)data.Clone();
             var scratch = new BytesRef();
@@ -434,7 +437,7 @@ public override void LookupOrd(int ord, BytesRef result)
                 }
                 _input.Seek(_field.DataStartFilePointer + ord * (9 + _field.Pattern.Length + _field.MaxLength));
                 SimpleTextUtil.ReadLine(_input, _scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH),
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH),
                     () => "got " + _scratch.Utf8ToString() + " in=" + _input);
                 int len;
                 try
@@ -470,7 +473,7 @@ public override SortedSetDocValues GetSortedSet(FieldInfo fieldInfo)
 
             // SegmentCoreReaders already verifies this field is
             // valid:
-            Debugging.Assert(() => field != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null);
 
             var input = (IndexInput) data.Clone();
             var scratch = new BytesRef();
@@ -539,7 +542,7 @@ public override void LookupOrd(long ord, BytesRef result)
                 _input.Seek(_field.DataStartFilePointer + ord * (9 + _field.Pattern.Length + _field.MaxLength));
                 SimpleTextUtil.ReadLine(_input, _scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH),
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH),
                     () => "got " + _scratch.Utf8ToString() + " in=" + _input);
                 int len;
                 try
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
index 856baa2384..e7bc52218a 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
@@ -69,16 +69,19 @@ internal SimpleTextDocValuesWriter(SegmentWriteState state, string ext)
         /// 
         private bool FieldSeen(string field)
         {
-            Debugging.Assert(() => !_fieldsSeen.Contains(field), () => "field \"" + field + "\" was added more than once during flush");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => !_fieldsSeen.Contains(field), () => "field \"" + field + "\" was added more than once during flush");
             _fieldsSeen.Add(field);
             return true;
         }
 
         public override void AddNumericField(FieldInfo field, IEnumerable values)
         {
-            Debugging.Assert(() => FieldSeen(field.Name));
-            Debugging.Assert(() => field.DocValuesType == DocValuesType.NUMERIC ||
-                field.NormType == DocValuesType.NUMERIC);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => FieldSeen(field.Name));
+                Debugging.Assert(() => field.DocValuesType == DocValuesType.NUMERIC ||
+                    field.NormType == DocValuesType.NUMERIC);
+            }
             WriteFieldEntry(field, DocValuesType.NUMERIC);
 
             // first pass to find min/max
@@ -118,26 +121,29 @@ public override void AddNumericField(FieldInfo field, IEnumerable values)
             {
                 long value = n.GetValueOrDefault();
-                Debugging.Assert(() => value >= minValue);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => value >= minValue);
 
                 var delta = (decimal)value - (decimal)minValue; // LUCENENET specific - use decimal rather than BigInteger
                 string s = delta.ToString(patternString, CultureInfo.InvariantCulture);
-                Debugging.Assert(() => s.Length == patternString.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => s.Length == patternString.Length);
                 SimpleTextUtil.Write(data, s, scratch);
                 SimpleTextUtil.WriteNewline(data);
 
                 SimpleTextUtil.Write(data, n == null ? "F" : "T", scratch);
                 SimpleTextUtil.WriteNewline(data);
                 numDocsWritten++;
-                Debugging.Assert(() => numDocsWritten <= numDocs);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocsWritten <= numDocs);
             }
-            Debugging.Assert(() => numDocs == numDocsWritten, () => "numDocs=" + numDocs + " numDocsWritten=" + numDocsWritten);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs == numDocsWritten, () => "numDocs=" + numDocs + " numDocsWritten=" + numDocsWritten);
         }
 
         public override void AddBinaryField(FieldInfo field, IEnumerable values)
         {
-            Debugging.Assert(() => FieldSeen(field.Name));
-            Debugging.Assert(() => field.DocValuesType == DocValuesType.BINARY);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => FieldSeen(field.Name));
+                Debugging.Assert(() => field.DocValuesType == DocValuesType.BINARY);
+            }
 
             var maxLength = 0;
             foreach (var value in values)
@@ -192,13 +198,16 @@ public override void AddBinaryField(FieldInfo field, IEnumerable value
                 numDocsWritten++;
             }
-            Debugging.Assert(() => numDocs == numDocsWritten);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs == numDocsWritten);
         }
 
         public override void AddSortedField(FieldInfo field, IEnumerable values, IEnumerable docToOrd)
         {
-            Debugging.Assert(() => FieldSeen(field.Name));
-            Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => FieldSeen(field.Name));
+                Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED);
+            }
             WriteFieldEntry(field, DocValuesType.SORTED);
 
             int valueCount = 0;
@@ -268,10 +277,10 @@ public override void AddSortedField(FieldInfo field, IEnumerable value
                 }
                 SimpleTextUtil.WriteNewline(data);
                 valuesSeen++;
-                Debugging.Assert(() => valuesSeen <= valueCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => valuesSeen <= valueCount);
             }
 
-            Debugging.Assert(() => valuesSeen == valueCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => valuesSeen == valueCount);
 
             foreach (var ord in docToOrd)
             {
@@ -283,8 +292,11 @@ public override void AddSortedField(FieldInfo field, IEnumerable value
 
         public override void AddSortedSetField(FieldInfo field, IEnumerable values, IEnumerable docToOrdCount, IEnumerable ords)
         {
-            Debugging.Assert(() => FieldSeen(field.Name));
-            Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED_SET);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => FieldSeen(field.Name));
+                Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED_SET);
+            }
             WriteFieldEntry(field, DocValuesType.SORTED_SET);
 
             long valueCount = 0;
@@ -375,10 +387,10 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va
                 }
                 SimpleTextUtil.WriteNewline(data);
                 valuesSeen++;
-                Debugging.Assert(() => valuesSeen <= valueCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => valuesSeen <= valueCount);
             }
 
-            Debugging.Assert(() => valuesSeen == valueCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => valuesSeen == valueCount);
 
             using (var ordStream = ords.GetEnumerator())
             {
@@ -426,7 +438,7 @@ protected override void Dispose(bool disposing)
             var success = false;
             try
             {
-                Debugging.Assert(() => _fieldsSeen.Count > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => _fieldsSeen.Count > 0);
 
                 // java : sheisty to do this here?
                 SimpleTextUtil.Write(data, END);
                 SimpleTextUtil.WriteNewline(data);
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs
index 9c50ef3a21..e9a68b013f 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs
@@ -57,29 +57,29 @@ public override FieldInfos Read(Directory directory, string segmentName, string
             {
                 SimpleTextUtil.ReadLine(input, scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMFIELDS));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMFIELDS));
                 var size = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUMFIELDS.Length, scratch), CultureInfo.InvariantCulture);
                 var infos = new FieldInfo[size];
 
                 for (var i = 0; i < size; i++)
                 {
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NAME));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NAME));
                     string name = ReadString(SimpleTextFieldInfosWriter.NAME.Length, scratch);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMBER));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMBER));
                     int fieldNumber = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUMBER.Length, scratch), CultureInfo.InvariantCulture);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ISINDEXED));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ISINDEXED));
                     bool isIndexed = Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.ISINDEXED.Length, scratch), CultureInfo.InvariantCulture);
 
                     IndexOptions indexOptions;
                     if (isIndexed)
                     {
                         SimpleTextUtil.ReadLine(input, scratch);
-                        Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.INDEXOPTIONS));
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.INDEXOPTIONS));
                         indexOptions = (IndexOptions)Enum.Parse(typeof(IndexOptions), ReadString(SimpleTextFieldInfosWriter.INDEXOPTIONS.Length, scratch));
                     }
@@ -89,46 +89,46 @@ public override FieldInfos Read(Directory directory, string segmentName, string
                     }
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.STORETV));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.STORETV));
                     bool storeTermVector = Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.STORETV.Length, scratch), CultureInfo.InvariantCulture);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.PAYLOADS));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.PAYLOADS));
                     bool storePayloads = Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.PAYLOADS.Length, scratch), CultureInfo.InvariantCulture);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS));
                    bool omitNorms = !Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.NORMS.Length, scratch), CultureInfo.InvariantCulture);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS_TYPE));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS_TYPE));
                     string nrmType = ReadString(SimpleTextFieldInfosWriter.NORMS_TYPE.Length, scratch);
                     Index.DocValuesType normsType = DocValuesType(nrmType);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES));
                     string dvType = ReadString(SimpleTextFieldInfosWriter.DOCVALUES.Length, scratch);
                     Index.DocValuesType docValuesType = DocValuesType(dvType);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES_GEN));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES_GEN));
                     long dvGen = Convert.ToInt64(ReadString(SimpleTextFieldInfosWriter.DOCVALUES_GEN.Length, scratch), CultureInfo.InvariantCulture);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUM_ATTS));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUM_ATTS));
                     int numAtts = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUM_ATTS.Length, scratch), CultureInfo.InvariantCulture);
                     IDictionary atts = new Dictionary();
 
                     for (int j = 0; j < numAtts; j++)
                     {
                         SimpleTextUtil.ReadLine(input, scratch);
-                        Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_KEY));
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_KEY));
                         string key = ReadString(SimpleTextFieldInfosWriter.ATT_KEY.Length, scratch);
 
                         SimpleTextUtil.ReadLine(input, scratch);
-                        Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_VALUE));
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_VALUE));
                         string value = ReadString(SimpleTextFieldInfosWriter.ATT_VALUE.Length, scratch);
                         atts[key] = value;
                     }
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs
index 02b8cb2418..949c682a1b 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs
@@ -92,7 +92,7 @@ public override void Write(Directory directory, string segmentName, string segme
 
                 if (fi.IsIndexed)
                 {
-                    Debugging.Assert(() => fi.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                     SimpleTextUtil.Write(output, INDEXOPTIONS);
                     SimpleTextUtil.Write(output, fi.IndexOptions != IndexOptions.NONE ?
                         fi.IndexOptions.ToString() : string.Empty,
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
index a4432a82cd..7e0c8d01db 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs
@@ -159,7 +159,7 @@ public override SeekStatus SeekCeil(BytesRef text)
 
             public override BytesRef Next()
             {
-                //Debugging.Assert(() => !ended); // LUCENENET: Ended field is never set, so this can never fail
+                //if (Debugging.AssertsEnabled) Debugging.Assert(() => !ended); // LUCENENET: Ended field is never set, so this can never fail
                 var result = _fstEnum.Next();
                 if (result == null)
                     return null;
@@ -315,7 +315,7 @@ public override int NextDoc()
                     }
                     else
                     {
-                        Debugging.Assert(
+                        if (Debugging.AssertsEnabled) Debugging.Assert(
                             () => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || // LUCENENET TODO: This assert fails sometimes, which in turns causes _scratch.Utf8ToString() to throw an index out of range exception
                             StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END), () => "scratch=" + _scratch.Utf8ToString());
@@ -446,7 +446,7 @@ public override int NextDoc()
                     }
                     else
                     {
-                        Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) ||
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) ||
                             StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END));
 
                         if (!first && (_liveDocs == null || _liveDocs.Get(_docId)))
@@ -472,7 +472,7 @@ public override int NextPosition()
                 if (_readPositions)
                 {
                     SimpleTextUtil.ReadLine(_in, _scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS), () => "got line=" + _scratch.Utf8ToString());
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS), () => "got line=" + _scratch.Utf8ToString());
                     UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.POS.Length, _scratch.Length - SimpleTextFieldsWriter.POS.Length,
                         _scratchUtf162);
                     pos = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length);
@@ -485,12 +485,12 @@ public override int NextPosition()
                 if (_readOffsets)
                 {
                     SimpleTextUtil.ReadLine(_in, _scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET), () => "got line=" + _scratch.Utf8ToString());
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET), () => "got line=" + _scratch.Utf8ToString());
                     UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.START_OFFSET.Length,
                         _scratch.Length - SimpleTextFieldsWriter.START_OFFSET.Length, _scratchUtf162);
                     _startOffset = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length);
                     SimpleTextUtil.ReadLine(_in, _scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET), () => "got line=" + _scratch.Utf8ToString());
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET), () => "got line=" + _scratch.Utf8ToString());
                     UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset +
                         SimpleTextFieldsWriter.END_OFFSET.Length, _scratch.Length - SimpleTextFieldsWriter.END_OFFSET.Length, _scratchUtf162);
                     _endOffset = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length);
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs
index 91562fc1fa..31b85d931e 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs
@@ -163,9 +163,12 @@ public override void AddPosition(int position, BytesRef payload, int startOffset
 
             if (_writeOffsets)
             {
-                Debugging.Assert(() => endOffset >= startOffset);
-                Debugging.Assert(() => startOffset >= _lastStartOffset,
-                    () => "startOffset=" + startOffset + " lastStartOffset=" + _lastStartOffset);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => endOffset >= startOffset);
+                    Debugging.Assert(() => startOffset >= _lastStartOffset,
+                        () => "startOffset=" + startOffset + " lastStartOffset=" + _lastStartOffset);
+                }
                 _lastStartOffset = startOffset;
                 _outerInstance.Write(START_OFFSET);
                 _outerInstance.Write(Convert.ToString(startOffset, CultureInfo.InvariantCulture));
@@ -177,7 +180,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset
 
             if (payload != null && payload.Length > 0)
             {
-                Debugging.Assert(() => payload.Length != 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => payload.Length != 0);
                 _outerInstance.Write(PAYLOAD);
                 _outerInstance.Write(payload);
                 _outerInstance.Newline();
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs
index b7245f7318..0c2c553810 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs
@@ -67,7 +67,7 @@ public override IMutableBits NewLiveDocs(IBits existing)
 
         public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOContext context)
         {
-            Debugging.Assert(() => info.HasDeletions);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => info.HasDeletions);
             var scratch = new BytesRef();
             var scratchUtf16 = new CharsRef();
@@ -80,7 +80,7 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont
                 input = dir.OpenChecksumInput(fileName, context);
 
                 SimpleTextUtil.ReadLine(input, scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(scratch, SIZE));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SIZE));
                 var size = ParseInt32At(scratch, SIZE.Length, scratchUtf16);
 
                 var bits = new BitSet(size);
@@ -88,7 +88,7 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont
                 SimpleTextUtil.ReadLine(input, scratch);
                 while (!scratch.Equals(END))
                 {
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, DOC));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, DOC));
                     var docid = ParseInt32At(scratch, DOC.Length, scratchUtf16);
                     bits.Set(docid);
                     SimpleTextUtil.ReadLine(input, scratch);
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs
index 8be96c18c3..8fdf142f1e 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs
@@ -53,43 +53,43 @@ public override SegmentInfo Read(Directory directory, string segmentName, IOCont
             try
             {
                 SimpleTextUtil.ReadLine(input, scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_VERSION));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_VERSION));
                 string version = ReadString(SimpleTextSegmentInfoWriter.SI_VERSION.Length, scratch);
 
                 SimpleTextUtil.ReadLine(input, scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DOCCOUNT));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DOCCOUNT));
                 int docCount = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_DOCCOUNT.Length, scratch), CultureInfo.InvariantCulture);
 
                 SimpleTextUtil.ReadLine(input, scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_USECOMPOUND));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_USECOMPOUND));
                 bool isCompoundFile = Convert.ToBoolean(ReadString(SimpleTextSegmentInfoWriter.SI_USECOMPOUND.Length, scratch), CultureInfo.InvariantCulture);
 
                 SimpleTextUtil.ReadLine(input, scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_DIAG));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_DIAG));
                 int numDiag = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_NUM_DIAG.Length, scratch), CultureInfo.InvariantCulture);
                 IDictionary diagnostics = new Dictionary();
 
                 for (int i = 0; i < numDiag; i++)
                 {
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_KEY));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_KEY));
                     string key = ReadString(SimpleTextSegmentInfoWriter.SI_DIAG_KEY.Length, scratch);
 
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_VALUE));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_VALUE));
                     string value = ReadString(SimpleTextSegmentInfoWriter.SI_DIAG_VALUE.Length, scratch);
                     diagnostics[key] = value;
                 }
 
                 SimpleTextUtil.ReadLine(input, scratch);
-                Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_FILES));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_FILES));
                 int numFiles = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_NUM_FILES.Length, scratch), CultureInfo.InvariantCulture);
                 var files = new JCG.HashSet();
 
                 for (int i = 0; i < numFiles; i++)
                 {
                     SimpleTextUtil.ReadLine(input, scratch);
-                    Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_FILE));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_FILE));
                     string fileName = ReadString(SimpleTextSegmentInfoWriter.SI_FILE.Length, scratch);
                     files.Add(fileName);
                 }
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs
index ac3e8d7727..c17061f9f1 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs
@@ -113,26 +113,26 @@ private void ReadIndex(int size)
                 }
             }
             SimpleTextUtil.CheckFooter(input);
-            Debugging.Assert(() => upto == _offsets.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == _offsets.Length);
         }
 
         public override void VisitDocument(int n, StoredFieldVisitor visitor)
         {
             _input.Seek(_offsets[n]);
             ReadLine();
-            Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NUM));
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NUM));
             var numFields = ParseInt32At(SimpleTextStoredFieldsWriter.NUM.Length);
 
             for (var i = 0; i < numFields; i++)
             {
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.FIELD));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.FIELD));
                 int fieldNumber = ParseInt32At(SimpleTextStoredFieldsWriter.FIELD.Length);
                 FieldInfo fieldInfo = _fieldInfos.FieldInfo(fieldNumber);
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NAME));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NAME));
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.TYPE));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.TYPE));
 
                 BytesRef type;
                 if (EqualsAt(SimpleTextStoredFieldsWriter.TYPE_STRING, _scratch, SimpleTextStoredFieldsWriter.TYPE.Length))
@@ -171,7 +171,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor)
                         break;
                     case StoredFieldVisitor.Status.NO:
                         ReadLine();
-                        Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE));
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE));
                         break;
                     case StoredFieldVisitor.Status.STOP:
                         return;
@@ -182,7 +182,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor)
         private void ReadField(BytesRef type, FieldInfo fieldInfo, StoredFieldVisitor visitor)
         {
             ReadLine();
-            Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE));
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE));
             if (Equals(type, SimpleTextStoredFieldsWriter.TYPE_STRING))
             {
                 visitor.StringField(fieldInfo,
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
index 95abd7f913..71f23ab226 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
@@ -109,7 +109,7 @@ private void ReadIndex(int maxDoc)
                 }
             }
             SimpleTextUtil.CheckFooter(input);
-            Debugging.Assert(() => upto == _offsets.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == _offsets.Length);
         }
 
         public override Fields Get(int doc)
@@ -119,7 +119,7 @@ public override Fields Get(int doc)
             _input.Seek(_offsets[doc]);
             ReadLine();
-            Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.NUMFIELDS));
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.NUMFIELDS));
             var numFields = ParseInt32At(SimpleTextTermVectorsWriter.NUMFIELDS.Length);
             if (numFields == 0)
             {
@@ -128,28 +128,28 @@ public override Fields Get(int doc)
             for (var i = 0; i < numFields; i++)
             {
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELD));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELD));
                 // skip fieldNumber:
                 ParseInt32At(SimpleTextTermVectorsWriter.FIELD.Length);
 
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDNAME));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDNAME));
                 var fieldName = ReadString(SimpleTextTermVectorsWriter.FIELDNAME.Length, _scratch);
 
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPOSITIONS));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPOSITIONS));
                 var positions = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDPOSITIONS.Length, _scratch), CultureInfo.InvariantCulture);
 
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDOFFSETS));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDOFFSETS));
                 var offsets = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDOFFSETS.Length, _scratch), CultureInfo.InvariantCulture);
 
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPAYLOADS));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPAYLOADS));
                 var payloads = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDPAYLOADS.Length, _scratch), CultureInfo.InvariantCulture);
 
                 ReadLine();
-                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDTERMCOUNT));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDTERMCOUNT));
                 var termCount = ParseInt32At(SimpleTextTermVectorsWriter.FIELDTERMCOUNT.Length);
 
                 var terms = new SimpleTVTerms(offsets, positions, payloads);
@@ -158,7 +158,7 @@ public override Fields Get(int doc)
                 for (var j = 0; j < termCount; j++)
                 {
                     ReadLine();
-                    Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMTEXT));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMTEXT));
                     var term = new BytesRef();
                     var termLength = _scratch.Length - SimpleTextTermVectorsWriter.TERMTEXT.Length;
                     term.Grow(termLength);
@@ -169,7 +169,7 @@ public override Fields Get(int doc)
                     terms.terms.Add(term, postings);
 
                     ReadLine();
-                    Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMFREQ));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMFREQ));
                     postings.freq = ParseInt32At(SimpleTextTermVectorsWriter.TERMFREQ.Length);
 
                     if (!positions && !offsets) continue;
@@ -194,12 +194,12 @@ public override Fields Get(int doc)
                         if (positions)
                         {
                             ReadLine();
-                            Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.POSITION));
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.POSITION));
                             postings.positions[k] = ParseInt32At(SimpleTextTermVectorsWriter.POSITION.Length);
 
                             if (payloads)
                             {
                                 ReadLine();
-                                Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.PAYLOAD));
+                                if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.PAYLOAD));
                                 if (_scratch.Length - SimpleTextTermVectorsWriter.PAYLOAD.Length == 0)
                                 {
                                     postings.payloads[k] = null;
@@ -217,11 +217,11 @@ public override Fields Get(int doc)
                             if (!offsets) continue;
 
                             ReadLine();
-                            Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.STARTOFFSET));
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.STARTOFFSET));
                             postings.startOffsets[k] = ParseInt32At(SimpleTextTermVectorsWriter.STARTOFFSET.Length);
 
                             ReadLine();
-                            Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.ENDOFFSET));
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.ENDOFFSET));
                             postings.endOffsets[k] = ParseInt32At(SimpleTextTermVectorsWriter.ENDOFFSET.Length);
                         }
                     }
@@ -444,7 +444,7 @@ public override int Freq
         {
             get
             {
-                Debugging.Assert(() => _freqRenamed != -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => _freqRenamed != -1);
                 return _freqRenamed;
             }
         }
@@ -495,7 +495,7 @@ public override int Freq
                 if (_positions != null)
                     return _positions.Length;
 
-                Debugging.Assert(() => _startOffsets != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => _startOffsets != null);
                 return _startOffsets.Length;
             }
         }
@@ -540,7 +540,7 @@ public override BytesRef GetPayload()
 
         public override int NextPosition()
         {
-            Debugging.Assert(() => (_positions != null && _nextPos < _positions.Length) ||
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => (_positions != null && _nextPos < _positions.Length) ||
                 _startOffsets != null && _nextPos < _startOffsets.Length);
             if (_positions != null)
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs
index 011029e3ce..b2dd7efb5e 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs
@@ -143,7 +143,7 @@ public override void StartTerm(BytesRef term, int freq)
 
         public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload)
         {
-            Debugging.Assert(() => _positions || _offsets);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => _positions || _offsets);
 
             if (_positions)
             {
@@ -156,7 +156,7 @@ public override void AddPosition(int position, int startOffset, int endOffset, B
                 Write(PAYLOAD);
                 if (payload != null)
                 {
-                    Debugging.Assert(() => payload.Length > 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => payload.Length > 0);
                     Write(payload);
                 }
                 NewLine();
diff --git a/src/Lucene.Net.Expressions/ExpressionComparator.cs b/src/Lucene.Net.Expressions/ExpressionComparator.cs
index e187adfd7d..4905386464 100644
--- a/src/Lucene.Net.Expressions/ExpressionComparator.cs
+++ b/src/Lucene.Net.Expressions/ExpressionComparator.cs
@@ -49,11 +49,11 @@ public override void SetScorer(Scorer scorer)
             base.SetScorer(scorer);
             // TODO: might be cleaner to lazy-init 'source' and set scorer after?
-            Debugging.Assert(() => readerContext != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => readerContext != null);
             try
             {
                 var context = new Dictionary();
-                Debugging.Assert(() => scorer != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => scorer != null);
                 context["scorer"] = scorer;
                 scores = source.GetValues(context, readerContext);
             }
diff --git a/src/Lucene.Net.Expressions/ScoreFunctionValues.cs b/src/Lucene.Net.Expressions/ScoreFunctionValues.cs
index dd1b052704..a42d4ecf85 100644
--- a/src/Lucene.Net.Expressions/ScoreFunctionValues.cs
+++ b/src/Lucene.Net.Expressions/ScoreFunctionValues.cs
@@ -43,7 +43,7 @@ public override double DoubleVal(int document)
         {
             try
             {
-                Debugging.Assert(() => document == scorer.DocID);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => document == scorer.DocID);
                 return scorer.GetScore();
             }
             catch (IOException exception)
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs
index 65e9c52d85..7d26a0e8d6 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -86,7 +86,7 @@ internal DrillDownQuery(FacetsConfig config, Filter filter, DrillDownQuery other
             {
                 throw new ArgumentException("cannot apply filter unless baseQuery isn't null; pass ConstantScoreQuery instead");
             }
-            Debugging.Assert(() => clauses.Length == 1 + other.drillDownDims.Count, () => clauses.Length + " vs " + (1 + other.drillDownDims.Count));
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => clauses.Length == 1 + other.drillDownDims.Count, () => clauses.Length + " vs " + (1 + other.drillDownDims.Count));
             drillDownDims.PutAll(other.drillDownDims);
             query.Add(new FilteredQuery(clauses[0].Query, filter), Occur.MUST);
             for (int i = 1; i < clauses.Length; i++)
diff --git a/src/Lucene.Net.Facet/DrillSideways.cs b/src/Lucene.Net.Facet/DrillSideways.cs
index 65d4c10135..b144bb2d95 100644
--- a/src/Lucene.Net.Facet/DrillSideways.cs
+++ b/src/Lucene.Net.Facet/DrillSideways.cs
@@ -175,7 +175,7 @@ public virtual DrillSidewaysResult Search(DrillDownQuery query, ICollector hitCo
             }
             else
             {
-                Debugging.Assert(() => clauses.Length == 1 + drillDownDims.Count);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => clauses.Length == 1 + drillDownDims.Count);
                 baseQuery = clauses[0].Query;
                 startClause = 1;
             }
diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
index 23ed7d6f5f..ccbc0c2f49 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
@@ -89,7 +89,7 @@ public override bool Score(ICollector collector, int maxDoc)
             // TODO: if we ever allow null baseScorer ... it will
             // mean we DO score docs out of order ... hmm, or if we
             // change up the order of the conjuntions below
-            Debugging.Assert(() => baseScorer != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => baseScorer != null);
 
             // some scorers, eg ReqExlScorer, can hit NPE if cost is called after nextDoc
             long baseQueryCost = baseScorer.GetCost();
@@ -395,7 +395,7 @@ private void DoDrillDownAdvanceScoring(ICollector collector, DocIdSetIterator[]
                 while (slot0 < CHUNK && (slot0 = seen.NextSetBit(slot0)) != -1)
                 {
                     int ddDocID = docIDs[slot0];
-                    Debugging.Assert(() => ddDocID != -1);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => ddDocID != -1);
 
                     int baseDocID = baseScorer.DocID;
                     if (baseDocID < ddDocID)
@@ -550,7 +550,7 @@ private void DoUnionScoring(ICollector collector, DocIdSetIterator[] disis, ICol
                     //}
 
                     // Mark slot as valid:
-                    Debugging.Assert(() => docIDs[slot] != docID, () => "slot=" + slot + " docID=" + docID);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => docIDs[slot] != docID, () => "slot=" + slot + " docID=" + docID);
                     docIDs[slot] = docID;
                     scores[slot] = baseScorer.GetScore();
                     filledSlots[filledCount++] = slot;
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index 2f5610c285..b0f0f0e293 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -694,7 +694,7 @@ public static string[] StringToPath(string s)
                 }
             }
             parts.Add(new string(buffer, 0, upto));
-            Debugging.Assert(() => !lastEscape);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => !lastEscape);
             return parts.ToArray();
         }
     }
diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
index fa124e8c9d..0641fc6db2 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
@@ -120,7 +120,7 @@ public Int64RangeCounter(Int64Range[] ranges)
                 }
                 else
                 {
-                    Debugging.Assert(() => flags == 2);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => flags == 2);
                     // This point is only the end of an interval; attach
                     // it to last interval:
                     elementaryIntervals.Add(new InclusiveRange(prev, v));
@@ -275,7 +275,7 @@ private sealed class InclusiveRange
 
             public InclusiveRange(long start, long end)
            {
-                Debugging.Assert(() => end >= start);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => end >= start);
                 this.Start = start;
                 this.End = end;
             }
@@ -349,7 +349,7 @@ internal void AddOutputs(int index, Int64Range range)
                 }
                 else if (left != null)
                 {
-                    Debugging.Assert(() => right != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => right != null);
                     // Recurse:
                     left.AddOutputs(index, range);
                     right.AddOutputs(index, range);
@@ -361,7 +361,7 @@ internal void ToString(StringBuilder sb, int depth)
                 Indent(sb, depth);
                 if (left == null)
                 {
-                    Debugging.Assert(() => right == null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => right == null);
                     sb.Append("leaf: " + start + " to " + end);
                 }
                 else
@@ -377,7 +377,7 @@ internal void ToString(StringBuilder sb, int depth)
 
                 if (left != null)
                 {
-                    Debugging.Assert(() => right != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => right != null);
                     left.ToString(sb, depth + 1);
                     right.ToString(sb, depth + 1);
                 }
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index 9033c19cf0..92f5d57e41 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -65,7 +65,7 @@ private CategoryPath(CategoryPath copyFrom, int prefixLen)
             // while the code which calls this method is safe, at some point a test
             // tripped on AIOOBE in toString, but we failed to reproduce. adding the
             // assert as a safety check.
-            Debugging.Assert(() => prefixLen > 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => prefixLen > 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length);
             this.Components = copyFrom.Components;
             Length = prefixLen;
         }
@@ -75,7 +75,7 @@ private CategoryPath(CategoryPath copyFrom, int prefixLen)
         /// 
         public CategoryPath(params string[] components)
         {
-            Debugging.Assert(() => components.Length > 0, () => "use CategoryPath.EMPTY to create an empty path");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => components.Length > 0, () => "use CategoryPath.EMPTY to create an empty path");
             foreach (string comp in components)
             {
                 if (string.IsNullOrEmpty(comp))
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index d9846c81f5..52df74c584 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -190,7 +190,7 @@ public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode,
             // verify (to some extent) that merge policy in effect would preserve category docids
             if (indexWriter != null)
             {
-                Debugging.Assert(() => !(indexWriter.Config.MergePolicy is TieredMergePolicy), () => "for preserving category docids, merging none-adjacent segments is not allowed");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => !(indexWriter.Config.MergePolicy is TieredMergePolicy), () => "for preserving category docids, merging none-adjacent segments is not allowed");
             }
 
             // after we opened the writer, and the index is locked, it's safe to check
@@ -826,7 +826,7 @@ public virtual void SetCacheMissesUntilFill(int i)
                         FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(t.Utf8ToString()));
                         docsEnum = termsEnum.Docs(null, docsEnum, DocsFlags.NONE);
                         bool res = cache.Put(cp, docsEnum.NextDoc() + ctx.DocBase);
-                        Debugging.Assert(() => !res, () => "entries should not have been evicted from the cache");
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => !res, () => "entries should not have been evicted from the cache");
                     }
                     else
                     {
@@ -907,7 +907,7 @@ public virtual int GetParent(int ordinal)
             }
 
             int[] parents = GetTaxoArrays().Parents;
-            Debugging.Assert(() => ordinal < parents.Length, () => "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => ordinal < parents.Length, () => "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !");
             return parents[ordinal];
         }
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
index 10170b98f9..52e71c9389 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
@@ -77,7 +77,7 @@ public TaxonomyIndexArrays(IndexReader reader)
 
         public TaxonomyIndexArrays(IndexReader reader, TaxonomyIndexArrays copyFrom)
         {
-            Debugging.Assert(() => copyFrom != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => copyFrom != null);
 
             // note that copyParents.length may be equal to reader.maxDoc(). this is not a bug
             // it may be caused if e.g. the taxonomy segments were merged, and so an updated
diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
index 3c8aaa1e23..c9bc017fec 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
@@ -68,7 +68,7 @@ private FacetLabel(FacetLabel copyFrom, int prefixLen)
             // while the code which calls this method is safe, at some point a test
             // tripped on AIOOBE in toString, but we failed to reproduce. adding the
             // assert as a safety check.
-            Debugging.Assert(() => prefixLen >= 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => prefixLen >= 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length);
             this.Components = copyFrom.Components;
             Length = prefixLen;
         }
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
index ca0976dbda..b71ed1fa44 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
@@ -58,7 +58,7 @@ protected virtual void Rollup()
                 if (ft.IsHierarchical && ft.IsMultiValued == false)
                 {
                     int dimRootOrd = m_taxoReader.GetOrdinal(new FacetLabel(dim));
-                    Debugging.Assert(() => dimRootOrd > 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => dimRootOrd > 0);
                     m_values[dimRootOrd] += Rollup(m_children[dimRootOrd]);
                 }
             }
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index de702f8f79..fa4cec5d35 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -143,7 +143,7 @@ protected TaxonomyReader() // LUCENENET specific - marked protected instead of p
         public static T OpenIfChanged(T oldTaxoReader) where T : TaxonomyReader
         {
             T newTaxoReader = (T)oldTaxoReader.DoOpenIfChanged();
-            Debugging.Assert(() => newTaxoReader != oldTaxoReader);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => newTaxoReader != oldTaxoReader);
             return newTaxoReader;
         }
diff --git a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
index fbc983ad35..2e39fefda1 100644
--- a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
+++ b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs
@@ -248,7 +248,7 @@ public virtual void Collect(int doc)
                     bottomGroup = m_orderedGroups.Last();
                     m_orderedGroups.Remove(bottomGroup);
                 }
-                Debugging.Assert(() => m_orderedGroups.Count == topNGroups - 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count == topNGroups - 1);
 
                 groupMap.Remove(bottomGroup.GroupValue);
@@ -263,7 +263,7 @@ public virtual void Collect(int doc)
                 groupMap[bottomGroup.GroupValue] = bottomGroup;
                 m_orderedGroups.Add(bottomGroup);
topNGroups); int lastComparerSlot = m_orderedGroups.Last().ComparerSlot; foreach (FieldComparer fc in comparers) @@ -315,7 +315,7 @@ public virtual void Collect(int doc) prevLast = m_orderedGroups.Last(); m_orderedGroups.Remove(group); } - Debugging.Assert(() => m_orderedGroups.Count == topNGroups - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count == topNGroups - 1); } else { @@ -333,7 +333,7 @@ public virtual void Collect(int doc) if (m_orderedGroups != null) { m_orderedGroups.Add(group); - Debugging.Assert(() => m_orderedGroups.Count == topNGroups); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count == topNGroups); var newLast = m_orderedGroups.Last(); // If we changed the value of the last group, or changed which group was last, then update bottom: if (group == newLast || prevLast != newLast) @@ -376,7 +376,7 @@ private void BuildSortedSet() var comparer = new BuildSortedSetComparer(this); m_orderedGroups = new JCG.SortedSet>(comparer); m_orderedGroups.UnionWith(groupMap.Values); - Debugging.Assert(() => m_orderedGroups.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count > 0); foreach (FieldComparer fc in comparers) { diff --git a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs index c5e14e8123..293475449d 100644 --- a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs @@ -153,8 +153,11 @@ protected internal override bool LessThan(OneGroup group1, OneGroup group2) { //System.out.println(" ltcheck"); - Debugging.Assert(() => group1 != group2); - Debugging.Assert(() => group1.comparerSlot != group2.comparerSlot); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => group1 != group2); + Debugging.Assert(() => group1.comparerSlot != group2.comparerSlot); + } int numComparers = outerInstance.comparers.Length; for (int compIDX = 0; compIDX < numComparers; compIDX++) @@ -221,7 +224,7 @@ private void ProcessGroup() { // Replace bottom element in PQ and then updateTop OneGroup og = groupQueue.Top; - Debugging.Assert(() => og != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => og != null); og.count = subDocUpto; og.topGroupDoc = docBase + topGroupDoc; // Swap pending docs @@ -521,7 +524,7 @@ public virtual void Collect(int doc) { if (subDocUpto == 1) { - Debugging.Assert(() => !queueFull); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !queueFull); //System.out.println(" init copy to bottomSlot=" + bottomSlot); foreach (FieldComparer fc in comparers) diff --git a/src/Lucene.Net.Grouping/SearchGroup.cs b/src/Lucene.Net.Grouping/SearchGroup.cs index 540bdd5676..32f47568ac 100644 --- a/src/Lucene.Net.Grouping/SearchGroup.cs +++ b/src/Lucene.Net.Grouping/SearchGroup.cs @@ -106,12 +106,12 @@ public ShardIter(IEnumerable> shard, int shardIndex) { this.shardIndex = shardIndex; iter = shard.GetEnumerator(); - //Debugging.Assert(iter.hasNext()); // No reasonable way to do this in .NET + //if (Debugging.AssertsEnabled) Debugging.Assert(iter.hasNext()); // No reasonable way to do this in .NET } public ISearchGroup Next() { - //Debugging.Assert(iter.hasNext()); // No reasonable way to do this in .NET + //if (Debugging.AssertsEnabled) Debugging.Assert(iter.hasNext()); // No reasonable way to do this in .NET ISearchGroup group = iter.Current; if (group.SortValues == null) { @@ -186,12 +186,12 @@ private bool NeverEquals(object other) { if (groupValue == null) { - 
Debugging.Assert(() => otherMergedGroup.groupValue != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => otherMergedGroup.groupValue != null); } else { - Debugging.Assert(() => !groupValueIsValueType + if (Debugging.AssertsEnabled) Debugging.Assert(() => !groupValueIsValueType ? JCG.EqualityComparer.Default.Equals(groupValue, otherMergedGroup.groupValue) // LUCENENET specific - use J2N.Collections.StructuralEqualityComparer.Default.Equals() if we have a reference type @@ -206,7 +206,7 @@ public override bool Equals(object other) { // We never have another MergedGroup instance with // same groupValue - Debugging.Assert(() => NeverEquals(other)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => NeverEquals(other)); if (other is MergedGroup otherMergedGroup) { @@ -294,7 +294,7 @@ public virtual int Compare(MergedGroup group, MergedGroup other) } // Tie break by min shard index: - Debugging.Assert(() => group.MinShardIndex != other.MinShardIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(() => group.MinShardIndex != other.MinShardIndex); return group.MinShardIndex - other.MinShardIndex; } } @@ -327,7 +327,7 @@ private void UpdateNextGroup(int topN, ShardIter shard) //System.out.println(" new"); mergedGroup = new MergedGroup(group.GroupValue); mergedGroup.MinShardIndex = shard.ShardIndex; - Debugging.Assert(() => group.SortValues != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => group.SortValues != null); mergedGroup.TopValues = group.SortValues; groupsSeen[group.GroupValue] = mergedGroup; mergedGroup.IsInQueue = true; diff --git a/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs b/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs index 5b4ddbd25e..2d65424f42 100644 --- a/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs +++ b/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs @@ -175,7 +175,7 @@ public override void SetNextReader(AtomicReaderContext context) BytesRef facetEndPrefix = BytesRef.DeepCopyOf(m_facetPrefix); facetEndPrefix.Append(UnicodeUtil.BIG_TERM); m_endFacetOrd = facetFieldTermsIndex.LookupTerm(facetEndPrefix); - Debugging.Assert(() => m_endFacetOrd < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_endFacetOrd < 0); m_endFacetOrd = -m_endFacetOrd - 1; // Points to the ord one higher than facetEndPrefix } else @@ -203,7 +203,7 @@ internal SegmentResult(int[] counts, int total, TermsEnum tenum, int startFacetO this.m_mergePos = startFacetOrd == -1 ? 1 : startFacetOrd + 1; if (m_mergePos < m_maxTermPos) { - Debugging.Assert(() => tenum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => tenum != null); tenum.SeekExact(startFacetOrd == -1 ? 
0 : startFacetOrd); m_mergeTerm = tenum.Term; } diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs index 37ed39207b..ffc4b8e9f6 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs @@ -292,7 +292,7 @@ public override int StartOffset { get { - Debugging.Assert(() => currentStartOffset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentStartOffset >= 0); return currentStartOffset; } } @@ -301,7 +301,7 @@ public override int EndOffset { get { - Debugging.Assert(() => currentEndOffset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentEndOffset >= 0); return currentEndOffset; } } diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs index 920c9d1d7f..22805b2553 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs @@ -43,7 +43,7 @@ public sealed class Passage internal void AddMatch(int startOffset, int endOffset, BytesRef term) { - Debugging.Assert(() => startOffset >= this.startOffset && startOffset <= this.endOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset >= this.startOffset && startOffset <= this.endOffset); if (numMatches == matchStarts.Length) { int newLength = ArrayUtil.Oversize(numMatches + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF); @@ -57,7 +57,7 @@ internal void AddMatch(int startOffset, int endOffset, BytesRef term) matchEnds = newMatchEnds; matchTerms = newMatchTerms; } - Debugging.Assert(() => matchStarts.Length == matchEnds.Length && matchEnds.Length == matchTerms.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => matchStarts.Length == matchEnds.Length && matchEnds.Length == matchTerms.Length); matchStarts[numMatches] = startOffset; matchEnds[numMatches] = endOffset; matchTerms[numMatches] = term; diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs index 7244c75b6c..b5b5a8abc5 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs @@ -542,7 +542,7 @@ private IDictionary HighlightField(string field, string[] contents, AtomicReaderContext subContext = leaves[leaf]; AtomicReader r = subContext.AtomicReader; - Debugging.Assert(() => leaf >= lastLeaf); // increasing order + if (Debugging.AssertsEnabled) Debugging.Assert(() => leaf >= lastLeaf); // increasing order // if the segment has changed, we must initialize new enums. if (leaf != lastLeaf) @@ -671,7 +671,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength // LUCENE-5166: this hit would span the content limit... however more valid // hits may exist (they are sorted by start). so we pretend like we never // saw this term, it won't cause a passage to be added to passageQueue or anything. 
- Debugging.Assert(() => EMPTY.StartOffset == int.MaxValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => EMPTY.StartOffset == int.MaxValue); if (start < contentLength && end > contentLength) { continue; @@ -714,7 +714,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength return passages; } // advance breakiterator - Debugging.Assert(() => BreakIterator.Done < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => BreakIterator.Done < 0); current.startOffset = Math.Max(bi.Preceding(start + 1), 0); current.endOffset = Math.Min(bi.Next(), contentLength); } @@ -727,7 +727,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength { // multitermquery match, pull from payload term = off.dp.GetPayload(); - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); } current.AddMatch(start, end, term); if (off.pos == dp.Freq) @@ -751,7 +751,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength } // Dead code but compiler disagrees: - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); return null; } @@ -766,7 +766,7 @@ protected virtual Passage[] GetEmptyHighlight(string fieldName, BreakIterator bi // BreakIterator should be un-next'd: List passages = new List(); int pos = bi.Current; - Debugging.Assert(() => pos == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos == 0); while (passages.Count < maxPassages) { int next = bi.Next(); @@ -883,7 +883,7 @@ private class LimitedStoredFieldVisitor : StoredFieldVisitor public LimitedStoredFieldVisitor(string[] fields, char[] valueSeparators, int maxLength) { - Debugging.Assert(() => fields.Length == valueSeparators.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Length == valueSeparators.Length); this.fields = fields; this.valueSeparators = valueSeparators; this.maxLength = maxLength; @@ -896,7 +896,7 @@ public LimitedStoredFieldVisitor(string[] fields, char[] valueSeparators, int ma public override void StringField(Index.FieldInfo fieldInfo, string value) { - Debugging.Assert(() => currentField >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentField >= 0); StringBuilder builder = builders[currentField]; if (builder.Length > 0 && builder.Length < maxLength) { diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs index 60f133f943..7f656b8f1d 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs @@ -142,7 +142,7 @@ public IteratorQueue(IEnumerator iter) { this.iter = iter; T removeTop = RemoveTop(); - Debugging.Assert(() => removeTop == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => removeTop == null); } public T Top() diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs index 1b5798dcf5..98185988fc 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs @@ -140,7 +140,7 @@ public FieldTermStack(IndexReader reader, int docId, string fieldName, FieldQuer TermInfo current = termList[i]; if (current.Position == currentPos) { - Debugging.Assert(() => previous != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => previous != null); 
previous.SetNext(current); previous = current; //iterator.Remove(); diff --git a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs index 02974b5eaa..09d424a804 100644 --- a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs @@ -246,7 +246,7 @@ public override int NextDoc() } } - Debugging.Assert(() => _childDoc < _parentDoc, () => "childDoc=" + _childDoc + " parentDoc=" + _parentDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _childDoc < _parentDoc, () => "childDoc=" + _childDoc + " parentDoc=" + _parentDoc); _childDoc++; if (_acceptDocs != null && !_acceptDocs.Get(_childDoc)) { @@ -280,7 +280,7 @@ public override float GetScore() public override int Advance(int childTarget) { - Debugging.Assert(() => childTarget >= _parentBits.Length || !_parentBits.Get(childTarget)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => childTarget >= _parentBits.Length || !_parentBits.Get(childTarget)); //System.out.println("Q.advance childTarget=" + childTarget); if (childTarget == NO_MORE_DOCS) @@ -289,14 +289,14 @@ public override int Advance(int childTarget) return _childDoc = _parentDoc = NO_MORE_DOCS; } - Debugging.Assert(() => _childDoc == -1 || childTarget != _parentDoc, () => "childTarget=" + childTarget); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _childDoc == -1 || childTarget != _parentDoc, () => "childTarget=" + childTarget); if (_childDoc == -1 || childTarget > _parentDoc) { // Advance to new parent: _parentDoc = _parentScorer.Advance(childTarget); ValidateParentDoc(); //System.out.println(" advance to parentDoc=" + parentDoc); - Debugging.Assert(() => _parentDoc > childTarget); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _parentDoc > childTarget); if (_parentDoc == NO_MORE_DOCS) { //System.out.println(" END"); @@ -312,7 +312,7 @@ public override int Advance(int childTarget) childTarget = Math.Max(childTarget, firstChild); } - Debugging.Assert(() => childTarget < _parentDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => childTarget < _parentDoc); // Advance within children of current parent: _childDoc = childTarget; diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs index 9cd5bee258..71deac5f2f 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs @@ -284,7 +284,7 @@ private void CopyGroups(OneGroup og) og.counts[scorerIDX] = joinScorer.ChildCount; //System.out.println(" count=" + og.counts[scorerIDX]); og.docs[scorerIDX] = joinScorer.SwapChildDocs(og.docs[scorerIDX]); - Debugging.Assert(() => og.docs[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.docs[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); + if (Debugging.AssertsEnabled) Debugging.Assert(() => og.docs[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.docs[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); //System.out.println(" len=" + og.docs[scorerIDX].length); /* for(int idx=0;idx og.scores[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.scores[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); + if (Debugging.AssertsEnabled) Debugging.Assert(() => og.scores[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.scores[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); } } else diff --git a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs 
index 01df4a512d..84f41fdae4 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs @@ -284,7 +284,7 @@ public override int NextDoc() } //System.out.println(" parentDoc=" + parentDoc); - Debugging.Assert(() => _parentDoc != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _parentDoc != -1); //System.out.println(" nextChildDoc=" + nextChildDoc); if (_acceptDocs != null && !_acceptDocs.Get(_parentDoc)) @@ -402,7 +402,7 @@ public override int Advance(int parentTarget) _prevParentDoc = _parentBits.PrevSetBit(parentTarget - 1); //System.out.println(" rolled back to prevParentDoc=" + prevParentDoc + " vs parentDoc=" + parentDoc); - Debugging.Assert(() => _prevParentDoc >= _parentDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => _prevParentDoc >= _parentDoc); if (_prevParentDoc > _nextChildDoc) { _nextChildDoc = _childScorer.Advance(_prevParentDoc); diff --git a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs index d1a97ee632..c6ef1a5b03 100644 --- a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs @@ -250,7 +250,7 @@ internal int BinarySearch(BytesRef b, BytesRef bytesRef, int low, int high, Byte return mid; } } - Debugging.Assert(() => comparer.Compare(bytesRef, b) != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => comparer.Compare(bytesRef, b) != 0); return -(low + 1); } @@ -285,7 +285,7 @@ public override SeekStatus SeekCeil(BytesRef text) public override void SeekExact(long ord) { - Debugging.Assert(() => ord < info.terms.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord < info.terms.Count); termUpto = (int)ord; } @@ -332,7 +332,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - Debugging.Assert(() => state != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null); this.SeekExact(((OrdTermState)state).Ord); } @@ -450,8 +450,11 @@ public override int Advance(int target) public override int NextPosition() { - Debugging.Assert(() => posUpto++ < freq); - Debugging.Assert(() => !sliceReader.IsEndOfSlice, () => " stores offsets : " + startOffset); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => posUpto++ < freq); + Debugging.Assert(() => !sliceReader.IsEndOfSlice, () => " stores offsets : " + startOffset); + } if (outerInstance.outerInstance.storeOffsets) { int pos = sliceReader.ReadInt32(); diff --git a/src/Lucene.Net.Memory/MemoryIndex.cs b/src/Lucene.Net.Memory/MemoryIndex.cs index f38d804bf9..18df8c9215 100644 --- a/src/Lucene.Net.Memory/MemoryIndex.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.cs @@ -217,7 +217,7 @@ internal MemoryIndex(bool storeOffsets, long maxReusedBytes) this.bytesUsed = Counter.NewCounter(); int maxBufferedByteBlocks = (int)((maxReusedBytes / 2) / ByteBlockPool.BYTE_BLOCK_SIZE); int maxBufferedIntBlocks = (int)((maxReusedBytes - (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE)) / (Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32)); - Debugging.Assert(() => (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32) <= maxReusedBytes); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * 
Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32) <= maxReusedBytes); byteBlockPool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, maxBufferedByteBlocks, bytesUsed)); intBlockPool = new Int32BlockPool(new RecyclingInt32BlockAllocator(Int32BlockPool.INT32_BLOCK_SIZE, maxBufferedIntBlocks, bytesUsed)); postingsWriter = new Int32BlockPool.SliceWriter(intBlockPool); @@ -739,9 +739,12 @@ public override int[] Init() start = new int[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_INT32)]; end = new int[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_INT32)]; freq = new int[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_INT32)]; - Debugging.Assert(() => start.Length >= ord.Length); - Debugging.Assert(() => end.Length >= ord.Length); - Debugging.Assert(() => freq.Length >= ord.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => start.Length >= ord.Length); + Debugging.Assert(() => end.Length >= ord.Length); + Debugging.Assert(() => freq.Length >= ord.Length); + } return ord; } @@ -754,9 +757,12 @@ public override int[] Grow() end = ArrayUtil.Grow(end, ord.Length); freq = ArrayUtil.Grow(freq, ord.Length); } - Debugging.Assert(() => start.Length >= ord.Length); - Debugging.Assert(() => end.Length >= ord.Length); - Debugging.Assert(() => freq.Length >= ord.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => start.Length >= ord.Length); + Debugging.Assert(() => end.Length >= ord.Length); + Debugging.Assert(() => freq.Length >= ord.Length); + } return ord; } diff --git a/src/Lucene.Net.Misc/Document/LazyDocument.cs b/src/Lucene.Net.Misc/Document/LazyDocument.cs index 72baf7a1c2..bc2aa77852 100644 --- a/src/Lucene.Net.Misc/Document/LazyDocument.cs +++ b/src/Lucene.Net.Misc/Document/LazyDocument.cs @@ -120,7 +120,7 @@ private void FetchRealValues(string name, int fieldNum) fields.TryGetValue(fieldNum, out lazyValues); IIndexableField[] realValues = d.GetFields(name); - Debugging.Assert(() => realValues.Length <= lazyValues.Count, + if (Debugging.AssertsEnabled) Debugging.Assert(() => realValues.Length <= lazyValues.Count, () => "More lazy values then real values for field: " + name); for (int i = 0; i < lazyValues.Count; i++) @@ -164,8 +164,11 @@ internal virtual IIndexableField GetRealValue() { outerInstance.FetchRealValues(name, fieldNum); } - Debugging.Assert(() => HasBeenLoaded, () => "field value was not lazy loaded"); - Debugging.Assert(() => realValue.Name.Equals(Name, StringComparison.Ordinal), () => "realvalue name != name: " + realValue.Name + " != " + Name); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => HasBeenLoaded, () => "field value was not lazy loaded"); + Debugging.Assert(() => realValue.Name.Equals(Name, StringComparison.Ordinal), () => "realvalue name != name: " + realValue.Name + " != " + Name); + } return realValue; } diff --git a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs index 1b85aa0f02..17695f7f54 100644 --- a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs @@ -297,7 +297,7 @@ public void UndeleteAll() if (m_input.HasDeletions) { IBits oldLiveDocs = m_input.LiveDocs; - Debugging.Assert(() => oldLiveDocs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => oldLiveDocs != null); // this loop is a little bit ineffective, as Bits has no nextSetBit(): for (int i = 0; i < maxDoc; i++) { diff --git 
a/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs b/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs index 6377c8cd23..8f96af9521 100644 --- a/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs @@ -156,7 +156,7 @@ public DocumentFilteredAtomicIndexReader(AtomicReaderContext context, Filter pre if (m_input.HasDeletions) { IBits oldLiveDocs = m_input.LiveDocs; - Debugging.Assert(() => oldLiveDocs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => oldLiveDocs != null); DocIdSetIterator it = bits.GetIterator(); for (int i = it.NextDoc(); i < maxDoc; i = it.NextDoc()) { diff --git a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs index b812d2a21c..cfff119466 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs @@ -83,8 +83,11 @@ internal static bool IsConsistent(DocMap docMap) { int newID = docMap.OldToNew(i); int oldID = docMap.NewToOld(newID); - Debugging.Assert(() => newID >= 0 && newID < maxDoc, () => "doc IDs must be in [0-" + maxDoc + "[, got " + newID); - Debugging.Assert(() => i == oldID, () => "mapping is inconsistent: " + i + " --oldToNew--> " + newID + " --newToOld--> " + oldID); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => newID >= 0 && newID < maxDoc, () => "doc IDs must be in [0-" + maxDoc + "[, got " + newID); + Debugging.Assert(() => i == oldID, () => "mapping is inconsistent: " + i + " --oldToNew--> " + newID + " --newToOld--> " + oldID); + } if (i != oldID || newID < 0 || newID >= maxDoc) { return false; diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs index 0ff309ee81..63745337eb 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs @@ -761,7 +761,7 @@ internal static AtomicReader Wrap(AtomicReader reader, Sorter.DocMap docMap) { throw new ArgumentException("reader.MaxDoc should be equal to docMap.Count, got" + reader.MaxDoc + " != " + docMap.Count); } - Debugging.Assert(() => Sorter.IsConsistent(docMap)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Sorter.IsConsistent(docMap)); return new SortingAtomicReader(reader, docMap); } diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs index 58719d32c3..70282442eb 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs @@ -134,7 +134,7 @@ public override MergePolicy.DocMap GetDocMap(MergeState mergeState) { return base.GetDocMap(mergeState); } - Debugging.Assert(() => mergeState.DocMaps.Length == 1); // we returned a singleton reader + if (Debugging.AssertsEnabled) Debugging.Assert(() => mergeState.DocMaps.Length == 1); // we returned a singleton reader MonotonicAppendingInt64Buffer deletes = GetDeletes(unsortedReaders); return new DocMapAnonymousInnerClassHelper(this, mergeState, deletes); } diff --git a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs index 91dc58bffe..03da23bd4d 100644 --- a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs +++ b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs @@ -88,7 +88,7 @@ public override object Subtract(object @object, object inc) public override object Add(object prefix, object output) { - Debugging.Assert(() => !(prefix is IList)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 
!(prefix is IList)); if (!(output is IList)) { return outputs.Add((T)prefix, (T)output); @@ -107,7 +107,7 @@ public override object Add(object prefix, object output) public override void Write(object output, DataOutput @out) { - Debugging.Assert(() => !(output is IList)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !(output is IList)); outputs.Write((T)output, @out); } diff --git a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs index d6ca39f4e0..40b4361990 100644 --- a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs +++ b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs @@ -70,8 +70,11 @@ public TwoInt64s(long first, long second) { this.first = first; this.second = second; - Debugging.Assert(() => first >= 0); - Debugging.Assert(() => second >= 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => first >= 0); + Debugging.Assert(() => second >= 0); + } } public override string ToString() @@ -134,8 +137,11 @@ public TwoInt64s Get(long first, long second) public override object Common(object output1, object output2) { - Debugging.Assert(() => Valid(output1, false)); - Debugging.Assert(() => Valid(output2, false)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Valid(output1, false)); + Debugging.Assert(() => Valid(output2, false)); + } long? output1_ = (long?)output1; long? output2_ = (long?)output2; if (output1_ == NO_OUTPUT || output2_ == NO_OUTPUT) @@ -144,8 +150,11 @@ public override object Common(object output1, object output2) } else if (doShare) { - Debugging.Assert(() => output1_ > 0); - Debugging.Assert(() => output2_ > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output1_ > 0); + Debugging.Assert(() => output2_ > 0); + } return Math.Min(output1_.GetValueOrDefault(), output2_.GetValueOrDefault()); } else if (output1_.Equals(output2_)) @@ -160,11 +169,14 @@ public override object Common(object output1, object output2) public override object Subtract(object output, object inc) { - Debugging.Assert(() => Valid(output, false)); - Debugging.Assert(() => Valid(inc, false)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Valid(output, false)); + Debugging.Assert(() => Valid(inc, false)); + } long? output2 = (long?)output; long? inc2 = (long?)inc; - Debugging.Assert(() => output2 >= inc2); + if (Debugging.AssertsEnabled) Debugging.Assert(() => output2 >= inc2); if (inc2 == NO_OUTPUT) { @@ -182,8 +194,8 @@ public override object Subtract(object output, object inc) public override object Add(object prefix, object output) { - Debugging.Assert(() => Valid(prefix, false)); - Debugging.Assert(() => Valid(output, true)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(prefix, false)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output, true)); long? prefix2 = (long?)prefix; if (output is long?) { @@ -211,7 +223,7 @@ public override object Add(object prefix, object output) public override void Write(object output, DataOutput @out) { - Debugging.Assert(() => Valid(output, true)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output, true)); if (output is long?) { long? 
output2 = (long?)output; @@ -286,8 +298,11 @@ public override string OutputToString(object output) [MethodImpl(MethodImplOptions.NoInlining)] public override object Merge(object first, object second) { - Debugging.Assert(() => Valid(first, false)); - Debugging.Assert(() => Valid(second, false)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Valid(first, false)); + Debugging.Assert(() => Valid(second, false)); + } return new TwoInt64s(((long?)first).GetValueOrDefault(), ((long?)second).GetValueOrDefault()); } } diff --git a/src/Lucene.Net.Queries/BooleanFilter.cs b/src/Lucene.Net.Queries/BooleanFilter.cs index 00744e12f8..d15cee8c61 100644 --- a/src/Lucene.Net.Queries/BooleanFilter.cs +++ b/src/Lucene.Net.Queries/BooleanFilter.cs @@ -78,7 +78,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo { if (res == null) { - Debugging.Assert(() => !hasShouldClauses); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasShouldClauses); res = new FixedBitSet(reader.MaxDoc); res.Set(0, reader.MaxDoc); // NOTE: may set bits on deleted docs } diff --git a/src/Lucene.Net.Queries/CommonTermsQuery.cs b/src/Lucene.Net.Queries/CommonTermsQuery.cs index db42ad7072..e78ce71985 100644 --- a/src/Lucene.Net.Queries/CommonTermsQuery.cs +++ b/src/Lucene.Net.Queries/CommonTermsQuery.cs @@ -279,7 +279,7 @@ public virtual void CollectTermContext(IndexReader reader, IList termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); if (termsEnum == TermsEnum.EMPTY) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs index ea7d8449e0..f4d4c5fd74 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs @@ -182,7 +182,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) { bool hasNext; hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -213,7 +213,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -240,7 +240,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -307,7 +307,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); if (posIncrAtt != null) { @@ -379,7 +379,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); term = termAtt.ToString(); if (posIncrAtt != null) diff --git a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs 
b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs index 1be52702c4..009ad62094 100644 --- a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs +++ b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs @@ -242,7 +242,7 @@ private void ParseSubQuery(State state) private void ConsumeSubQuery(State state) { - Debugging.Assert(() => (m_flags & Operator.PRECEDENCE_OPERATORS) != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (m_flags & Operator.PRECEDENCE_OPERATORS) != 0); int start = ++state.Index; int precedence = 1; bool escaped = false; @@ -315,7 +315,7 @@ private void ConsumeSubQuery(State state) private void ConsumePhrase(State state) { - Debugging.Assert(() => (m_flags & Operator.PHRASE_OPERATOR) != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (m_flags & Operator.PHRASE_OPERATOR) != 0); int start = ++state.Index; int copied = 0; bool escaped = false; diff --git a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs index 4c9be8f07f..eddc6f0777 100644 --- a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs +++ b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs @@ -187,7 +187,7 @@ public virtual int CompareTo(IRevision other) /// public virtual Stream Open(string source, string fileName) { - Debugging.Assert(() => source.Equals(INDEX_SOURCE, StringComparison.Ordinal) || source.Equals(TAXONOMY_SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected=({0} or {1}) got={2}", INDEX_SOURCE, TAXONOMY_SOURCE, source)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => source.Equals(INDEX_SOURCE, StringComparison.Ordinal) || source.Equals(TAXONOMY_SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected=({0} or {1}) got={2}", INDEX_SOURCE, TAXONOMY_SOURCE, source)); IndexCommit commit = source.Equals(INDEX_SOURCE, StringComparison.Ordinal) ? 
indexCommit : taxonomyCommit; return new IndexInputStream(commit.Directory.OpenInput(fileName, IOContext.READ_ONCE)); } diff --git a/src/Lucene.Net.Replicator/IndexRevision.cs b/src/Lucene.Net.Replicator/IndexRevision.cs index e8895a756d..3a97eadb67 100644 --- a/src/Lucene.Net.Replicator/IndexRevision.cs +++ b/src/Lucene.Net.Replicator/IndexRevision.cs @@ -134,7 +134,7 @@ public virtual int CompareTo(IRevision other) public virtual Stream Open(string source, string fileName) { - Debugging.Assert(() => source.Equals(SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected={0} got={1}", SOURCE, source)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => source.Equals(SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected={0} got={1}", SOURCE, source)); return new IndexInputStream(commit.Directory.OpenInput(fileName, IOContext.READ_ONCE)); } diff --git a/src/Lucene.Net.Replicator/ReplicationClient.cs b/src/Lucene.Net.Replicator/ReplicationClient.cs index f958f52b4d..35251e62d4 100644 --- a/src/Lucene.Net.Replicator/ReplicationClient.cs +++ b/src/Lucene.Net.Replicator/ReplicationClient.cs @@ -370,7 +370,7 @@ protected virtual IDictionary> RequiredFiles(IDictio // make sure to preserve revisionFiles order List res = new List(); string source = e.Key; - Debugging.Assert(() => newRevisionFiles.ContainsKey(source), () => string.Format("source not found in newRevisionFiles: {0}", newRevisionFiles)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newRevisionFiles.ContainsKey(source), () => string.Format("source not found in newRevisionFiles: {0}", newRevisionFiles)); foreach (RevisionFile file in newRevisionFiles[source]) { if (!handlerFiles.Contains(file.FileName)) @@ -416,7 +416,7 @@ public virtual void StartUpdateThread(long intervalMillis, string threadName) updateThread = new ReplicationThread(intervalMillis, threadName, DoUpdate, HandleUpdateException, updateLock); updateThread.Start(); // we rely on isAlive to return true in isUpdateThreadAlive, assert to be on the safe side - Debugging.Assert(() => updateThread.IsAlive, () => "updateThread started but not alive?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => updateThread.IsAlive, () => "updateThread started but not alive?"); } /// diff --git a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs index 0a9c457e67..265e06fbd9 100644 --- a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs +++ b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs @@ -191,7 +191,7 @@ protected override SortedDocValues GetSortedDocValues(AtomicReaderContext contex case Selector.MIDDLE_MAX: return new MiddleMaxValue(randomOrds); case Selector.MIN: default: - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); return null; } } diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs index cfe132c606..3e5da90d4e 100644 --- a/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs @@ -113,7 +113,7 @@ public BaseTermsEnumTraverser(AbstractPrefixTreeFilter outerInstance, AtomicRead protected virtual void CollectDocs(FixedBitSet bitSet) { //WARN: keep this specialization in sync - Debugging.Assert(() => m_termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_termsEnum != null); m_docsEnum = m_termsEnum.Docs(m_acceptDocs, m_docsEnum, 
DocsFlags.NONE); int docid; while ((docid = m_docsEnum.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS) diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs index 1eb27b7537..033eb93576 100644 --- a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs @@ -52,7 +52,7 @@ public AbstractVisitingPrefixTreeFilter(IShape queryShape, string fieldName, Spa : base(queryShape, fieldName, grid, detailLevel) { this.m_prefixGridScanLevel = Math.Max(0, Math.Min(prefixGridScanLevel, grid.MaxLevels - 1)); - Debugging.Assert(() => detailLevel <= grid.MaxLevels); + if (Debugging.AssertsEnabled) Debugging.Assert(() => detailLevel <= grid.MaxLevels); } public override bool Equals(object o) @@ -135,7 +135,7 @@ public VisitorTemplate(AbstractVisitingPrefixTreeFilter outerInstance, AtomicRea public virtual DocIdSet GetDocIdSet() { - Debugging.Assert(() => curVNode == null, () => "Called more than once?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => curVNode == null, () => "Called more than once?"); if (m_termsEnum == null) { return null; @@ -169,7 +169,7 @@ public virtual DocIdSet GetDocIdSet() // LUCENENET IMPORTANT: Must not call this inline with Debug.Assert // because the compiler removes Debug.Assert statements in release mode!! bool hasNext = curVNode.children.MoveNext(); - Debugging.Assert(() => hasNext); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext); curVNode = curVNode.children.Current; } @@ -206,7 +206,7 @@ public virtual DocIdSet GetDocIdSet() if (compare > 0) { // leap frog (termsEnum is beyond where we would otherwise seek) - Debugging.Assert(() => !m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetIterator(null).SeekExact(curVNodeTerm), () => "should be absent"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetIterator(null).SeekExact(curVNodeTerm), () => "should be absent"); } else { @@ -250,7 +250,7 @@ public virtual DocIdSet GetDocIdSet() /// private void AddIntersectingChildren() { - Debugging.Assert(() => thisTerm != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => thisTerm != null); Cell cell = curVNode.cell; if (cell.Level >= m_outerInstance.m_detailLevel) { @@ -261,7 +261,7 @@ private void AddIntersectingChildren() { //If the next indexed term just adds a leaf marker ('+') to cell, // then add all of those docs - Debugging.Assert(() => StringHelper.StartsWith(thisTerm, curVNodeTerm));//TODO refactor to use method on curVNode.cell + if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(thisTerm, curVNodeTerm));//TODO refactor to use method on curVNode.cell scanCell = m_outerInstance.m_grid.GetCell(thisTerm.Bytes, thisTerm.Offset, thisTerm.Length, scanCell); if (scanCell.Level == cell.Level && scanCell.IsLeaf) { @@ -372,7 +372,7 @@ public void Dispose() public bool MoveNext() { - //Debugging.Assert(cellIter.Current != null); + //if (Debugging.AssertsEnabled) Debugging.Assert(cellIter.Current != null); // LUCENENET NOTE: The consumer of this class calls // cellIter.MoveNext() before it is instantiated. 
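The hunks above all apply one mechanical transformation, and the MoveNext() case in this file shows why it matters: System.Diagnostics.Debug.Assert is marked [Conditional("DEBUG")], so in a Release build the compiler removes the entire call site, argument expressions included, and any side effect inside it is silently lost. The port therefore hoists side-effecting expressions into a local first and guards the delegate-based Debugging.Assert behind AssertsEnabled, so the lambda is neither allocated nor invoked when asserts are off. A minimal sketch of both shapes (identifier names here are illustrative, not taken from the patch):

    // Unsafe with Debug.Assert: MoveNext() vanishes in Release builds,
    // because [Conditional("DEBUG")] strips the whole call, arguments and all.
    // Debug.Assert(enumerator.MoveNext());

    // Safe: perform the side effect unconditionally, assert on the result.
    bool hasNext = enumerator.MoveNext();
    if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext);

    // Message factories defer string concatenation until an assert actually
    // fails, so a passing assert builds no strings at all.
    if (Debugging.AssertsEnabled)
        Debugging.Assert(() => count >= 0, () => "count went negative: " + count);
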
@@ -490,9 +490,9 @@ internal VNode(VNode parent) internal virtual void Reset(Cell cell) { - Debugging.Assert(() => cell != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cell != null); this.cell = cell; - Debugging.Assert(() => children == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => children == null); } } diff --git a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs index 8464ec9853..80d20390c7 100644 --- a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs @@ -102,7 +102,7 @@ internal SmallDocSet Visit(Cell cell, IBits acceptContains) if (cell.Level != 0 && ((cell.ShapeRel == SpatialRelation.NOT_SET || cell.ShapeRel == SpatialRelation.WITHIN))) { subCellsFilter = null; - Debugging.Assert(() => cell.Shape.Relate(outerInstance.m_queryShape) == SpatialRelation.WITHIN); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cell.Shape.Relate(outerInstance.m_queryShape) == SpatialRelation.WITHIN); } ICollection subCells = cell.GetSubCells(subCellsFilter); foreach (Cell subCell in subCells) @@ -147,7 +147,7 @@ internal SmallDocSet Visit(Cell cell, IBits acceptContains) private bool SeekExact(Cell cell) { - Debugging.Assert(() => new BytesRef(cell.GetTokenBytes()).CompareTo(termBytes) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => new BytesRef(cell.GetTokenBytes()).CompareTo(termBytes) > 0); this.termBytes.Bytes = cell.GetTokenBytes(); this.termBytes.Length = this.termBytes.Bytes.Length; if (m_termsEnum == null) @@ -157,7 +157,7 @@ private bool SeekExact(Cell cell) private SmallDocSet GetDocs(Cell cell, IBits acceptContains) { - Debugging.Assert(() => new BytesRef(cell.GetTokenBytes()).Equals(termBytes)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => new BytesRef(cell.GetTokenBytes()).Equals(termBytes)); return this.CollectDocs(acceptContains); } @@ -165,8 +165,11 @@ private SmallDocSet GetDocs(Cell cell, IBits acceptContains) private SmallDocSet GetLeafDocs(Cell leafCell, IBits acceptContains) { - Debugging.Assert(() => new BytesRef(leafCell.GetTokenBytes()).Equals(termBytes)); - Debugging.Assert(() => !leafCell.Equals(lastLeaf));//don't call for same leaf again + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => new BytesRef(leafCell.GetTokenBytes()).Equals(termBytes)); + Debugging.Assert(() => !leafCell.Equals(lastLeaf));//don't call for same leaf again + } lastLeaf = leafCell; if (m_termsEnum == null) @@ -298,7 +301,7 @@ public override DocIdSetIterator GetIterator() } docs[d++] = v; } - Debugging.Assert(() => d == intSet.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => d == intSet.Count); int size = d; //sort them Array.Sort(docs, 0, size); diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs index 004be33381..e4fcc52fa2 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs @@ -104,7 +104,7 @@ protected internal Cell(SpatialPrefixTree outerInstance, byte[] bytes, int off, public virtual void Reset(byte[] bytes, int off, int len) { - Debugging.Assert(() => Level != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Level != 0); token = null; m_shapeRel = SpatialRelation.NOT_SET; this.bytes = bytes; @@ -139,7 +139,7 @@ private void B_fixLeaf() /// Note: not supported at level 0. 
public virtual void SetLeaf() { - Debugging.Assert(() => Level != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Level != 0); m_leaf = true; } diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs index 4d399859a7..b6b76f42ad 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs @@ -171,7 +171,7 @@ private void Build( IShape shape, int maxLevel) { - Debugging.Assert(() => str.Length == level); + if (Debugging.AssertsEnabled) Debugging.Assert(() => str.Length == level); double w = levelW[level] / 2; double h = levelH[level] / 2; @@ -197,7 +197,7 @@ private void CheckBattenberg( IShape shape, int maxLevel) { - Debugging.Assert(() => str.Length == level); + if (Debugging.AssertsEnabled) Debugging.Assert(() => str.Length == level); double w = levelW[level] / 2; double h = levelH[level] / 2; diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs index 34d923d989..36a45f39d7 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs @@ -48,7 +48,7 @@ public abstract class SpatialPrefixTree public SpatialPrefixTree(SpatialContext ctx, int maxLevels) { - Debugging.Assert(() => maxLevels > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => maxLevels > 0); this.m_ctx = ctx; this.m_maxLevels = maxLevels; } @@ -269,7 +269,7 @@ public virtual IList GetCells(IPoint p, int detailLevel, bool inclParents) return new ReadOnlyCollection(new[] { cell }); } string endToken = cell.TokenString; - Debugging.Assert(() => endToken.Length == detailLevel); + if (Debugging.AssertsEnabled) Debugging.Assert(() => endToken.Length == detailLevel); IList cells = new List(detailLevel); for (int i = 1; i < detailLevel; i++) { diff --git a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs index daf619c491..5b33da79dc 100644 --- a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs @@ -203,8 +203,11 @@ protected internal override bool Visit(Cell cell) protected internal override void VisitLeaf(Cell cell) { //visitRelation is declared as a field, populated by visit() so we don't recompute it - Debugging.Assert(() => m_outerInstance.m_detailLevel != cell.Level); - Debugging.Assert(() => visitRelation == cell.Shape.Relate(m_outerInstance.m_queryShape)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => m_outerInstance.m_detailLevel != cell.Level); + Debugging.Assert(() => visitRelation == cell.Shape.Relate(m_outerInstance.m_queryShape)); + } if (AllCellsIntersectQuery(cell, visitRelation)) { CollectDocs(inside); diff --git a/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs b/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs index 35270a8fd6..391f23edfd 100644 --- a/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs +++ b/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs @@ -104,7 +104,7 @@ public override double DoubleVal(int doc) // make sure it has minX and area if (validX.Get(doc)) { - Debugging.Assert(() => validY.Get(doc)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => validY.Get(doc)); return calculator.Distance(outerInstance.from, ptX.Get(doc), ptY.Get(doc)) * outerInstance.multiplier; } return nullValue; diff --git 
a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs index f8d6c753ea..0be20a58f0 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs @@ -270,7 +270,7 @@ private void ReplaceSep(Automaton a) IList newTransitions = new List(); foreach (Transition t in state.GetTransitions()) { - Debugging.Assert(() => t.Min == t.Max); + if (Debugging.AssertsEnabled) Debugging.Assert(() => t.Min == t.Max); if (t.Min == TokenStreamToAutomaton.POS_SEP) { if (preserveSep) @@ -359,8 +359,11 @@ public int Compare(BytesRef a, BytesRef b) // Next by cost: long aCost = readerA.ReadInt32(); long bCost = readerB.ReadInt32(); - Debugging.Assert(() => DecodeWeight(aCost) >= 0); - Debugging.Assert(() => DecodeWeight(bCost) >= 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => DecodeWeight(aCost) >= 0); + Debugging.Assert(() => DecodeWeight(bCost) >= 0); + } if (aCost < bCost) { return -1; @@ -487,7 +490,7 @@ public override void Build(IInputIterator iterator) output.WriteBytes(surfaceForm.Bytes, surfaceForm.Offset, surfaceForm.Length); } - Debugging.Assert(() => output.Position == requiredLength, () => output.Position + " vs " + requiredLength); + if (Debugging.AssertsEnabled) Debugging.Assert(() => output.Position == requiredLength, () => output.Position + " vs " + requiredLength); writer.Write(buffer, 0, output.Position); } @@ -661,7 +664,7 @@ private LookupResult GetLookupResult(long? output1, BytesRef output2, CharsRef s break; } } - Debugging.Assert(() => sepIndex != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sepIndex != -1); spare.Grow(sepIndex); int payloadLen = output2.Length - sepIndex - 1; @@ -707,7 +710,7 @@ private bool SameSurfaceForm(BytesRef key, BytesRef output2) public override IList DoLookup(string key, IEnumerable contexts, bool onlyMorePopular, int num) { - Debugging.Assert(() => num > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => num > 0); if (onlyMorePopular) { @@ -799,7 +802,7 @@ public override IList DoLookup(string key, IEnumerable c } var completions = searcher.Search(); - Debugging.Assert(() => completions.IsComplete); + if (Debugging.AssertsEnabled) Debugging.Assert(() => completions.IsComplete); // NOTE: this is rather inefficient: we enumerate // every matching "exactly the same analyzed form" @@ -842,7 +845,7 @@ public override IList DoLookup(string key, IEnumerable c } var completions2 = searcher2.Search(); - Debugging.Assert(() => completions2.IsComplete); + if (Debugging.AssertsEnabled) Debugging.Assert(() => completions2.IsComplete); foreach (Util.Fst.Util.Result.Pair> completion in completions2) { @@ -920,7 +923,7 @@ protected override bool AcceptResult(Int32sRef input, PairOutputs results.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => results.Count == 1); return false; } else @@ -964,7 +967,7 @@ internal ISet ToFiniteStrings(BytesRef surfaceForm, TokenStreamToAuto ReplaceSep(automaton); automaton = ConvertAutomaton(automaton); - Debugging.Assert(() => SpecialOperations.IsFinite(automaton)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => SpecialOperations.IsFinite(automaton)); // Get all paths from the automaton (there can be // more than one path, eg if the analyzer created a diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs index 
d20b4461b0..a0c70e4cb3 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs @@ -145,7 +145,7 @@ protected override FieldType GetTextFieldType() { BinaryDocValues textDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, TEXT_FIELD_NAME); - Debugging.Assert(() => textDV != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => textDV != null); // This will just be null if app didn't pass payloads to build(): // TODO: maybe just stored fields? they compress... diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs index a676d8e375..fbb56ad880 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs @@ -69,7 +69,7 @@ public Path(State state, FST.Arc fstNode, T output, Int32sRef input) /// public static IList> IntersectPrefixPaths(Automaton a, FST fst) { - Debugging.Assert(() => a.IsDeterministic); + if (Debugging.AssertsEnabled) Debugging.Assert(() => a.IsDeterministic); IList> queue = new List>(); List> endNodes = new List>(); queue.Add(new Path(a.GetInitialState(), fst.GetFirstArc(new FST.Arc()), fst.Outputs.NoOutput, new Int32sRef())); @@ -120,8 +120,8 @@ public static IList> IntersectPrefixPaths(Automaton a, FST fst) FST.Arc nextArc = Lucene.Net.Util.Fst.Util.ReadCeilArc(min, fst, path.FstNode, scratchArc, fstReader); while (nextArc != null && nextArc.Label <= max) { - Debugging.Assert(() => nextArc.Label <= max); - Debugging.Assert(() => nextArc.Label >= min, () => nextArc.Label + " " + min); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextArc.Label <= max); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextArc.Label >= min, () => nextArc.Label + " " + min); Int32sRef newInput = new Int32sRef(currentInput.Length + 1); newInput.CopyInt32s(currentInput); newInput.Int32s[currentInput.Length] = nextArc.Label; @@ -130,7 +130,7 @@ public static IList> IntersectPrefixPaths(Automaton a, FST fst) .CopyFrom(nextArc), fst.Outputs.Add(path.Output, nextArc.Output), newInput)); int label = nextArc.Label; // used in assert nextArc = nextArc.IsLast ? null : fst.ReadNextRealArc(nextArc, fstReader); - Debugging.Assert(() => nextArc == null || label < nextArc.Label, () => "last: " + label + " next: " + (nextArc == null ? "" : nextArc.Label.ToString())); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextArc == null || label < nextArc.Label, () => "last: " + label + " next: " + (nextArc == null ? "" : nextArc.Label.ToString())); } } } diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs index c53d8c0982..2b79029781 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs @@ -556,7 +556,7 @@ public virtual IList DoLookup(string key, IEnumerable co // a separate dedicated att for this? int gramCount = posLenAtt.PositionLength; - Debugging.Assert(() => gramCount <= grams); + if (Debugging.AssertsEnabled) Debugging.Assert(() => gramCount <= grams); // Safety: make sure the recalculated count "agrees": if (CountGrams(tokenBytes) != gramCount) @@ -682,7 +682,7 @@ public virtual IList DoLookup(string key, IEnumerable co { BytesRef context = new BytesRef(token.Bytes, token.Offset, i); long? 
output = Lucene.Net.Util.Fst.Util.Get(fst, Lucene.Net.Util.Fst.Util.ToInt32sRef(context, new Int32sRef())); - Debugging.Assert(() => output != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => output != null); contextCount = DecodeWeight(output); lastTokenFragment = new BytesRef(token.Bytes, token.Offset + i + 1, token.Length - i - 1); break; @@ -699,7 +699,7 @@ public virtual IList DoLookup(string key, IEnumerable co { finalLastToken = BytesRef.DeepCopyOf(lastTokenFragment); } - Debugging.Assert(() => finalLastToken.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => finalLastToken.Offset == 0); CharsRef spare = new CharsRef(); @@ -726,7 +726,7 @@ public virtual IList DoLookup(string key, IEnumerable co searcher.AddStartPaths(arc, prefixOutput, true, new Int32sRef()); completions = searcher.Search(); - Debugging.Assert(() => completions.IsComplete); + if (Debugging.AssertsEnabled) Debugging.Assert(() => completions.IsComplete); } catch (IOException bogus) { @@ -754,7 +754,7 @@ public virtual IList DoLookup(string key, IEnumerable co { if (token.Bytes[token.Offset + i] == separator) { - Debugging.Assert(() => token.Length - i - 1 > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => token.Length - i - 1 > 0); lastToken = new BytesRef(token.Bytes, token.Offset + i + 1, token.Length - i - 1); break; } @@ -772,7 +772,7 @@ public virtual IList DoLookup(string key, IEnumerable co // return numbers that are greater than long.MaxValue, which results in a negative long number. (long)(long.MaxValue * (decimal)backoff * ((decimal)DecodeWeight(completion.Output)) / contextCount)); results.Add(result); - Debugging.Assert(() => results.Count == seen.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => results.Count == seen.Count); //System.out.println(" add result=" + result); nextCompletionContinue:; } @@ -874,7 +874,7 @@ private long EncodeWeight(long ngramCount) //private long decodeWeight(Pair output) { private static long DecodeWeight(long? output) { - Debugging.Assert(() => output != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => output != null); return (int)(long.MaxValue - output); // LUCENENET TODO: Perhaps a Java Lucene bug? Why cast to int when returning long? 
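        // One possible reading of the TODO above (hypothetical sketch, not part
        // of this patch): the (int) cast keeps only the low 32 bits of
        // (long.MaxValue - output) and then widens back to long, so any
        // difference larger than int.MaxValue is truncated. If the narrowing
        // turns out to be unintended upstream, the full-width form would be:
        //
        //     return long.MaxValue - output.GetValueOrDefault();
        //
        // The cast is kept as-is here to match the upstream Java behavior that
        // the TODO questions.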
} diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs index 4455dfadac..8f909db40f 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs @@ -115,7 +115,7 @@ public override bool IncrementToken() m_input.End(); endState = CaptureState(); int finalEndOffset = offsetAtt.EndOffset; - Debugging.Assert(() => finalEndOffset >= endOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => finalEndOffset >= endOffset); if (finalEndOffset > endOffset) { // OK there was a token separator after the diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs index d9d974d31b..afdfc8d498 100644 --- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs +++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs @@ -407,7 +407,7 @@ private bool Collect(IList<Completion> res, int num, int bucket, BytesRef output { output.Bytes = ArrayUtil.Grow(output.Bytes); } - Debugging.Assert(() => output.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => output.Offset == 0); output.Bytes[output.Length++] = (byte) arc.Label; FST.BytesReader fstReader = automaton.GetBytesReader(); automaton.ReadFirstTargetArc(arc, arc, fstReader); diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs index ad27c567a1..8efcd39998 100644 --- a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs +++ b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs @@ -140,7 +140,7 @@ public override IList<LookupResult> DoLookup(string key, IEnumerable<BytesRef> c { throw new ArgumentException("this suggester doesn't support contexts"); } - Debugging.Assert(() => num > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => num > 0); if (onlyMorePopular) { @@ -190,7 +190,7 @@ public override IList<LookupResult> DoLookup(string key, IEnumerable<BytesRef> c try { completions = Lucene.Net.Util.Fst.Util.ShortestPaths(fst, arc, prefixOutput, weightComparer, num, !exactFirst); - Debugging.Assert(() => completions.IsComplete); + if (Debugging.AssertsEnabled) Debugging.Assert(() => completions.IsComplete); } catch (IOException bogus) { @@ -213,7 +213,7 @@ public override IList<LookupResult> DoLookup(string key, IEnumerable<BytesRef> c private long?
LookupPrefix(BytesRef scratch, FST.Arc<long?> arc) //Bogus { - Debugging.Assert(() => 0 == (long)fst.Outputs.NoOutput); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 0 == (long)fst.Outputs.NoOutput); long output = 0; var bytesReader = fst.GetBytesReader(); @@ -294,7 +294,7 @@ internal WFSTInputIterator(WFSTCompletionLookup outerInstance, IInputIterator so : base(source) { this.outerInstance = outerInstance; - Debugging.Assert(() => source.HasPayloads == false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => source.HasPayloads == false); } protected internal override void Encode(OfflineSorter.ByteSequencesWriter writer, ByteArrayDataOutput output, byte[] buffer, BytesRef spare, BytesRef payload, ICollection<BytesRef> contexts, long weight) diff --git a/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs b/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs index b2c32d9430..4b8791f6fd 100644 --- a/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs +++ b/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs @@ -62,7 +62,7 @@ public override long Weight { get { - Debugging.Assert(() => currentOrd == ords[m_curPos]); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentOrd == ords[m_curPos]); return m_freqs[currentOrd]; } } @@ -83,7 +83,7 @@ public override BytesRef Payload { if (HasPayloads && m_curPos < m_payloads.Length) { - Debugging.Assert(() => currentOrd == ords[m_curPos]); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentOrd == ords[m_curPos]); return m_payloads.Get(payloadSpare, currentOrd); } return null; @@ -96,7 +96,7 @@ public override ICollection<BytesRef> Contexts { if (HasContexts && m_curPos < m_contextSets.Count) { - Debugging.Assert(() => currentOrd == ords[m_curPos]); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentOrd == ords[m_curPos]); return m_contextSets[currentOrd]; } return null; diff --git a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs index 67d081e0c8..f20029b576 100644 --- a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs @@ -78,7 +78,7 @@ public virtual void Add(AttributeSource.State state) public virtual AttributeSource.State NextState() { - Debugging.Assert(() => NextRead < InputTokens.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => NextRead < InputTokens.Count); return InputTokens[NextRead++]; } } @@ -141,7 +141,7 @@ protected virtual void InsertToken() m_positions.Get(m_inputPos).Add(CaptureState()); tokenPending = false; } - Debugging.Assert(() => !insertPending); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !insertPending); insertPending = true; } @@ -184,8 +184,8 @@ protected virtual bool PeekToken() { Console.WriteLine("LTF.peekToken inputPos=" + m_inputPos + " outputPos=" + m_outputPos + " tokenPending=" + tokenPending); } - Debugging.Assert(() => !m_end); - Debugging.Assert(() => m_inputPos == -1 || m_outputPos <= m_inputPos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !m_end); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_inputPos == -1 || m_outputPos <= m_inputPos); if (tokenPending) { m_positions.Get(m_inputPos).Add(CaptureState()); @@ -199,7 +199,7 @@ protected virtual bool PeekToken() if (gotToken) { m_inputPos += m_posIncAtt.PositionIncrement; - Debugging.Assert(() => m_inputPos >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_inputPos >= 0); if (DEBUG) { Console.WriteLine(" now
inputPos=" + m_inputPos); @@ -216,7 +216,7 @@ protected virtual bool PeekToken() else { // Make sure our input isn't messing up offsets: - Debugging.Assert(() => startPosData.StartOffset == startOffset, () => "prev startOffset=" + startPosData.StartOffset + " vs new startOffset=" + startOffset + " inputPos=" + m_inputPos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startPosData.StartOffset == startOffset, () => "prev startOffset=" + startPosData.StartOffset + " vs new startOffset=" + startOffset + " inputPos=" + m_inputPos); } int endOffset = m_offsetAtt.EndOffset; @@ -227,7 +227,7 @@ protected virtual bool PeekToken() else { // Make sure our input isn't messing up offsets: - Debugging.Assert(() => endPosData.EndOffset == endOffset, () => "prev endOffset=" + endPosData.EndOffset + " vs new endOffset=" + endOffset + " inputPos=" + m_inputPos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => endPosData.EndOffset == endOffset, () => "prev endOffset=" + endPosData.EndOffset + " vs new endOffset=" + endOffset + " inputPos=" + m_inputPos); } tokenPending = true; @@ -314,7 +314,7 @@ protected virtual bool NextToken() { Console.WriteLine(" return inserted token"); } - Debugging.Assert(InsertedTokenConsistent); + if (Debugging.AssertsEnabled) Debugging.Assert(InsertedTokenConsistent); insertPending = false; return true; } @@ -340,7 +340,7 @@ protected virtual bool NextToken() { Console.WriteLine(" return inserted token"); } - Debugging.Assert(InsertedTokenConsistent); + if (Debugging.AssertsEnabled) Debugging.Assert(InsertedTokenConsistent); insertPending = false; return true; } @@ -364,8 +364,8 @@ private bool InsertedTokenConsistent() { int posLen = m_posLenAtt.PositionLength; Position endPosData = m_positions.Get(m_outputPos + posLen); - Debugging.Assert(() => endPosData.EndOffset != -1); - Debugging.Assert(() => m_offsetAtt.EndOffset == endPosData.EndOffset, () => "offsetAtt.endOffset=" + m_offsetAtt.EndOffset + " vs expected=" + endPosData.EndOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => endPosData.EndOffset != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_offsetAtt.EndOffset == endPosData.EndOffset, () => "offsetAtt.endOffset=" + m_offsetAtt.EndOffset + " vs expected=" + endPosData.EndOffset); return true; } diff --git a/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs b/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs index 5f15220d5d..0b377d1cea 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs @@ -112,7 +112,7 @@ protected override int Correct(int currentOff) ret = currentOff; } - Debugging.Assert(() => ret >= 0, () => "currentOff=" + currentOff + ",diff=" + (ret - currentOff)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ret >= 0, () => "currentOff=" + currentOff + ",diff=" + (ret - currentOff)); return ret; } diff --git a/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs b/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs index 51e9e52bc1..49bbebfb4f 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs @@ -47,7 +47,7 @@ public virtual void ThrowExcAfterChar(int charUpto) { excAtChar = charUpto; // You should only call this on init!: - Debugging.Assert(() => 0 == readSoFar); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 0 == readSoFar); } public virtual void ThrowExcNext() @@ -91,10 +91,10 @@ public override int Read(char[] 
cbuf, int off, int len) if (excAtChar != -1) { int left = excAtChar - readSoFar; - Debugging.Assert(() => left != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => left != 0); read = input.Read(cbuf, off, Math.Min(realLen, left)); //Characters are left - Debugging.Assert(() => read != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => read != 0); readSoFar += read; } else diff --git a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs index f2c076aeb8..50152f6224 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs @@ -140,7 +140,7 @@ public MockTokenizer(AttributeFactory factory, TextReader input) public sealed override bool IncrementToken() { - Debugging.Assert(() => !enableChecks || (streamState == State.RESET || streamState == State.INCREMENT), () => "IncrementToken() called while in wrong state: " + streamState); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || (streamState == State.RESET || streamState == State.INCREMENT), () => "IncrementToken() called while in wrong state: " + streamState); ClearAttributes(); for (; ; ) { @@ -219,7 +219,7 @@ protected virtual int ReadCodePoint() } else { - Debugging.Assert(() => !char.IsLowSurrogate((char)ch), () => "unpaired low surrogate: " + ch.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !char.IsLowSurrogate((char)ch), () => "unpaired low surrogate: " + ch.ToString("x")); off++; if (char.IsHighSurrogate((char)ch)) { @@ -227,12 +227,12 @@ protected virtual int ReadCodePoint() if (ch2 >= 0) { off++; - Debugging.Assert(() => char.IsLowSurrogate((char)ch2), () => "unpaired high surrogate: " + ch.ToString("x") + ", followed by: " + ch2.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => char.IsLowSurrogate((char)ch2), () => "unpaired high surrogate: " + ch.ToString("x") + ", followed by: " + ch2.ToString("x")); return Character.ToCodePoint((char)ch, (char)ch2); } else { - Debugging.Assert(() => false, () => "stream ends with unpaired high surrogate: " + ch.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "stream ends with unpaired high surrogate: " + ch.ToString("x")); } } return ch; @@ -300,7 +300,7 @@ public override void Reset() state = runAutomaton.InitialState; lastOffset = off = 0; bufferedCodePoint = -1; - Debugging.Assert(() => !enableChecks || streamState != State.RESET, () => "Double Reset()"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || streamState != State.RESET, () => "Double Reset()"); streamState = State.RESET; } @@ -312,14 +312,14 @@ protected override void Dispose(bool disposing) // in some exceptional cases (e.g. TestIndexWriterExceptions) a test can prematurely close() // these tests should disable this check, by default we check the normal workflow. // TODO: investigate the CachingTokenFilter "double-close"... 
for now we ignore this - Debugging.Assert(() => !enableChecks || streamState == State.END || streamState == State.CLOSE, () => "Dispose() called in wrong state: " + streamState); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || streamState == State.END || streamState == State.CLOSE, () => "Dispose() called in wrong state: " + streamState); streamState = State.CLOSE; } } internal override bool SetReaderTestPoint() { - Debugging.Assert(() => !enableChecks || streamState == State.CLOSE, () => "SetReader() called in wrong state: " + streamState); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || streamState == State.CLOSE, () => "SetReader() called in wrong state: " + streamState); streamState = State.SETREADER; return true; } @@ -333,7 +333,7 @@ public override void End() // these tests should disable this check (in general you should consume the entire stream) try { - Debugging.Assert(() => !enableChecks || streamState == State.INCREMENT_FALSE, () => "End() called before IncrementToken() returned false!"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || streamState == State.INCREMENT_FALSE, () => "End() called before IncrementToken() returned false!"); } finally { diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs index 6cad1fc7ca..486a980b3a 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs @@ -40,15 +40,15 @@ public AssertingDocValuesFormat() public override DocValuesConsumer FieldsConsumer(SegmentWriteState state) { DocValuesConsumer consumer = @in.FieldsConsumer(state); - Debugging.Assert(() => consumer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => consumer != null); return new AssertingDocValuesConsumer(consumer, state.SegmentInfo.DocCount); } public override DocValuesProducer FieldsProducer(SegmentReadState state) { - Debugging.Assert(() => state.FieldInfos.HasDocValues); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state.FieldInfos.HasDocValues); DocValuesProducer producer = @in.FieldsProducer(state); - Debugging.Assert(() => producer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => producer != null); return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount); } @@ -70,7 +70,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable values) { count++; } - Debugging.Assert(() => count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, true); @in.AddNumericField(field, values); } @@ -80,10 +80,10 @@ public override void AddBinaryField(FieldInfo field, IEnumerable value int count = 0; foreach (BytesRef b in values) { - Debugging.Assert(() => b == null || b.IsValid()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b == null || b.IsValid()); count++; } - Debugging.Assert(() => count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, true); @in.AddBinaryField(field, values); } @@ -94,25 +94,25 @@ public override void AddSortedField(FieldInfo field, IEnumerable value BytesRef lastValue = null; foreach (BytesRef b in values) { - Debugging.Assert(() => b != null); - Debugging.Assert(() => b.IsValid()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b != 
null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b.IsValid()); if (valueCount > 0) { - Debugging.Assert(() => b.CompareTo(lastValue) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b.CompareTo(lastValue) > 0); } lastValue = BytesRef.DeepCopyOf(b); valueCount++; } - Debugging.Assert(() => valueCount <= maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount <= maxDoc); FixedBitSet seenOrds = new FixedBitSet(valueCount); int count = 0; foreach (long? v in docToOrd) { - Debugging.Assert(() => v != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => v != null); int ord = (int)v.Value; - Debugging.Assert(() => ord >= -1 && ord < valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= -1 && ord < valueCount); if (ord >= 0) { seenOrds.Set(ord); @@ -120,8 +120,8 @@ public override void AddSortedField(FieldInfo field, IEnumerable value count++; } - Debugging.Assert(() => count == maxDoc); - Debugging.Assert(() => seenOrds.Cardinality() == valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => seenOrds.Cardinality() == valueCount); CheckIterator(values.GetEnumerator(), valueCount, false); CheckIterator(docToOrd.GetEnumerator(), maxDoc, false); @in.AddSortedField(field, values, docToOrd); @@ -133,11 +133,11 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va BytesRef lastValue = null; foreach (BytesRef b in values) { - Debugging.Assert(() => b != null); - Debugging.Assert(() => b.IsValid()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b.IsValid()); if (valueCount > 0) { - Debugging.Assert(() => b.CompareTo(lastValue) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b.CompareTo(lastValue) > 0); } lastValue = BytesRef.DeepCopyOf(b); valueCount++; @@ -150,9 +150,9 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va { foreach (long? v in docToOrdCount) { - Debugging.Assert(() => v != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => v != null); int count = (int)v.Value; - Debugging.Assert(() => count >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count >= 0); docCount++; ordCount += count; @@ -161,18 +161,18 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va { ordIterator.MoveNext(); long? 
o = ordIterator.Current; - Debugging.Assert(() => o != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => o != null); long ord = o.Value; - Debugging.Assert(() => ord >= 0 && ord < valueCount); - Debugging.Assert(() => ord > lastOrd, () => "ord=" + ord + ",lastOrd=" + lastOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord > lastOrd, () => "ord=" + ord + ",lastOrd=" + lastOrd); seenOrds.Set(ord); lastOrd = ord; } } - Debugging.Assert(() => ordIterator.MoveNext() == false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ordIterator.MoveNext() == false); - Debugging.Assert(() => docCount == maxDoc); - Debugging.Assert(() => seenOrds.Cardinality() == valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docCount == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => seenOrds.Cardinality() == valueCount); CheckIterator(values.GetEnumerator(), valueCount, false); CheckIterator(docToOrdCount.GetEnumerator(), maxDoc, false); CheckIterator(ords.GetEnumerator(), ordCount, false); @@ -203,10 +203,10 @@ public override void AddNumericField(FieldInfo field, IEnumerable values) int count = 0; foreach (long? v in values) { - Debugging.Assert(() => v != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => v != null); count++; } - Debugging.Assert(() => count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, false); @in.AddNumericField(field, values); } @@ -240,9 +240,9 @@ private static void CheckIterator(IEnumerator iterator, long expectedSize, for (long i = 0; i < expectedSize; i++) { bool hasNext = iterator.MoveNext(); - Debugging.Assert(() => hasNext); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext); T v = iterator.Current; - Debugging.Assert(() => allowNull || v != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => allowNull || v != null); // LUCENE.NET specific. removed call to Reset(). 
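// LUCENENET NOTE: IEnumerator.Reset() is optional in .NET; compiler-generated
// enumerators (yield return) throw NotSupportedException from Reset(), so the
// Java-style probes commented out below, which relied on the iterator throwing
// past the end, have no direct MoveNext() equivalent here.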
//try @@ -255,7 +255,7 @@ private static void CheckIterator(IEnumerator iterator, long expectedSize, // // ok //} } - Debugging.Assert(() => !iterator.MoveNext()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !iterator.MoveNext()); /*try { //iterator.next(); @@ -285,42 +285,42 @@ internal AssertingDocValuesProducer(DocValuesProducer @in, int maxDoc) public override NumericDocValues GetNumeric(FieldInfo field) { - Debugging.Assert(() => field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC); NumericDocValues values = @in.GetNumeric(field); - Debugging.Assert(() => values != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => values != null); return new AssertingNumericDocValues(values, maxDoc); } public override BinaryDocValues GetBinary(FieldInfo field) { - Debugging.Assert(() => field.DocValuesType == DocValuesType.BINARY); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType == DocValuesType.BINARY); BinaryDocValues values = @in.GetBinary(field); - Debugging.Assert(() => values != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => values != null); return new AssertingBinaryDocValues(values, maxDoc); } public override SortedDocValues GetSorted(FieldInfo field) { - Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED); SortedDocValues values = @in.GetSorted(field); - Debugging.Assert(() => values != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => values != null); return new AssertingSortedDocValues(values, maxDoc); } public override SortedSetDocValues GetSortedSet(FieldInfo field) { - Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED_SET); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED_SET); SortedSetDocValues values = @in.GetSortedSet(field); - Debugging.Assert(() => values != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => values != null); return new AssertingSortedSetDocValues(values, maxDoc); } public override IBits GetDocsWithField(FieldInfo field) { - Debugging.Assert(() => field.DocValuesType != DocValuesType.NONE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType != DocValuesType.NONE); IBits bits = @in.GetDocsWithField(field); - Debugging.Assert(() => bits != null); - Debugging.Assert(() => bits.Length == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bits != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bits.Length == maxDoc); return new AssertingBits(bits); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs index 87efcbaad2..ea40b54df6 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs @@ -32,15 +32,15 @@ public class AssertingNormsFormat : NormsFormat public override DocValuesConsumer NormsConsumer(SegmentWriteState state) { DocValuesConsumer consumer = @in.NormsConsumer(state); - Debugging.Assert(() => consumer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => consumer != null); return new AssertingNormsConsumer(consumer, state.SegmentInfo.DocCount); } public 
override DocValuesProducer NormsProducer(SegmentReadState state) { - Debugging.Assert(() => state.FieldInfos.HasNorms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state.FieldInfos.HasNorms); DocValuesProducer producer = @in.NormsProducer(state); - Debugging.Assert(() => producer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => producer != null); return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount); } } diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs index cc99fd3503..69e981f8b0 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs @@ -67,7 +67,7 @@ protected override void Dispose(bool disposing) public override IEnumerator<string> GetEnumerator() { IEnumerator<string> iterator = @in.GetEnumerator(); - Debugging.Assert(() => iterator != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => iterator != null); return iterator; } @@ -105,7 +105,7 @@ internal AssertingFieldsConsumer(FieldsConsumer @in) public override TermsConsumer AddField(FieldInfo field) { TermsConsumer consumer = @in.AddField(field); - Debugging.Assert(() => consumer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => consumer != null); return new AssertingTermsConsumer(consumer, field); } @@ -144,28 +144,28 @@ internal AssertingTermsConsumer(TermsConsumer @in, FieldInfo fieldInfo) public override PostingsConsumer StartTerm(BytesRef text) { - Debugging.Assert(() => state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); state = TermsConsumerState.START; - Debugging.Assert(() => lastTerm == null || @in.Comparer.Compare(text, lastTerm) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastTerm == null || @in.Comparer.Compare(text, lastTerm) > 0); lastTerm = BytesRef.DeepCopyOf(text); return lastPostingsConsumer = new AssertingPostingsConsumer(@in.StartTerm(text), fieldInfo, visitedDocs); } public override void FinishTerm(BytesRef text, TermStats stats) { - Debugging.Assert(() => state == TermsConsumerState.START); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state == TermsConsumerState.START); state = TermsConsumerState.INITIAL; - Debugging.Assert(() => text.Equals(lastTerm)); - Debugging.Assert(() => stats.DocFreq > 0); // otherwise, this method should not be called. - Debugging.Assert(() => stats.DocFreq == lastPostingsConsumer.docFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => text.Equals(lastTerm)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq > 0); // otherwise, this method should not be called.
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq == lastPostingsConsumer.docFreq); sumDocFreq += stats.DocFreq; if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - Debugging.Assert(() => stats.TotalTermFreq == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.TotalTermFreq == -1); } else { - Debugging.Assert(() => stats.TotalTermFreq == lastPostingsConsumer.totalTermFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.TotalTermFreq == lastPostingsConsumer.totalTermFreq); sumTotalTermFreq += stats.TotalTermFreq; } @in.FinishTerm(text, stats); @@ -173,20 +173,20 @@ public override void FinishTerm(BytesRef text, TermStats stats) public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount) { - Debugging.Assert(() => state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); state = TermsConsumerState.FINISHED; - Debugging.Assert(() => docCount >= 0); - Debugging.Assert(() => docCount == visitedDocs.Cardinality()); - Debugging.Assert(() => sumDocFreq >= docCount); - Debugging.Assert(() => sumDocFreq == this.sumDocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docCount == visitedDocs.Cardinality()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sumDocFreq >= docCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sumDocFreq == this.sumDocFreq); if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - Debugging.Assert(() => sumTotalTermFreq == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sumTotalTermFreq == -1); } else { - Debugging.Assert(() => sumTotalTermFreq >= sumDocFreq); - Debugging.Assert(() => sumTotalTermFreq == this.sumTotalTermFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sumTotalTermFreq >= sumDocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sumTotalTermFreq == this.sumTotalTermFreq); } @in.Finish(sumTotalTermFreq, sumDocFreq, docCount); } @@ -222,17 +222,17 @@ internal AssertingPostingsConsumer(PostingsConsumer @in, FieldInfo fieldInfo, Op public override void StartDoc(int docID, int freq) { - Debugging.Assert(() => state == PostingsConsumerState.INITIAL); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state == PostingsConsumerState.INITIAL); state = PostingsConsumerState.START; - Debugging.Assert(() => docID >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0); if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - Debugging.Assert(() => freq == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => freq == -1); this.freq = 0; // we don't expect any positions here } else { - Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); this.freq = freq; totalTermFreq += freq; } @@ -246,41 +246,41 @@ public override void StartDoc(int docID, int freq) public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { - Debugging.Assert(() => state == PostingsConsumerState.START); - Debugging.Assert(() => positionCount < freq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state == PostingsConsumerState.START); + if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount < freq); positionCount++; - Debugging.Assert(() => 
position >= lastPosition || position == -1); // we still allow -1 from old 3.x indexes + if (Debugging.AssertsEnabled) Debugging.Assert(() => position >= lastPosition || position == -1); // we still allow -1 from old 3.x indexes lastPosition = position; if (fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { - Debugging.Assert(() => startOffset >= 0); - Debugging.Assert(() => startOffset >= lastStartOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset >= lastStartOffset); lastStartOffset = startOffset; - Debugging.Assert(() => endOffset >= startOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => endOffset >= startOffset); } else { - Debugging.Assert(() => startOffset == -1); - Debugging.Assert(() => endOffset == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => endOffset == -1); } if (payload != null) { - Debugging.Assert(() => fieldInfo.HasPayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo.HasPayloads); } @in.AddPosition(position, payload, startOffset, endOffset); } public override void FinishDoc() { - Debugging.Assert(() => state == PostingsConsumerState.START); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state == PostingsConsumerState.START); state = PostingsConsumerState.INITIAL; if (fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { - Debugging.Assert(() => positionCount == 0); // we should not have fed any positions! + if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount == 0); // we should not have fed any positions! } else { - Debugging.Assert(() => positionCount == freq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount == freq); } @in.FinishDoc(); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs index ca5761756f..d4d9d763b2 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs @@ -58,7 +58,7 @@ protected override void Dispose(bool disposing) public override void VisitDocument(int n, StoredFieldVisitor visitor) { - Debugging.Assert(() => n >= 0 && n < maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => n >= 0 && n < maxDoc); @in.VisitDocument(n, visitor); } @@ -100,9 +100,9 @@ internal AssertingStoredFieldsWriter(StoredFieldsWriter @in) public override void StartDocument(int numStoredFields) { - Debugging.Assert(() => docStatus != Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus != Status.STARTED); @in.StartDocument(numStoredFields); - Debugging.Assert(() => fieldCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0); fieldCount = numStoredFields; numWritten++; docStatus = Status.STARTED; @@ -110,17 +110,17 @@ public override void StartDocument(int numStoredFields) public override void FinishDocument() { - Debugging.Assert(() => docStatus == Status.STARTED); - Debugging.Assert(() => fieldCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0); @in.FinishDocument(); docStatus = Status.FINISHED; } public override void WriteField(FieldInfo 
info, IIndexableField field) { - Debugging.Assert(() => docStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED); @in.WriteField(info, field); - Debugging.Assert(() => fieldCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount > 0); fieldCount--; } @@ -131,10 +131,10 @@ public override void Abort() public override void Finish(FieldInfos fis, int numDocs) { - Debugging.Assert(() => docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED)); @in.Finish(fis, numDocs); - Debugging.Assert(() => fieldCount == 0); - Debugging.Assert(() => numDocs == numWritten); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs == numWritten); } protected override void Dispose(bool disposing) diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs index 10a94d5dd1..b7ce2300f0 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs @@ -103,8 +103,8 @@ internal AssertingTermVectorsWriter(TermVectorsWriter @in) public override void StartDocument(int numVectorFields) { - Debugging.Assert(() => fieldCount == 0); - Debugging.Assert(() => docStatus != Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus != Status.STARTED); @in.StartDocument(numVectorFields); docStatus = Status.STARTED; fieldCount = numVectorFields; @@ -113,17 +113,17 @@ public override void StartDocument(int numVectorFields) public override void FinishDocument() { - Debugging.Assert(() => fieldCount == 0); - Debugging.Assert(() => docStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED); @in.FinishDocument(); docStatus = Status.FINISHED; } public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads) { - Debugging.Assert(() => termCount == 0); - Debugging.Assert(() => docStatus == Status.STARTED); - Debugging.Assert(() => fieldStatus != Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus != Status.STARTED); @in.StartField(info, numTerms, positions, offsets, payloads); fieldStatus = Status.STARTED; termCount = numTerms; @@ -132,8 +132,8 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo public override void FinishField() { - Debugging.Assert(() => termCount == 0); - Debugging.Assert(() => fieldStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus == Status.STARTED); @in.FinishField(); fieldStatus = Status.FINISHED; --fieldCount; @@ -141,9 +141,9 @@ public override void FinishField() public override void StartTerm(BytesRef term, int freq) { - Debugging.Assert(() => docStatus == Status.STARTED); - Debugging.Assert(() => fieldStatus == 
Status.STARTED); - Debugging.Assert(() => termStatus != Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termStatus != Status.STARTED); @in.StartTerm(term, freq); termStatus = Status.STARTED; positionCount = hasPositions ? freq : 0; @@ -151,10 +151,10 @@ public override void StartTerm(BytesRef term, int freq) public override void FinishTerm() { - Debugging.Assert(() => positionCount == 0); - Debugging.Assert(() => docStatus == Status.STARTED); - Debugging.Assert(() => fieldStatus == Status.STARTED); - Debugging.Assert(() => termStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termStatus == Status.STARTED); @in.FinishTerm(); termStatus = Status.FINISHED; --termCount; @@ -162,9 +162,9 @@ public override void FinishTerm() public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - Debugging.Assert(() => docStatus == Status.STARTED); - Debugging.Assert(() => fieldStatus == Status.STARTED); - Debugging.Assert(() => termStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus == Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termStatus == Status.STARTED); @in.AddPosition(position, startOffset, endOffset, payload); --positionCount; } @@ -176,10 +176,10 @@ public override void Abort() public override void Finish(FieldInfos fis, int numDocs) { - Debugging.Assert(() => docCount == numDocs); - Debugging.Assert(() => docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED)); - Debugging.Assert(() => fieldStatus != Status.STARTED); - Debugging.Assert(() => termStatus != Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docCount == numDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == (numDocs > 0 ? 
Status.FINISHED : Status.UNDEFINED)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus != Status.STARTED); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termStatus != Status.STARTED); @in.Finish(fis, numDocs); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs index dacda7f8f6..ae218dd460 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs @@ -57,7 +57,7 @@ private class DecompressorAnonymousInnerClassHelper : Decompressor { public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes) { - Debugging.Assert(() => offset + length <= originalLength); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offset + length <= originalLength); if (bytes.Bytes.Length < originalLength) { bytes.Bytes = new byte[ArrayUtil.Oversize(originalLength, 1)]; diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs index d46a3adfc1..8a461be015 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs @@ -79,7 +79,7 @@ public override void Write(Directory directory, string segmentName, string segme if (fi.IsIndexed) { bits |= IS_INDEXED; - Debugging.Assert(() => fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads); if (fi.IndexOptions == IndexOptions.DOCS_ONLY) { bits |= OMIT_TERM_FREQ_AND_POSITIONS; @@ -103,7 +103,7 @@ public override void Write(Directory directory, string segmentName, string segme // only in RW case output.WriteByte((byte)(sbyte)(fi.NormType == Index.DocValuesType.NONE ? 
0 : 1)); } - Debugging.Assert(() => fi.Attributes == null); // not used or supported + if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.Attributes == null); // not used or supported } success = true; } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs index 59641729d4..a849aeb7fc 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs @@ -81,7 +81,7 @@ public PreFlexRWFieldsWriter(SegmentWriteState state) public override TermsConsumer AddField(FieldInfo field) { - Debugging.Assert(() => field.Number != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Number != -1); if (field.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0) { throw new NotSupportedException("this codec cannot index offsets"); @@ -164,7 +164,7 @@ public override void StartDoc(int docID, int termDocFreq) lastDocID = docID; - Debugging.Assert(() => docID < outerInstance.outerInstance.totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + outerInstance.outerInstance.totalNumDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID < outerInstance.outerInstance.totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + outerInstance.outerInstance.totalNumDocs); if (outerInstance.omitTF) { @@ -188,9 +188,9 @@ public override void StartDoc(int docID, int termDocFreq) public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { - Debugging.Assert(() => outerInstance.outerInstance.proxOut != null); - Debugging.Assert(() => startOffset == -1); - Debugging.Assert(() => endOffset == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.outerInstance.proxOut != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => endOffset == -1); //System.out.println(" w pos=" + position + " payl=" + payload); int delta = position - lastPosition; lastPosition = position; diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs index 6831153de3..9cd2cdeec7 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs @@ -75,7 +75,7 @@ public PreFlexRWNormsConsumer(Directory directory, string segment, IOContext con public override void AddNumericField(FieldInfo field, IEnumerable values) { - Debugging.Assert(() => field.Number > lastFieldNumber, () => "writing norms fields out of order" + lastFieldNumber + " -> " + field.Number); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Number > lastFieldNumber, () => "writing norms fields out of order" + lastFieldNumber + " -> " + field.Number); foreach (var n in values) { if (((sbyte)(byte)(long)n) < sbyte.MinValue || ((sbyte)(byte)(long)n) > sbyte.MaxValue) diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs index e7bd7d905b..807b279967 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs @@ -36,7 +36,7 @@ internal sealed class PreFlexRWStoredFieldsWriter : 
StoredFieldsWriter public PreFlexRWStoredFieldsWriter(Directory directory, string segment, IOContext context) { - Debugging.Assert(() => directory != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => directory != null); this.directory = directory; this.segment = segment; @@ -188,7 +188,7 @@ public override void WriteField(FieldInfo info, IIndexableField field) fieldsStream.WriteInt64(J2N.BitConversion.DoubleToInt64Bits(field.GetDoubleValue().Value)); break; default: - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); break; } } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs index 1f8fb40a63..1c42cd5f8f 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs @@ -75,7 +75,7 @@ public override void StartDocument(int numVectorFields) public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads) { - Debugging.Assert(() => lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName); lastFieldName = info.Name; if (payloads) { @@ -98,7 +98,7 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo } tvf.WriteByte((byte)bits); - Debugging.Assert(() => fieldCount <= numVectorFields); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount <= numVectorFields); if (fieldCount == numVectorFields) { // last field of the document @@ -148,7 +148,7 @@ public override void StartTerm(BytesRef term, int freq) public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - Debugging.Assert(() => payload == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => payload == null); if (positions && offsets) { // write position delta diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs index 3030f4ce33..61d3b9e277 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs @@ -135,7 +135,7 @@ private void Initialize(Directory directory, string segment, FieldInfos fis, int output.WriteInt32(indexInterval); // write indexInterval output.WriteInt32(skipInterval); // write skipInterval output.WriteInt32(maxSkipLevels); // write maxSkipLevels - Debugging.Assert(InitUTF16Results); + if (Debugging.AssertsEnabled) Debugging.Assert(InitUTF16Results); success = true; } finally @@ -202,10 +202,10 @@ private int CompareToLastTerm(int fieldNumber, BytesRef term) } scratchBytes.CopyBytes(term); - Debugging.Assert(() => lastTerm.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastTerm.Offset == 0); UnicodeUtil.UTF8toUTF16(lastTerm.Bytes, 0, lastTerm.Length, utf16Result1); - Debugging.Assert(() => scratchBytes.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchBytes.Offset == 0); UnicodeUtil.UTF8toUTF16(scratchBytes.Bytes, 0, scratchBytes.Length, utf16Result2); int len; @@ -243,10 +243,10 @@ private int CompareToLastTerm(int 
fieldNumber, BytesRef term) /// public void Add(int fieldNumber, BytesRef term, TermInfo ti) { - Debugging.Assert(() => CompareToLastTerm(fieldNumber, term) < 0 || (isIndex && term.Length == 0 && lastTerm.Length == 0), () => "Terms are out of order: field=" + FieldName(fieldInfos, fieldNumber) + " (number " + fieldNumber + ")" + " lastField=" + FieldName(fieldInfos, lastFieldNumber) + " (number " + lastFieldNumber + ")" + " text=" + term.Utf8ToString() + " lastText=" + lastTerm.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => CompareToLastTerm(fieldNumber, term) < 0 || (isIndex && term.Length == 0 && lastTerm.Length == 0), () => "Terms are out of order: field=" + FieldName(fieldInfos, fieldNumber) + " (number " + fieldNumber + ")" + " lastField=" + FieldName(fieldInfos, lastFieldNumber) + " (number " + lastFieldNumber + ")" + " text=" + term.Utf8ToString() + " lastText=" + lastTerm.Utf8ToString()); - Debugging.Assert(() => ti.FreqPointer >= lastTi.FreqPointer, () => "freqPointer out of order (" + ti.FreqPointer + " < " + lastTi.FreqPointer + ")"); - Debugging.Assert(() => ti.ProxPointer >= lastTi.ProxPointer, () => "proxPointer out of order (" + ti.ProxPointer + " < " + lastTi.ProxPointer + ")"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ti.FreqPointer >= lastTi.FreqPointer, () => "freqPointer out of order (" + ti.FreqPointer + " < " + lastTi.FreqPointer + ")"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ti.ProxPointer >= lastTi.ProxPointer, () => "proxPointer out of order (" + ti.ProxPointer + " < " + lastTi.ProxPointer + ")"); if (!isIndex && size % indexInterval == 0) { diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs index cd0094980e..62d6dc1714 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs @@ -333,7 +333,7 @@ private void AddVarStraightBytesField(FieldInfo field, IndexOutput data, IndexOu index.WriteVInt64(maxAddress); int maxDoc = state.SegmentInfo.DocCount; - Debugging.Assert(() => maxDoc != int.MaxValue); // unsupported by the 4.0 impl + if (Debugging.AssertsEnabled) Debugging.Assert(() => maxDoc != int.MaxValue); // unsupported by the 4.0 impl PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT); long currentPosition = 0; @@ -346,7 +346,7 @@ private void AddVarStraightBytesField(FieldInfo field, IndexOutput data, IndexOu } } // write sentinel - Debugging.Assert(() => currentPosition == maxAddress); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentPosition == maxAddress); w.Add(currentPosition); w.Finish(); } @@ -375,7 +375,7 @@ private void AddFixedDerefBytesField(FieldInfo field, IndexOutput data, IndexOut /* ordinals */ int valueCount = dictionary.Count; - Debugging.Assert(() => valueCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount > 0); index.WriteInt32(valueCount); int maxDoc = state.SegmentInfo.DocCount; PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT); @@ -439,7 +439,7 @@ private void AddVarDerefBytesField(FieldInfo field, IndexOutput data, IndexOutpu // the little vint encoding used for var-deref private static void WriteVInt16(IndexOutput o, int i) { - Debugging.Assert(() => i >= 0 && i <= short.MaxValue); + if 
(Debugging.AssertsEnabled) Debugging.Assert(() => i >= 0 && i <= short.MaxValue); if (i < 128) { o.WriteByte((byte)(sbyte)i); @@ -545,7 +545,7 @@ private void AddFixedSortedBytesField(FieldInfo field, IndexOutput data, IndexOu index.WriteInt32(valueCount); int maxDoc = state.SegmentInfo.DocCount; - Debugging.Assert(() => valueCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount > 0); PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT); foreach (long n in docToOrd) { @@ -578,7 +578,7 @@ private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutp long maxAddress = data.GetFilePointer() - startPos; index.WriteInt64(maxAddress); - Debugging.Assert(() => valueCount != int.MaxValue); // unsupported by the 4.0 impl + if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount != int.MaxValue); // unsupported by the 4.0 impl PackedInt32s.Writer w = PackedInt32s.GetWriter(index, valueCount + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT); long currentPosition = 0; @@ -588,14 +588,14 @@ private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutp currentPosition += v.Length; } // write sentinel - Debugging.Assert(() => currentPosition == maxAddress); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentPosition == maxAddress); w.Add(currentPosition); w.Finish(); /* ordinals */ int maxDoc = state.SegmentInfo.DocCount; - Debugging.Assert(() => valueCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount > 0); PackedInt32s.Writer ords = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT); foreach (long n in docToOrd) { diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs index a11809e193..d4eb855cbd 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs @@ -66,7 +66,7 @@ public override void Write(Directory directory, string segmentName, string segme if (fi.IsIndexed) { bits |= Lucene40FieldInfosFormat.IS_INDEXED; - Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); if (indexOptions == IndexOptions.DOCS_ONLY) { bits |= Lucene40FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS; @@ -87,7 +87,7 @@ public override void Write(Directory directory, string segmentName, string segme // pack the DV types in one byte byte dv = DocValuesByte(fi.DocValuesType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY)); byte nrm = DocValuesByte(fi.NormType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY)); - Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0); var val = (byte)(0xff & ((nrm << 4) | (byte)dv)); output.WriteByte(val); output.WriteStringStringMap(fi.Attributes); @@ -113,12 +113,12 @@ public virtual byte DocValuesByte(DocValuesType type, string legacyTypeAtt) { if (type == DocValuesType.NONE) { - Debugging.Assert(() => legacyTypeAtt == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => legacyTypeAtt == 
null); return 0; } else { - Debugging.Assert(() => legacyTypeAtt != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => legacyTypeAtt != null); //return (sbyte)LegacyDocValuesType.ordinalLookup[legacyTypeAtt]; return (byte)legacyTypeAtt.ToLegacyDocValuesType(); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs index 0cdfac0eb3..d5b8e24f84 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs @@ -208,7 +208,7 @@ public override void StartDoc(int docID, int termDocFreq) skipListWriter.BufferSkip(df); } - Debugging.Assert(() => docID < totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + totalNumDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID < totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + totalNumDocs); lastDocID = docID; if (indexOptions == IndexOptions.DOCS_ONLY) @@ -234,12 +234,12 @@ public override void StartDoc(int docID, int termDocFreq) public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { //if (DEBUG) System.out.println("SPW: addPos pos=" + position + " payload=" + (payload == null ? "null" : (payload.Length + " bytes")) + " proxFP=" + proxOut.getFilePointer()); - Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0, () => "invalid indexOptions: " + indexOptions); - Debugging.Assert(() => proxOut != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0, () => "invalid indexOptions: " + indexOptions); + if (Debugging.AssertsEnabled) Debugging.Assert(() => proxOut != null); int delta = position - lastPosition; - Debugging.Assert(() => delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) + if (Debugging.AssertsEnabled) Debugging.Assert(() => delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) lastPosition = position; @@ -271,7 +271,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset // and the numbers aren't that much smaller anyways. int offsetDelta = startOffset - lastOffset; int offsetLength = endOffset - startOffset; - Debugging.Assert(() => offsetDelta >= 0 && offsetLength >= 0, () => "startOffset=" + startOffset + ",lastOffset=" + lastOffset + ",endOffset=" + endOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offsetDelta >= 0 && offsetLength >= 0, () => "startOffset=" + startOffset + ",lastOffset=" + lastOffset + ",endOffset=" + endOffset); if (offsetLength != lastOffsetLength) { proxOut.WriteVInt32(offsetDelta << 1 | 1); @@ -308,11 +308,11 @@ public override void FinishTerm(BlockTermState state) { StandardTermState state_ = (StandardTermState)state; // if (DEBUG) System.out.println("SPW: finishTerm seg=" + segment + " freqStart=" + freqStart); - Debugging.Assert(() => state_.DocFreq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state_.DocFreq > 0); // TODO: wasteful we are counting this (counting # docs // for this term) in two places? 
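// LUCENENET NOTE: presumably df is bumped once per StartDoc() call while state_.DocFreq
// comes from the terms dictionary's own tally of the same docs; the assert below
// cross-checks those two independently maintained counts.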
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
index f8b50ee4ae..d6ffba97aa 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
@@ -67,8 +67,8 @@ public Lucene40SkipListWriter(int skipInterval, int numberOfSkipLevels, int docC
         ///
         public virtual void SetSkipData(int doc, bool storePayloads, int payloadLength, bool storeOffsets, int offsetLength)
         {
-            Debugging.Assert(() => storePayloads || payloadLength == -1);
-            Debugging.Assert(() => storeOffsets || offsetLength == -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => storePayloads || payloadLength == -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => storeOffsets || offsetLength == -1);
             this.curDoc = doc;
             this.curStorePayloads = storePayloads;
             this.curPayloadLength = payloadLength;
@@ -120,8 +120,8 @@ protected override void WriteSkipData(int level, IndexOutput skipBuffer)
 
             if (curStorePayloads || curStoreOffsets)
             {
-                Debugging.Assert(() => curStorePayloads || curPayloadLength == lastSkipPayloadLength[level]);
-                Debugging.Assert(() => curStoreOffsets || curOffsetLength == lastSkipOffsetLength[level]);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => curStorePayloads || curPayloadLength == lastSkipPayloadLength[level]);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => curStoreOffsets || curOffsetLength == lastSkipOffsetLength[level]);
 
                 if (curPayloadLength == lastSkipPayloadLength[level] && curOffsetLength == lastSkipOffsetLength[level])
                 {
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
index 9c87bf37a9..6c5c200472 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
@@ -125,7 +125,7 @@ internal virtual void AddNumericField(FieldInfo field, IEnumerable values
                     ++count;
                 }
-                Debugging.Assert(() => count == maxDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc);
             }
 
             if (uniqueValues != null)
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
index 9889f63d78..11287be2d2 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
@@ -66,7 +66,7 @@ public override void Write(Directory directory, string segmentName, string segme
                 if (fi.IsIndexed)
                 {
                     bits |= Lucene42FieldInfosFormat.IS_INDEXED;
-                    Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                     if (indexOptions == IndexOptions.DOCS_ONLY)
                     {
                         bits |= Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
@@ -87,7 +87,7 @@ public override void Write(Directory directory, string segmentName, string segme
                 // pack the DV types in one byte
                 var dv = DocValuesByte(fi.DocValuesType);
                 var nrm = DocValuesByte(fi.NormType);
-                Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
                 var val = (byte)(0xff & ((nrm << 4) | (byte)dv));
                 output.WriteByte(val);
                 output.WriteStringStringMap(fi.Attributes);
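The (dv & ~0xF) == 0 asserts in both field-infos writers protect the same trick: two 4-bit type codes are packed into one byte, the norms type in the high nibble and the doc-values type in the low nibble. A small sketch of the pack/unpack pair, under the assumption that the codes were produced by DocValuesByte and therefore fit in four bits:

    using System;

    internal static class NibblePackSketch
    {
        public static byte PackTypes(byte dv, byte nrm)
        {
            if ((dv & ~0xF) != 0 || (nrm & ~0x0F) != 0)
                throw new InvalidOperationException("type codes must fit in 4 bits");
            return (byte)(0xff & ((nrm << 4) | dv)); // high nibble = norms, low nibble = doc values
        }

        public static (byte dv, byte nrm) UnpackTypes(byte val)
        {
            return ((byte)(val & 0x0F), (byte)((val >> 4) & 0x0F));
        }
    }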
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
index d8e4ed8af6..87344892c6 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
@@ -107,7 +107,7 @@ public virtual int ReadBlock()
             {
                 buffer[0] = input.ReadVInt32();
                 int count = buffer[0] <= 3 ? baseBlockSize - 1 : 2 * baseBlockSize - 1;
-                Debugging.Assert(() => buffer.Length >= count, () => "buffer.length=" + buffer.Length + " count=" + count);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer.Length >= count, () => "buffer.length=" + buffer.Length + " count=" + count);
                 for (int i = 0; i < count; i++)
                 {
                     buffer[i + 1] = input.ReadVInt32();
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
index 75925c1334..8481e0dd44 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
@@ -88,7 +88,7 @@ public MockInt32StreamFactory(Random random)
         private static string GetExtension(string fileName)
        {
             int idx = fileName.IndexOf('.');
-            Debugging.Assert(() => idx != -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => idx != -1);
             return fileName.Substring(idx);
         }
 
diff --git a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
index d4565c2d1c..ba4029ee48 100644
--- a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
@@ -290,8 +290,8 @@ public override IComparer Comparer
 
             public override void FinishTerm(BytesRef text, TermStats stats)
             {
-                Debugging.Assert(() => stats.DocFreq > 0);
-                Debugging.Assert(() => stats.DocFreq == current.docs.Count);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq == current.docs.Count);
                 current.totalTermFreq = stats.TotalTermFreq;
                 field.termToDocs[current.term] = current;
             }
@@ -324,8 +324,8 @@ public override void StartDoc(int docID, int freq)
 
             public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
             {
-                Debugging.Assert(() => startOffset == -1);
-                Debugging.Assert(() => endOffset == -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset == -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => endOffset == -1);
                 current.positions[posUpto] = position;
                 if (payload != null && payload.Length > 0)
                 {
@@ -341,7 +341,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset
 
             public override void FinishDoc()
             {
-                Debugging.Assert(() => posUpto == current.positions.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => posUpto == current.positions.Length);
             }
         }
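The ReadBlock() hunk above relies on a self-describing layout: the first vInt of a block is real data, but its magnitude also selects the block size (values <= 3 mean a base-sized block, anything larger a double-sized one), which is what the buffer-length assert is checking. A rough sketch of that read path, with the input abstracted to a delegate:

    using System;

    internal static class VariableBlockSketch
    {
        public static int ReadBlock(Func<int> readVInt32, int[] buffer, int baseBlockSize)
        {
            buffer[0] = readVInt32();
            int count = buffer[0] <= 3 ? baseBlockSize - 1 : 2 * baseBlockSize - 1;
            if (buffer.Length < count + 1) // slot 0 is already used by the size marker
                throw new InvalidOperationException("buffer.length=" + buffer.Length + " count=" + count);
            for (int i = 0; i < count; i++)
                buffer[i + 1] = readVInt32();
            return count + 1; // total values now sitting in the buffer
        }
    }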
diff --git a/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs b/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs
index 2c56376f07..c545d6013e 100644
--- a/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs
@@ -31,7 +31,7 @@ public AllDeletedFilterReader(AtomicReader @in)
             : base(@in)
         {
             liveDocs = new Bits.MatchNoBits(@in.MaxDoc);
-            Debugging.Assert(() => MaxDoc == 0 || HasDeletions);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxDoc == 0 || HasDeletions);
         }
 
         public override IBits LiveDocs => liveDocs;
diff --git a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
index 4657a161c2..077b27083d 100644
--- a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
@@ -36,7 +36,7 @@ public AssertingFields(Fields input)
         public override IEnumerator<string> GetEnumerator()
         {
             IEnumerator<string> iterator = base.GetEnumerator();
-            Debugging.Assert(() => iterator != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => iterator != null);
             return iterator;
         }
 
@@ -59,8 +59,8 @@ public AssertingTerms(Terms input)
         public override TermsEnum Intersect(CompiledAutomaton automaton, BytesRef bytes)
         {
             TermsEnum termsEnum = m_input.Intersect(automaton, bytes);
-            Debugging.Assert(() => termsEnum != null);
-            Debugging.Assert(() => bytes == null || bytes.IsValid());
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes == null || bytes.IsValid());
             return new AssertingAtomicReader.AssertingTermsEnum(termsEnum);
         }
 
@@ -73,7 +73,7 @@ public override TermsEnum GetIterator(TermsEnum reuse)
                 reuse = ((AssertingAtomicReader.AssertingTermsEnum)reuse).m_input;
             }
             TermsEnum termsEnum = base.GetIterator(reuse);
-            Debugging.Assert(() => termsEnum != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null);
             return new AssertingAtomicReader.AssertingTermsEnum(termsEnum);
         }
     }
@@ -102,7 +102,7 @@ public AssertingDocsEnum(DocsEnum @in, bool failOnUnsupportedDocID)
             try
             {
                 int docid = @in.DocID;
-                Debugging.Assert(() => docid == -1, () => @in.GetType() + ": invalid initial doc id: " + docid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => docid == -1, () => @in.GetType() + ": invalid initial doc id: " + docid);
            }
             catch (NotSupportedException /*e*/)
             {
@@ -116,9 +116,9 @@ public AssertingDocsEnum(DocsEnum @in, bool failOnUnsupportedDocID)
 
         public override int NextDoc()
         {
-            Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
             int nextDoc = base.NextDoc();
-            Debugging.Assert(() => nextDoc > doc, () => "backwards NextDoc from " + doc + " to " + nextDoc + " " + m_input);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => nextDoc > doc, () => "backwards NextDoc from " + doc + " to " + nextDoc + " " + m_input);
             if (nextDoc == DocIdSetIterator.NO_MORE_DOCS)
             {
                 state = DocsEnumState.FINISHED;
@@ -127,16 +127,16 @@ public override int NextDoc()
             {
                 state = DocsEnumState.ITERATING;
             }
-            Debugging.Assert(() => base.DocID == nextDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => base.DocID == nextDoc);
             return doc = nextDoc;
         }
 
         public override int Advance(int target)
         {
-            Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
-            Debugging.Assert(() => target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
             int advanced = base.Advance(target);
-            Debugging.Assert(() => advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
             if (advanced == DocIdSetIterator.NO_MORE_DOCS)
             {
                 state = DocsEnumState.FINISHED;
@@ -145,7 +145,7 @@ public override int Advance(int target)
             {
                 state = DocsEnumState.ITERATING;
             }
-            Debugging.Assert(() => base.DocID == advanced);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => base.DocID == advanced);
             return doc = advanced;
         }
 
@@ -153,7 +153,7 @@ public override int DocID
         {
             get
             {
-                Debugging.Assert(() => doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc);
                 return doc;
             }
         }
@@ -162,10 +162,10 @@ public override int Freq
         {
             get
             {
-                Debugging.Assert(() => state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
-                Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
                 int freq = base.Freq;
-                Debugging.Assert(() => freq > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0);
                 return freq;
             }
         }
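AssertingDocsEnum above is a pure contract checker: it forwards every call and enforces a three-state protocol (START before the first move, ITERATING while positioned on a document, FINISHED once NO_MORE_DOCS comes back). One way to read that protocol, reduced to a sketch with the wrapped enum abstracted to a delegate:

    using System;

    internal enum DocsEnumState { START, ITERATING, FINISHED }

    internal sealed class ForwardOnlyIterator
    {
        private const int NO_MORE_DOCS = int.MaxValue; // mirrors DocIdSetIterator.NO_MORE_DOCS
        private readonly Func<int> nextDoc;            // the wrapped enum, abstracted away
        private DocsEnumState state = DocsEnumState.START;
        private int doc = -1;

        public ForwardOnlyIterator(Func<int> nextDoc) => this.nextDoc = nextDoc;

        public int NextDoc()
        {
            if (state == DocsEnumState.FINISHED)
                throw new InvalidOperationException("NextDoc() called after NO_MORE_DOCS");
            int next = nextDoc();
            if (next <= doc)
                throw new InvalidOperationException("backwards NextDoc from " + doc + " to " + next);
            state = next == NO_MORE_DOCS ? DocsEnumState.FINISHED : DocsEnumState.ITERATING;
            return doc = next;
        }
    }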
@@ -186,7 +186,7 @@ public AssertingNumericDocValues(NumericDocValues @in, int maxDoc)
 
         public override long Get(int docID)
         {
-            Debugging.Assert(() => docID >= 0 && docID < maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc);
             return @in.Get(docID);
         }
     }
@@ -206,10 +206,10 @@ public AssertingBinaryDocValues(BinaryDocValues @in, int maxDoc)
 
         public override void Get(int docID, BytesRef result)
         {
-            Debugging.Assert(() => docID >= 0 && docID < maxDoc);
-            Debugging.Assert(result.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
             @in.Get(docID, result);
-            Debugging.Assert(result.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
         }
     }
 
@@ -226,23 +226,23 @@ public AssertingSortedDocValues(SortedDocValues @in, int maxDoc)
             this.@in = @in;
             this.maxDoc = maxDoc;
             this.valueCount = @in.ValueCount;
-            Debugging.Assert(() => valueCount >= 0 && valueCount <= maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0 && valueCount <= maxDoc);
         }
 
         public override int GetOrd(int docID)
         {
-            Debugging.Assert(() => docID >= 0 && docID < maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc);
             int ord = @in.GetOrd(docID);
-            Debugging.Assert(() => ord >= -1 && ord < valueCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= -1 && ord < valueCount);
             return ord;
         }
 
         public override void LookupOrd(int ord, BytesRef result)
         {
-            Debugging.Assert(() => ord >= 0 && ord < valueCount);
-            Debugging.Assert(result.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < valueCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
             @in.LookupOrd(ord, result);
-            Debugging.Assert(result.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
         }
 
         public override int ValueCount
@@ -250,25 +250,25 @@ public override int ValueCount
             get
             {
                 int valueCount = @in.ValueCount;
-                Debugging.Assert(() => valueCount == this.valueCount); // should not change
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount == this.valueCount); // should not change
                 return valueCount;
             }
         }
 
         public override void Get(int docID, BytesRef result)
         {
-            Debugging.Assert(() => docID >= 0 && docID < maxDoc);
-            Debugging.Assert(result.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
             @in.Get(docID, result);
-            Debugging.Assert(result.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
         }
 
         public override int LookupTerm(BytesRef key)
         {
-            Debugging.Assert(key.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid);
             int result = @in.LookupTerm(key);
-            Debugging.Assert(() => result < valueCount);
-            Debugging.Assert(key.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => result < valueCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid);
             return result;
         }
     }
@@ -287,32 +287,32 @@ public AssertingSortedSetDocValues(SortedSetDocValues @in, int maxDoc)
             this.@in = @in;
             this.maxDoc = maxDoc;
             this.valueCount = @in.ValueCount;
-            Debugging.Assert(() => valueCount >= 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0);
         }
 
         public override long NextOrd()
         {
-            Debugging.Assert(() => lastOrd != NO_MORE_ORDS);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => lastOrd != NO_MORE_ORDS);
             long ord = @in.NextOrd();
-            Debugging.Assert(() => ord < valueCount);
-            Debugging.Assert(() => ord == NO_MORE_ORDS || ord > lastOrd);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => ord < valueCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => ord == NO_MORE_ORDS || ord > lastOrd);
             lastOrd = ord;
             return ord;
         }
 
         public override void SetDocument(int docID)
         {
-            Debugging.Assert(() => docID >= 0 && docID < maxDoc, () => "docid=" + docID + ",maxDoc=" + maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc, () => "docid=" + docID + ",maxDoc=" + maxDoc);
             @in.SetDocument(docID);
             lastOrd = -2;
         }
 
         public override void LookupOrd(long ord, BytesRef result)
         {
-            Debugging.Assert(() => ord >= 0 && ord < valueCount);
-            Debugging.Assert(result.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < valueCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
             @in.LookupOrd(ord, result);
-            Debugging.Assert(result.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
         }
 
         public override long ValueCount
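The sorted wrappers above all pin the same dictionary-encoding invariants: per-document ordinals stay within [-1, ValueCount), with -1 meaning "no value", lookups only accept [0, ValueCount), and ValueCount itself never changes. A toy dictionary-encoded column makes the shape concrete:

    using System;

    internal sealed class DictColumnSketch
    {
        private readonly string[] dictionary = { "apple", "banana", "cherry" }; // sorted, deduplicated
        private readonly int[] docToOrd = { 1, -1, 2, 1 };                      // one ord per document

        public string GetValue(int docId)
        {
            int ord = docToOrd[docId];
            if (ord < -1 || ord >= dictionary.Length)
                throw new InvalidOperationException("ord out of range: " + ord);
            return ord == -1 ? null : dictionary[ord]; // LookupOrd-style access needs 0 <= ord < ValueCount
        }
    }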
@@ -320,17 +320,17 @@ public override long ValueCount
             get
             {
                 long valueCount = @in.ValueCount;
-                Debugging.Assert(() => valueCount == this.valueCount); // should not change
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount == this.valueCount); // should not change
                 return valueCount;
             }
         }
 
         public override long LookupTerm(BytesRef key)
         {
-            Debugging.Assert(key.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid);
             long result = @in.LookupTerm(key);
-            Debugging.Assert(() => result < valueCount);
-            Debugging.Assert(key.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => result < valueCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid);
             return result;
         }
     }
@@ -348,7 +348,7 @@ public AssertingBits(IBits @in)
 
         public virtual bool Get(int index)
         {
-            Debugging.Assert(() => index >= 0 && index < Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < Length);
             return @in.Get(index);
         }
 
@@ -365,10 +365,10 @@ public AssertingAtomicReader(AtomicReader @in)
             : base(@in)
         {
             // check some basic reader sanity
-            Debugging.Assert(() => @in.MaxDoc >= 0);
-            Debugging.Assert(() => @in.NumDocs <= @in.MaxDoc);
-            Debugging.Assert(() => @in.NumDeletedDocs + @in.NumDocs == @in.MaxDoc);
-            Debugging.Assert(() => !@in.HasDeletions || @in.NumDeletedDocs > 0 && @in.NumDocs < @in.MaxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.MaxDoc >= 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.NumDocs <= @in.MaxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.NumDeletedDocs + @in.NumDocs == @in.MaxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => !@in.HasDeletions || @in.NumDeletedDocs > 0 && @in.NumDocs < @in.MaxDoc);
         }
 
         public override Fields Fields
@@ -409,7 +409,7 @@ public AssertingTermsEnum(TermsEnum @in)
 
         public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags)
         {
-            Debugging.Assert(() => state == State.POSITIONED, () => "Docs(...) called on unpositioned TermsEnum");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "Docs(...) called on unpositioned TermsEnum");
 
             // TODO: should we give this thing a random to be super-evil,
             // and randomly *not* unwrap?
@@ -423,7 +423,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags)
 
         public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags)
         {
-            Debugging.Assert(() => state == State.POSITIONED, () => "DocsAndPositions(...) called on unpositioned TermsEnum");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "DocsAndPositions(...) called on unpositioned TermsEnum");
 
             // TODO: should we give this thing a random to be super-evil,
             // and randomly *not* unwrap?
@@ -439,7 +439,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos
 
         // someone should not call next() after it returns null!!!!
         public override BytesRef Next()
         {
-            Debugging.Assert(() => state == State.INITIAL || state == State.POSITIONED, () => "Next() called on unpositioned TermsEnum");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.INITIAL || state == State.POSITIONED, () => "Next() called on unpositioned TermsEnum");
             BytesRef result = base.Next();
             if (result == null)
             {
@@ -447,7 +447,7 @@ public override BytesRef Next()
             }
             else
             {
-                Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
                 state = State.POSITIONED;
             }
             return result;
@@ -457,7 +457,7 @@ public override long Ord
         {
             get
             {
-                Debugging.Assert(() => state == State.POSITIONED, () => "Ord called on unpositioned TermsEnum");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "Ord called on unpositioned TermsEnum");
                 return base.Ord;
             }
         }
@@ -466,7 +466,7 @@ public override int DocFreq
         {
             get
             {
-                Debugging.Assert(() => state == State.POSITIONED, () => "DocFreq called on unpositioned TermsEnum");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "DocFreq called on unpositioned TermsEnum");
                 return base.DocFreq;
             }
         }
@@ -475,7 +475,7 @@ public override long TotalTermFreq
         {
             get
             {
-                Debugging.Assert(() => state == State.POSITIONED, () => "TotalTermFreq called on unpositioned TermsEnum");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "TotalTermFreq called on unpositioned TermsEnum");
                 return base.TotalTermFreq;
             }
         }
@@ -484,9 +484,9 @@ public override BytesRef Term
         {
             get
             {
-                Debugging.Assert(() => state == State.POSITIONED, () => "Term called on unpositioned TermsEnum");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "Term called on unpositioned TermsEnum");
                 BytesRef ret = base.Term;
-                Debugging.Assert(() => ret == null || ret.IsValid());
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ret == null || ret.IsValid());
                 return ret;
             }
         }
@@ -499,7 +499,7 @@ public override void SeekExact(long ord)
 
         public override SeekStatus SeekCeil(BytesRef term)
         {
-            Debugging.Assert(term.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid);
             SeekStatus result = base.SeekCeil(term);
             if (result == SeekStatus.END)
             {
@@ -514,7 +514,7 @@ public override SeekStatus SeekCeil(BytesRef term)
 
         public override bool SeekExact(BytesRef text)
         {
-            Debugging.Assert(text.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(text.IsValid);
             if (base.SeekExact(text))
             {
                 state = State.POSITIONED;
@@ -529,13 +529,13 @@ public override bool SeekExact(BytesRef text)
 
         public override TermState GetTermState()
         {
-            Debugging.Assert(() => state == State.POSITIONED, () => "GetTermState() called on unpositioned TermsEnum");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "GetTermState() called on unpositioned TermsEnum");
             return base.GetTermState();
         }
 
         public override void SeekExact(BytesRef term, TermState state)
         {
-            Debugging.Assert(term.IsValid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid);
             base.SeekExact(term, state);
             this.state = State.POSITIONED;
         }
@@ -556,15 +556,15 @@ public AssertingDocsAndPositionsEnum(DocsAndPositionsEnum @in)
             : base(@in)
         {
             int docid = @in.DocID;
-            Debugging.Assert(() => docid == -1, () => "invalid initial doc id: " + docid);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => docid == -1, () => "invalid initial doc id: " + docid);
             doc = -1;
         }
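AssertingTermsEnum above encodes a similar protocol for term enumeration: accessors such as Term, Ord, DocFreq, and Docs(...) are only legal once a seek or Next() call has positioned the enum, and a failed seek explicitly unpositions it. Reduced to its state transitions, the contract looks roughly like this:

    using System;

    internal enum TermsEnumGuardState { INITIAL, POSITIONED, UNPOSITIONED }

    internal sealed class PositionedGuard
    {
        private TermsEnumGuardState state = TermsEnumGuardState.INITIAL;

        public void OnSeekHit() => state = TermsEnumGuardState.POSITIONED;    // SeekExact true, SeekCeil != END, Next() != null
        public void OnSeekMiss() => state = TermsEnumGuardState.UNPOSITIONED; // SeekExact false, SeekCeil == END, Next() == null

        public void Require(string member)
        {
            if (state != TermsEnumGuardState.POSITIONED)
                throw new InvalidOperationException(member + " called on unpositioned TermsEnum");
        }
    }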
         public override int NextDoc()
         {
-            Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
             int nextDoc = base.NextDoc();
-            Debugging.Assert(() => nextDoc > doc, () => "backwards nextDoc from " + doc + " to " + nextDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => nextDoc > doc, () => "backwards nextDoc from " + doc + " to " + nextDoc);
             positionCount = 0;
             if (nextDoc == DocIdSetIterator.NO_MORE_DOCS)
             {
@@ -576,16 +576,16 @@ public override int NextDoc()
                 state = DocsEnumState.ITERATING;
                 positionMax = base.Freq;
             }
-            Debugging.Assert(() => base.DocID == nextDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => base.DocID == nextDoc);
             return doc = nextDoc;
         }
 
         public override int Advance(int target)
         {
-            Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
-            Debugging.Assert(() => target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
             int advanced = base.Advance(target);
-            Debugging.Assert(() => advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
             positionCount = 0;
             if (advanced == DocIdSetIterator.NO_MORE_DOCS)
             {
@@ -597,7 +597,7 @@ public override int Advance(int target)
                 state = DocsEnumState.ITERATING;
                 positionMax = base.Freq;
             }
-            Debugging.Assert(() => base.DocID == advanced);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => base.DocID == advanced);
             return doc = advanced;
         }
 
@@ -605,7 +605,7 @@ public override int DocID
         {
             get
             {
-                Debugging.Assert(() => doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc);
                 return doc;
             }
         }
@@ -614,21 +614,21 @@ public override int Freq
         {
             get
             {
-                Debugging.Assert(() => state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
-                Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
                 int freq = base.Freq;
-                Debugging.Assert(() => freq > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0);
                 return freq;
             }
         }
 
         public override int NextPosition()
         {
-            Debugging.Assert(() => state != DocsEnumState.START, () => "NextPosition() called before NextDoc()/Advance()");
-            Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextPosition() called after NO_MORE_DOCS");
-            Debugging.Assert(() => positionCount < positionMax, () => "NextPosition() called more than Freq times!");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "NextPosition() called before NextDoc()/Advance()");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextPosition() called after NO_MORE_DOCS");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount < positionMax, () => "NextPosition() called more than Freq times!");
             int position = base.NextPosition();
-            Debugging.Assert(() => position >= 0 || position == -1, () => "invalid position: " + position);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => position >= 0 || position == -1, () => "invalid position: " + position);
             positionCount++;
             return position;
         }
@@ -637,9 +637,9 @@ public override int StartOffset
         {
             get
             {
-                Debugging.Assert(() => state != DocsEnumState.START, () => "StartOffset called before NextDoc()/Advance()");
-                Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "StartOffset called after NO_MORE_DOCS");
-                Debugging.Assert(() => positionCount > 0, () => "StartOffset called before NextPosition()!");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "StartOffset called before NextDoc()/Advance()");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "StartOffset called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount > 0, () => "StartOffset called before NextPosition()!");
                 return base.StartOffset;
             }
         }
@@ -648,20 +648,20 @@ public override int EndOffset
         {
             get
             {
-                Debugging.Assert(() => state != DocsEnumState.START, () => "EndOffset called before NextDoc()/Advance()");
-                Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "EndOffset called after NO_MORE_DOCS");
-                Debugging.Assert(() => positionCount > 0, () => "EndOffset called before NextPosition()!");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "EndOffset called before NextDoc()/Advance()");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "EndOffset called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount > 0, () => "EndOffset called before NextPosition()!");
                 return base.EndOffset;
             }
         }
 
         public override BytesRef GetPayload()
         {
-            Debugging.Assert(() => state != DocsEnumState.START, () => "GetPayload() called before NextDoc()/Advance()");
-            Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "GetPayload() called after NO_MORE_DOCS");
-            Debugging.Assert(() => positionCount > 0, () => "GetPayload() called before NextPosition()!");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "GetPayload() called before NextDoc()/Advance()");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "GetPayload() called after NO_MORE_DOCS");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount > 0, () => "GetPayload() called before NextPosition()!");
             BytesRef payload = base.GetPayload();
-            Debugging.Assert(() => payload == null || payload.IsValid() && payload.Length > 0, () => "GetPayload() returned payload with invalid length!");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => payload == null || payload.IsValid() && payload.Length > 0, () => "GetPayload() returned payload with invalid length!");
             return payload;
         }
     }
@@ -681,13 +681,13 @@ public override NumericDocValues GetNumericDocValues(string field)
             FieldInfo fi = FieldInfos.FieldInfo(field);
             if (dv != null)
             {
-                Debugging.Assert(() => fi != null);
-                Debugging.Assert(() => fi.DocValuesType == DocValuesType.NUMERIC);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.DocValuesType == DocValuesType.NUMERIC);
                 return new AssertingNumericDocValues(dv, MaxDoc);
             }
             else
             {
-                Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.NUMERIC);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.NUMERIC);
                 return null;
             }
         }
@@ -698,13 +698,13 @@ public override BinaryDocValues GetBinaryDocValues(string field)
             FieldInfo fi = FieldInfos.FieldInfo(field);
             if (dv != null)
             {
-                Debugging.Assert(() => fi != null);
-                Debugging.Assert(() => fi.DocValuesType == DocValuesType.BINARY);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.DocValuesType == DocValuesType.BINARY);
                 return new AssertingBinaryDocValues(dv, MaxDoc);
             }
             else
             {
-                Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.BINARY);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.BINARY);
                 return null;
             }
         }
@@ -715,13 +715,13 @@ public override SortedDocValues GetSortedDocValues(string field)
             FieldInfo fi = FieldInfos.FieldInfo(field);
             if (dv != null)
             {
-                Debugging.Assert(() => fi != null);
-                Debugging.Assert(() => fi.DocValuesType == DocValuesType.SORTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.DocValuesType == DocValuesType.SORTED);
                 return new AssertingSortedDocValues(dv, MaxDoc);
             }
             else
             {
-                Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.SORTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.SORTED);
                 return null;
             }
         }
@@ -732,13 +732,13 @@ public override SortedSetDocValues GetSortedSetDocValues(string field)
             FieldInfo fi = FieldInfos.FieldInfo(field);
             if (dv != null)
             {
-                Debugging.Assert(() => fi != null);
-                Debugging.Assert(() => fi.DocValuesType == DocValuesType.SORTED_SET);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.DocValuesType == DocValuesType.SORTED_SET);
                 return new AssertingSortedSetDocValues(dv, MaxDoc);
             }
             else
             {
-                Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.SORTED_SET);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.SORTED_SET);
                 return null;
             }
         }
@@ -749,13 +749,13 @@ public override NumericDocValues GetNormValues(string field)
             FieldInfo fi = FieldInfos.FieldInfo(field);
             if (dv != null)
             {
-                Debugging.Assert(() => fi != null);
-                Debugging.Assert(() => fi.HasNorms);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.HasNorms);
                 return new AssertingNumericDocValues(dv, MaxDoc);
             }
             else
             {
-                Debugging.Assert(() => fi == null || fi.HasNorms == false);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.HasNorms == false);
                 return null;
             }
         }
@@ -769,13 +769,13 @@ public override IBits LiveDocs
                 IBits liveDocs = base.LiveDocs;
                 if (liveDocs != null)
                 {
-                    Debugging.Assert(() => MaxDoc == liveDocs.Length);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxDoc == liveDocs.Length);
                     liveDocs = new AssertingBits(liveDocs);
                 }
                 else
                 {
-                    Debugging.Assert(() => MaxDoc == NumDocs);
-                    Debugging.Assert(() => !HasDeletions);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxDoc == NumDocs);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => !HasDeletions);
                 }
                 return liveDocs;
             }
@@ -787,14 +787,14 @@ public override IBits GetDocsWithField(string field)
             FieldInfo fi = FieldInfos.FieldInfo(field);
             if (docsWithField != null)
             {
-                Debugging.Assert(() => fi != null);
-                Debugging.Assert(() => fi.HasDocValues);
-                Debugging.Assert(() => MaxDoc == docsWithField.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.HasDocValues);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxDoc == docsWithField.Length);
                 docsWithField = new AssertingBits(docsWithField);
             }
             else
             {
-                Debugging.Assert(() => fi == null || fi.HasDocValues == false);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.HasDocValues == false);
             }
             return docsWithField;
         }
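Every doc-values getter in the reader wrapper above applies the same two-sided check: a non-null producer implies the schema (FieldInfo) declares the matching type, and a null producer implies it does not. Expressed once as a helper, types abbreviated and the FieldInfo lookup assumed to have already happened:

    using System;
    using Lucene.Net.Index;

    internal static class DocValuesSchemaCheck
    {
        public static NumericDocValues Checked(NumericDocValues dv, FieldInfo fi)
        {
            if (dv != null)
            {
                // a live producer implies the schema agrees
                if (fi == null || fi.DocValuesType != DocValuesType.NUMERIC)
                    throw new InvalidOperationException("producer exists but schema disagrees");
            }
            else if (fi != null && fi.DocValuesType == DocValuesType.NUMERIC)
            {
                throw new InvalidOperationException("schema claims NUMERIC but no producer found");
            }
            return dv;
        }
    }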
diff --git a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
index 0ba88c9b26..e0ab0ac206 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
@@ -116,7 +116,7 @@ public virtual void TestOneNumber()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader))).GetNumericDocValues("dv");
                 Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this)
             }
@@ -159,7 +159,7 @@ public virtual void TestOneSingle() // LUCENENET specific - renamed from TestOne
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv");
                 Assert.AreEqual((long)J2N.BitConversion.SingleToInt32Bits(5.7f), dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - cast required because types don't match (xUnit checks this)
             }
@@ -201,7 +201,7 @@ public virtual void TestTwoNumbers()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1");
                 Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this)
                 dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv2");
@@ -248,7 +248,7 @@ public virtual void TestTwoBinaryValues()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv1");
                 dv.Get(hits.ScoreDocs[i].Doc, scratch);
                 Assert.AreEqual(new BytesRef(longTerm), scratch);
@@ -297,7 +297,7 @@ public virtual void TestTwoFieldsMixed()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1");
                 Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this)
                 BinaryDocValues dv2 = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv2");
@@ -346,7 +346,7 @@ public virtual void TestThreeFieldsMixed()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv1");
                 int ord = dv.GetOrd(0);
                 dv.LookupOrd(ord, scratch);
@@ -399,7 +399,7 @@ public virtual void TestThreeFieldsMixed2()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv2");
                 int ord = dv.GetOrd(0);
                 dv.LookupOrd(ord, scratch);
@@ -438,7 +438,7 @@ public virtual void TestTwoDocumentsNumeric()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv");
                 Assert.AreEqual(1L, dv.Get(0)); // LUCENENET specific - 1L required because types don't match (xUnit checks this)
                 Assert.AreEqual(2L, dv.Get(1)); // LUCENENET specific - 2L required because types don't match (xUnit checks this)
@@ -473,7 +473,7 @@ public virtual void TestTwoDocumentsMerged()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv");
                 for (int i = 0; i < 2; i++)
                 {
@@ -517,7 +517,7 @@ public virtual void TestBigNumericRange()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv");
                 Assert.AreEqual(long.MinValue, dv.Get(0));
                 Assert.AreEqual(long.MaxValue, dv.Get(1));
@@ -549,7 +549,7 @@ public virtual void TestBigNumericRange2()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv");
                 Assert.AreEqual(-8841491950446638677L, dv.Get(0));
                 Assert.AreEqual(9062230939892376225L, dv.Get(1));
@@ -591,7 +591,7 @@ public virtual void TestBytes()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv");
                 dv.Get(hits.ScoreDocs[i].Doc, scratch);
                 Assert.AreEqual(new BytesRef("hello world"), scratch);
@@ -627,7 +627,7 @@ public virtual void TestBytesTwoDocumentsMerged()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 for (int i = 0; i < 2; i++)
@@ -684,7 +684,7 @@ public virtual void TestSortedBytes()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 dv.LookupOrd(dv.GetOrd(hits.ScoreDocs[i].Doc), scratch);
                 Assert.AreEqual(new BytesRef("hello world"), scratch);
@@ -717,7 +717,7 @@ public virtual void TestSortedBytesTwoDocuments()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 dv.LookupOrd(dv.GetOrd(0), scratch);
@@ -755,7 +755,7 @@ public virtual void TestSortedBytesThreeDocuments()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 Assert.AreEqual(2, dv.ValueCount);
                 BytesRef scratch = new BytesRef();
@@ -797,7 +797,7 @@ public virtual void TestSortedBytesTwoDocumentsMerged()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 Assert.AreEqual(2, dv.ValueCount); // 2 ords
                 BytesRef scratch = new BytesRef();
@@ -894,7 +894,7 @@ public virtual void TestBytesWithNewline()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 dv.Get(0, scratch);
@@ -925,7 +925,7 @@ public virtual void TestMissingSortedBytes()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 dv.LookupOrd(dv.GetOrd(0), scratch);
@@ -1045,7 +1045,7 @@ public virtual void TestEmptySortedBytes()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 Assert.AreEqual(0, dv.GetOrd(0));
@@ -1080,7 +1080,7 @@ public virtual void TestEmptyBytes()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 dv.Get(0, scratch);
@@ -1114,7 +1114,7 @@ public virtual void TestVeryLargeButLegalBytes()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 dv.Get(0, scratch);
@@ -1146,7 +1146,7 @@ public virtual void TestVeryLargeButLegalSortedBytes()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 dv.Get(0, scratch);
@@ -1174,7 +1174,7 @@ public virtual void TestCodecUsesOwnBytes()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv");
                 var mybytes = new byte[20];
                 BytesRef scratch = new BytesRef(mybytes);
@@ -1205,7 +1205,7 @@ public virtual void TestCodecUsesOwnSortedBytes()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 var mybytes = new byte[20];
                 BytesRef scratch = new BytesRef(mybytes);
@@ -1239,7 +1239,7 @@ public virtual void TestCodecUsesOwnBytesEachTime()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 dv.Get(0, scratch);
@@ -1277,7 +1277,7 @@ public virtual void TestCodecUsesOwnSortedBytesEachTime()
             // Now search the index:
             using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true
             {
-                Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
                 BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
                 BytesRef scratch = new BytesRef();
                 dv.Get(0, scratch);
@@ -1494,7 +1494,7 @@ private void DoTestNumericsVsStoredFields(Int64Producer longs)
             int numDocs = AtLeast(300);
             // numDocs should be always > 256 so that in case of a codec that optimizes
             // for numbers of values <= 256, all storage layouts are tested
-            Debugging.Assert(() => numDocs > 256);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 256);
             for (int i = 0; i < numDocs; i++)
             {
                 idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture));
@@ -1580,7 +1580,7 @@ private void DoTestMissingVsFieldCache(Int64Producer longs)
             int numDocs = AtLeast(300);
             // numDocs should be always > 256 so that in case of a codec that optimizes
             // for numbers of values <= 256, all storage layouts are tested
-            Debugging.Assert(() => numDocs > 256);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 256);
             for (int i = 0; i < numDocs; i++)
             {
                 idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture));
@@ -2649,13 +2649,13 @@ private void DoTestSortedSetVsStoredFields(int minLength, int maxLength, int max
                     }
                     for (int j = 0; j < stringValues.Length; j++)
                    {
-                        Debugging.Assert(() => docValues != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => docValues != null);
                         long ord = docValues.NextOrd();
-                        Debugging.Assert(() => ord != SortedSetDocValues.NO_MORE_ORDS);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => ord != SortedSetDocValues.NO_MORE_ORDS);
                         docValues.LookupOrd(ord, scratch);
                         Assert.AreEqual(stringValues[j], scratch.Utf8ToString());
                     }
-                    Debugging.Assert(() => docValues == null || docValues.NextOrd() == SortedSetDocValues.NO_MORE_ORDS);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => docValues == null || docValues.NextOrd() == SortedSetDocValues.NO_MORE_ORDS);
                 }
             }
         } // ir.Dispose();
diff --git a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
index de001078e6..7498cfef8c 100644
--- a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
@@ -239,7 +239,7 @@ public override int NextPosition()
                     posUpto = freq;
                     return 0;
                 }
-                Debugging.Assert(() => posUpto < freq);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => posUpto < freq);
 
                 if (posUpto == 0 && random.NextBoolean())
                 {
diff --git a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
index dca1e12806..528d0c976e 100644
--- a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
+++ b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs
@@ -106,7 +106,7 @@ public override MergeSpecification FindForcedMerges(SegmentInfos segmentInfos, i
                 {
                     foreach (SegmentCommitInfo info in merge.Segments)
                     {
-                        Debugging.Assert(() => segmentsToMerge.ContainsKey(info));
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => segmentsToMerge.ContainsKey(info));
                     }
                 }
             }
diff --git a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
index 2017704d01..2b0436f995 100644
--- a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
+++ b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
@@ -92,7 +92,7 @@ public override PostingsFormat GetPostingsFormatForField(string name)
                 }
                 previousMappings[name] = codec;
                 // Safety:
-                Debugging.Assert(() => previousMappings.Count < 10000, () => "test went insane");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => previousMappings.Count < 10000, () => "test went insane");
             }
 
             //if (LuceneTestCase.VERBOSE)
@@ -115,7 +115,7 @@ public override DocValuesFormat GetDocValuesFormatForField(string name)
                 }
                 previousDVMappings[name] = codec;
                 // Safety:
-                Debugging.Assert(() => previousDVMappings.Count < 10000, () => "test went insane");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => previousDVMappings.Count < 10000, () => "test went insane");
             }
 
             //if (LuceneTestCase.VERBOSE)
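RandomCodec above randomizes the per-field format but memoizes the first pick, so a field keeps one format for the life of the test; the Count < 10000 assert is a tripwire against tests that generate unbounded field names. A compact sketch of the same memoize-then-cap shape, with formats reduced to strings:

    using System;
    using System.Collections.Generic;

    internal sealed class PerFieldPickSketch
    {
        private readonly Dictionary<string, string> previousMappings = new Dictionary<string, string>();
        private readonly Random random;
        private readonly IList<string> formats;

        public PerFieldPickSketch(Random random, IList<string> formats)
        {
            this.random = random;
            this.formats = formats;
        }

        public string FormatForField(string field)
        {
            if (!previousMappings.TryGetValue(field, out string format))
            {
                format = formats[random.Next(formats.Count)]; // first request decides
                previousMappings[field] = format;             // every later request agrees
                if (previousMappings.Count >= 10000)          // Safety:
                    throw new InvalidOperationException("test went insane");
            }
            return format;
        }
    }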
diff --git a/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs b/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs
index 97c8f63728..52faf55f51 100644
--- a/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs
+++ b/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs
@@ -36,7 +36,7 @@ internal class RandomDocumentsWriterPerThreadPool : DocumentsWriterPerThreadPool
         public RandomDocumentsWriterPerThreadPool(int maxNumPerThreads, Random random)
             : base(maxNumPerThreads)
         {
-            Debugging.Assert(() => MaxThreadStates >= 1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxThreadStates >= 1);
             states = new ThreadState[maxNumPerThreads];
             this.random = new Random(random.Next());
             this.maxRetry = 1 + random.Next(10);
@@ -56,14 +56,14 @@ public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter
                     }
                 }
             }
-            Debugging.Assert(() => NumThreadStatesActive > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => NumThreadStatesActive > 0);
             for (int i = 0; i < maxRetry; i++)
             {
                 int ord = random.Next(NumThreadStatesActive);
                 lock (this)
                 {
                     threadState = states[ord];
-                    Debugging.Assert(() => threadState != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState != null);
                 }
 
                 if (threadState.TryLock())
@@ -89,12 +89,12 @@ public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter
                 if (newThreadState != null) // did we get a new state?
                 {
                     threadState = states[NumThreadStatesActive - 1] = newThreadState;
-                    //Debugging.Assert(threadState.HeldByCurrentThread);
+                    //if (Debugging.AssertsEnabled) Debugging.Assert(threadState.HeldByCurrentThread);
                     return threadState;
                 }
                 // if no new state is available lock the random one
             }
-            Debugging.Assert(() => threadState != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState != null);
             threadState.@Lock();
             return threadState;
         }
diff --git a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs
index 146e070a7e..91a368e000 100644
--- a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs
+++ b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs
@@ -440,7 +440,7 @@ private void _DoRandomForceMerge() // LUCENENET specific - added leading undersc
                     Console.WriteLine("RIW: doRandomForceMerge(" + limit + ")");
                 }
                 IndexWriter.ForceMerge(limit);
-                Debugging.Assert(() => !doRandomForceMergeAssert || IndexWriter.SegmentCount <= limit, () => "limit=" + limit + " actual=" + IndexWriter.SegmentCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => !doRandomForceMergeAssert || IndexWriter.SegmentCount <= limit, () => "limit=" + limit + " actual=" + IndexWriter.SegmentCount);
             }
         }
     }
diff --git a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
index e2dbc3b832..176882cce3 100644
--- a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
@@ -227,7 +227,7 @@ public override void Run()
                             if (toDeleteSubDocs.Count > 0 && Random.NextBoolean())
                             {
                                 delSubDocs = toDeleteSubDocs[Random.Next(toDeleteSubDocs.Count)];
-                                Debugging.Assert(() => !delSubDocs.Deleted);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(() => !delSubDocs.Deleted);
                                 toDeleteSubDocs.Remove(delSubDocs);
                                 // Update doc block, replacing prior packID
                                 packID = delSubDocs.PackID;
@@ -364,7 +364,7 @@ public override void Run()
 
                             foreach (SubDocs subDocs in toDeleteSubDocs)
                             {
-                                Debugging.Assert(() => !subDocs.Deleted);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(() => !subDocs.Deleted);
                                 delPackIDs.Add(subDocs.PackID);
                                 outerInstance.DeleteDocuments(new Term("packID", subDocs.PackID));
                                 subDocs.Deleted = true;
diff --git a/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs b/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs
index 4d88aa5b46..c3de50616a 100644
--- a/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs
+++ b/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs
@@ -61,7 +61,7 @@ public override void Score(ICollector collector)
             try
             {
                 bool remaining = @in.Score(collector, DocsEnum.NO_MORE_DOCS);
-                Debugging.Assert(() => !remaining);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => !remaining);
             }
 #pragma warning disable 168
             catch (NotSupportedException e)
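The AssertingBulkScorer hunk above checks the BulkScorer contract at its boundary case: Score(collector, max) reports whether documents beyond max remain, so scoring up to NO_MORE_DOCS must report nothing left. As a sketch, with the wrapped call abstracted to a delegate:

    using System;

    internal static class BulkScoreSketch
    {
        // scoreUpTo(max) returns true when docs beyond max remain to be scored.
        public static void ScoreAll(Func<int, bool> scoreUpTo)
        {
            bool remaining = scoreUpTo(int.MaxValue); // int.MaxValue mirrors DocsEnum.NO_MORE_DOCS
            if (remaining)
                throw new InvalidOperationException("Score(collector, NO_MORE_DOCS) left documents unscored");
        }
    }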
"Out of order : " + lastCollected + " " + doc); } @in.Collect(doc); lastCollected = doc; diff --git a/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs b/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs index 72351d5c74..ed7ae45142 100644 --- a/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs +++ b/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs @@ -105,10 +105,10 @@ internal virtual bool Iterating() public override float GetScore() { - Debugging.Assert(Iterating); + if (Debugging.AssertsEnabled) Debugging.Assert(Iterating); float score = @in.GetScore(); - Debugging.Assert(() => !float.IsNaN(score)); - Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); return score; } @@ -125,7 +125,7 @@ public override int Freq { get { - Debugging.Assert(Iterating); + if (Debugging.AssertsEnabled) Debugging.Assert(Iterating); return @in.Freq; } } diff --git a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs index 14db5823dd..8f046f89bb 100644 --- a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs +++ b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs @@ -294,7 +294,7 @@ private static IndexReader[] LoadEmptyReaders() // LUCENENET: Avoid static const private static IndexReader MakeEmptyIndex(Random random, int numDocs) { - Debugging.Assert(() => numDocs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 0); Directory d = new MockDirectoryWrapper(random, new RAMDirectory()); IndexWriter w = new IndexWriter(d, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random))); for (int i = 0; i < numDocs; i++) @@ -487,7 +487,7 @@ public virtual void SetNextReader(AtomicReaderContext context) leafPtr++; } lastReader[0] = (AtomicReader)context.Reader; - Debugging.Assert(() => readerContextArray[leafPtr].Reader == context.Reader); + if (Debugging.AssertsEnabled) Debugging.Assert(() => readerContextArray[leafPtr].Reader == context.Reader); this.scorer = null; lastDoc[0] = -1; } diff --git a/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs b/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs index f0cf8714d4..811bcb7045 100644 --- a/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs +++ b/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs @@ -71,7 +71,7 @@ public override Similarity Get(string field) { lock (this) { - Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); if (!previousMappings.TryGetValue(field, out Similarity sim) || sim == null) { sim = knownSims[Math.Max(0, Math.Abs(perFieldSeed ^ field.GetHashCode())) % knownSims.Count]; diff --git a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs index 9b770f4cb3..78f47d997c 100644 --- a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs +++ b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs @@ -220,7 +220,7 @@ internal virtual TopDocs SearchNode(int nodeID, long[] nodeVersions, Query q, So } else { - Debugging.Assert(() => searchAfter == null); // not supported yet + if (Debugging.AssertsEnabled) Debugging.Assert(() => searchAfter == null); // not supported yet return s.LocalSearch(q, numHits, sort); } } @@ -306,7 +306,7 @@ public ShardIndexSearcher(ShardSearchingTestBase.NodeState 
nodeState, long[] nod this.outerInstance = nodeState; this.nodeVersions = nodeVersions; MyNodeID = nodeID; - Debugging.Assert(() => MyNodeID == nodeState.MyNodeID, () => "myNodeID=" + nodeID + " NodeState.this.myNodeID=" + nodeState.MyNodeID); + if (Debugging.AssertsEnabled) Debugging.Assert(() => MyNodeID == nodeState.MyNodeID, () => "myNodeID=" + nodeID + " NodeState.this.myNodeID=" + nodeState.MyNodeID); } public override Query Rewrite(Query original) @@ -348,7 +348,7 @@ public override Query Rewrite(Query original) public override TermStatistics TermStatistics(Term term, TermContext context) { - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); long docFreq = 0; long totalTermFreq = 0; for (int nodeID = 0; nodeID < nodeVersions.Length; nodeID++) @@ -364,7 +364,7 @@ public override TermStatistics TermStatistics(Term term, TermContext context) subStats = outerInstance.termStatsCache[key]; // We pre-cached during rewrite so all terms // better be here... - Debugging.Assert(() => subStats != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => subStats != null); } long nodeDocFreq = subStats.DocFreq; @@ -419,7 +419,7 @@ public override CollectionStatistics CollectionStatistics(string field) } // Collection stats are pre-shared on reopen, so, // we better not have a cache miss: - Debugging.Assert(() => nodeStats != null, () => "myNodeID=" + MyNodeID + " nodeID=" + nodeID + " version=" + nodeVersions[nodeID] + " field=" + field); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nodeStats != null, () => "myNodeID=" + MyNodeID + " nodeID=" + nodeID + " version=" + nodeVersions[nodeID] + " field=" + field); long nodeDocCount = nodeStats.DocCount; if (docCount >= 0 && nodeDocCount >= 0) @@ -451,7 +451,7 @@ public override CollectionStatistics CollectionStatistics(string field) sumDocFreq = -1; } - Debugging.Assert(() => nodeStats.MaxDoc >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nodeStats.MaxDoc >= 0); maxDoc += nodeStats.MaxDoc; } @@ -551,7 +551,7 @@ public virtual TopDocs LocalSearchAfter(ScoreDoc after, Query query, int numHits public override TopFieldDocs Search(Query query, int numHits, Sort sort) { - Debugging.Assert(() => sort != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sort != null); TopDocs[] shardHits = new TopDocs[nodeVersions.Length]; for (int nodeID = 0; nodeID < nodeVersions.Length; nodeID++) { @@ -604,7 +604,7 @@ public NodeState(ShardSearchingTestBase shardSearchingTestBase, Random random, i public void InitSearcher(long[] nodeVersions) { - Debugging.Assert(() => currentShardSearcher == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentShardSearcher == null); Array.Copy(nodeVersions, 0, currentNodeVersions, 0, currentNodeVersions.Length); currentShardSearcher = new ShardIndexSearcher(this, GetCurrentNodeVersions(), Mgr.Acquire().IndexReader, MyNodeID); } @@ -781,8 +781,8 @@ protected virtual void Start(int numNodes, double runTimeSec, int maxSearcherAge for (int nodeID = 0; nodeID < numNodes; nodeID++) { IndexSearcher s = m_nodes[nodeID].Mgr.Acquire(); - Debugging.Assert(() => nodeVersions[nodeID] == m_nodes[nodeID].Searchers.Record(s)); - Debugging.Assert(() => s != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nodeVersions[nodeID] == m_nodes[nodeID].Searchers.Record(s)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => s != null); try { BroadcastNodeReopen(nodeID, nodeVersions[nodeID], s); diff --git 
a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs index 16e968fdfb..60aa77d7f1 100644 --- a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs +++ b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs @@ -963,7 +963,7 @@ protected override void Dispose(bool disposing) { if (endSet.Contains(s) && !startSet.Contains(s)) { - Debugging.Assert(() => pendingDeletions.Contains(s)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingDeletions.Contains(s)); if (LuceneTestCase.Verbose) { Console.WriteLine("MDW: Unreferenced check: Ignoring referenced file: " + s + " " + @@ -1024,7 +1024,7 @@ protected override void Dispose(bool disposing) extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions; } - Debugging.Assert(() => false, () => "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras); } DirectoryReader ir1 = DirectoryReader.Open(this); @@ -1034,7 +1034,7 @@ protected override void Dispose(bool disposing) DirectoryReader ir2 = DirectoryReader.Open(this); int numDocs2 = ir2.NumDocs; ir2.Dispose(); - Debugging.Assert(() => numDocs1 == numDocs2, () => "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs1 == numDocs2, () => "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2); } } } diff --git a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs index 861bb3ec38..3d9ea80958 100644 --- a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs +++ b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs @@ -259,7 +259,7 @@ internal int randomInt(int max) [ExceptionToNetNumericConvention] // LUCENENET: This is for making test porting easier, keeping as-is internal int randomIntBetween(int min, int max) { - Debugging.Assert(() => max >= min, () => "max must be >= min: " + min + ", " + max); + if (Debugging.AssertsEnabled) Debugging.Assert(() => max >= min, () => "max must be >= min: " + min + ", " + max); long range = (long)max - (long)min; if (range < int.MaxValue) { diff --git a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs index 2f2b3073a1..52e690435d 100644 --- a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs +++ b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs @@ -191,7 +191,7 @@ internal static int GetRandomCodePoint(Random r, Transition t) // LUCENENET spec } } - Debugging.Assert(() => code >= t.Min && code <= t.Max && (code < UnicodeUtil.UNI_SUR_HIGH_START || code > UnicodeUtil.UNI_SUR_LOW_END), () => "code=" + code + " min=" + t.Min + " max=" + t.Max); + if (Debugging.AssertsEnabled) Debugging.Assert(() => code >= t.Min && code <= t.Max && (code < UnicodeUtil.UNI_SUR_HIGH_START || code > UnicodeUtil.UNI_SUR_LOW_END), () => "code=" + code + " min=" + t.Min + " max=" + t.Max); return code; } @@ -399,7 +399,7 @@ public static void AssertNoDetachedStates(Automaton a) { int numStates = a.GetNumberOfStates(); 
            a.ClearNumberedStates(); // force recomputation of cached numbered states
-            Debugging.Assert(() => numStates == a.GetNumberOfStates(), () => "automaton has " + (numStates - a.GetNumberOfStates()) + " detached states");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numStates == a.GetNumberOfStates(), () => "automaton has " + (numStates - a.GetNumberOfStates()) + " detached states");
        }
    }
diff --git a/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs b/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs
index 83f4ded483..1c74e96aa9 100644
--- a/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs
@@ -56,7 +56,7 @@ public BaseDocIdSetTestCase(BeforeAfterClass beforeAfter)
        /// Create a random set which has <paramref name="numBitsSet"/> of its bits set.
        protected static BitSet RandomSet(int numBits, int numBitsSet)
        {
-            Debugging.Assert(() => numBitsSet <= numBits);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numBitsSet <= numBits);
            BitSet set = new BitSet(numBits);
            Random random = Random;
            if (numBitsSet == numBits)
diff --git a/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs b/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs
index db6aea80dd..e573e12bed 100644
--- a/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs
+++ b/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs
@@ -35,7 +35,7 @@ public override bool IsEnabled(string component)
        public override void Message(string component, string message)
        {
-            Debugging.Assert(() => !message.Contains("non-bulk merges"));
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => !message.Contains("non-bulk merges"));
        }
    }
}
\ No newline at end of file
diff --git a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
index a7929e0136..875dc698c3 100644
--- a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
+++ b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
@@ -103,7 +103,7 @@ private static BytesRef ToBytesRef(Int32sRef ir)
            for (int i = 0; i < ir.Length; i++)
            {
                int x = ir.Int32s[ir.Offset + i];
-                Debugging.Assert(() => x >= 0 && x <= 255);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => x >= 0 && x <= 255);
                br.Bytes[i] = (byte)x;
            }
            br.Length = ir.Length;
@@ -219,7 +219,7 @@ public virtual void DoTest(bool testPruning)
        // of the term prefix that matches
        private T Run(FST<T> fst, Int32sRef term, int[] prefixLength)
        {
-            Debugging.Assert(() => prefixLength == null || prefixLength.Length == 1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => prefixLength == null || prefixLength.Length == 1);
            FST.Arc<T> arc = fst.GetFirstArc(new FST.Arc<T>());
            T NO_OUTPUT = fst.Outputs.NoOutput;
            T output = NO_OUTPUT;
@@ -690,7 +690,7 @@ private void VerifyUnPruned(int inputMode, FST<T> fst)
                    if (!termsMap.ContainsKey(term) && term.CompareTo(pairs[upto].Input) > 0)
                    {
                        int pos = pairs.BinarySearch(new InputOutput(term, default(T)));
-                        Debugging.Assert(() => pos < 0);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < 0);
                        upto = -(pos + 1);
                        if (random.NextBoolean())
@@ -887,7 +887,7 @@ private void VerifyPruned(int inputMode, FST<T> fst, int prune1, int prune2)
                    }
                    else
                    {
-                        Debugging.Assert(() => prune2 > 0);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => prune2 > 0);
                        if (prune2 > 1 && cmo.Count >= prune2)
                        {
                            keep = true;
diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
index d89f9b4b93..bf79a356cf 100644
--- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs +++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs @@ -2678,7 +2678,7 @@ public virtual void AssertTermsEquals(string info, IndexReader leftReader, Terms /// public virtual void AssertTermsStatisticsEquals(string info, Terms leftTerms, Terms rightTerms) { - Debugging.Assert(() => leftTerms.Comparer == rightTerms.Comparer); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leftTerms.Comparer == rightTerms.Comparer); if (leftTerms.DocCount != -1 && rightTerms.DocCount != -1) { Assert.AreEqual(leftTerms.DocCount, rightTerms.DocCount, info); @@ -3053,7 +3053,7 @@ public virtual void AssertNormsEquals(string info, IndexReader leftReader, Index /// public virtual void AssertStoredFieldsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); for (int i = 0; i < leftReader.MaxDoc; i++) { Document leftDoc = leftReader.Document(i); @@ -3100,7 +3100,7 @@ public virtual void AssertStoredFieldEquals(string info, IIndexableField leftFie /// public virtual void AssertTermVectorsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); for (int i = 0; i < leftReader.MaxDoc; i++) { Fields leftFields = leftReader.GetTermVectors(i); @@ -3270,7 +3270,7 @@ public virtual void AssertDocValuesEquals(string info, int num, NumericDocValues // TODO: this is kinda stupid, we don't delete documents in the test. public virtual void AssertDeletedDocsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - Debugging.Assert(() => leftReader.NumDeletedDocs == rightReader.NumDeletedDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leftReader.NumDeletedDocs == rightReader.NumDeletedDocs); IBits leftBits = MultiFields.GetLiveDocs(leftReader); IBits rightBits = MultiFields.GetLiveDocs(rightReader); @@ -3281,7 +3281,7 @@ public virtual void AssertDeletedDocsEquals(string info, IndexReader leftReader, return; } - Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); Assert.AreEqual(leftBits.Length, rightBits.Length, info); for (int i = 0; i < leftReader.MaxDoc; i++) { @@ -3365,7 +3365,7 @@ public static bool SlowFileExists(Directory dir, string fileName) //// if (TempDirBase == null) //// { //// DirectoryInfo directory = new DirectoryInfo(System.IO.Path.GetTempPath()); - //// //Debugging.Assert(() => directory.Exists && directory.Directory != null && directory.CanWrite()); + //// //if (Debugging.AssertsEnabled) Debugging.Assert(() => directory.Exists && directory.Directory != null && directory.CanWrite()); //// RandomizedContext ctx = RandomizedContext.Current; //// Type clazz = ctx.GetTargetType; @@ -3504,7 +3504,7 @@ public static FileInfo CreateTempFile() /// private static void RegisterToRemoveAfterSuite(FileSystemInfo f) { - Debugging.Assert(() => f != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => f != null); if (LuceneTestCase.LeaveTemporary) { diff --git a/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs b/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs index 50f9d60fc1..eafb5f2cf0 100644 --- a/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs 
+++ b/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs
@@ -29,13 +29,13 @@ public class NullInfoStream : InfoStream
    {
        public override void Message(string component, string message)
        {
-            Debugging.Assert(() => component != null);
-            Debugging.Assert(() => message != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => component != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => message != null);
        }
        public override bool IsEnabled(string component)
        {
-            Debugging.Assert(() => component != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => component != null);
            return true; // to actually enable logging, we just ignore on message()
        }
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs b/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
index f3cc2956c5..fa25854678 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
@@ -56,7 +56,7 @@ public override void Evaluate()
                {
                    try
                    {
-                        Debugging.Assert(false);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => false); // LUCENENET specific - use the Func<bool> overload, which is the only one Debugging defines
                        string msg = "Test class requires enabled assertions, enable globally (-ea)" + " or for Solr/Lucene subpackages only: " + Description.ClassName;
                        Console.Error.WriteLine(msg);
                        throw new Exception(msg);
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
index eaf64337ad..f32b343422 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
@@ -169,7 +169,7 @@ public override void Before(LuceneTestCase testInstance)
                !ShouldAvoidCodec("Lucene3x"))) // preflex-only setup
            {
                codec = Codec.ForName("Lucene3x");
-                Debugging.Assert(() => (codec is PreFlexRWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is PreFlexRWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
                LuceneTestCase.OldFormatImpersonationIsActive = true;
            }
            else if ("Lucene40".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
@@ -179,8 +179,8 @@ public override void Before(LuceneTestCase testInstance)
            {
                codec = Codec.ForName("Lucene40");
                LuceneTestCase.OldFormatImpersonationIsActive = true;
-                Debugging.Assert(() => (codec is Lucene40RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
-                Debugging.Assert(() => (PostingsFormat.ForName("Lucene40") is Lucene40RWPostingsFormat), () => "fix your IPostingsFormatFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is Lucene40RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => (PostingsFormat.ForName("Lucene40") is Lucene40RWPostingsFormat), () => "fix your IPostingsFormatFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
            }
            else if ("Lucene41".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
                "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) &&
@@ -190,7 +190,7 @@ public override void Before(LuceneTestCase 
testInstance) { codec = Codec.ForName("Lucene41"); LuceneTestCase.OldFormatImpersonationIsActive = true; - Debugging.Assert(() => (codec is Lucene41RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is Lucene41RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); } else if ("Lucene42".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) && "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) && @@ -200,7 +200,7 @@ public override void Before(LuceneTestCase testInstance) { codec = Codec.ForName("Lucene42"); LuceneTestCase.OldFormatImpersonationIsActive = true; - Debugging.Assert(() => (codec is Lucene42RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is Lucene42RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); } else if ("Lucene45".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) && "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) && @@ -210,7 +210,7 @@ public override void Before(LuceneTestCase testInstance) { codec = Codec.ForName("Lucene45"); LuceneTestCase.OldFormatImpersonationIsActive = true; - Debugging.Assert(() => (codec is Lucene45RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is Lucene45RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework"); } else if (("random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) == false) || ("random".Equals(LuceneTestCase.TestDocValuesFormat, StringComparison.Ordinal) == false)) @@ -275,7 +275,7 @@ public override void Before(LuceneTestCase testInstance) } else { - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); } Codec.Default = codec; diff --git a/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs b/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs index 1a8b2a16e6..9730e3f052 100644 --- a/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs +++ b/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs @@ -61,7 +61,7 @@ public static int MBitsToBytes(int mbits) public ThrottledIndexOutput(int bytesPerSecond, long flushDelayMillis, long closeDelayMillis, long seekDelayMillis, long minBytesWritten, IndexOutput @delegate) { - Debugging.Assert(() => bytesPerSecond > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesPerSecond > 0); this.@delegate = @delegate; this.bytesPerSecond = bytesPerSecond; this.flushDelayMillis = flushDelayMillis; diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs index f398ac18a5..f0b56237a4 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs @@ -422,7 +422,7 @@ public virtual void TestRandomMaps2() // Same length: no change to offset } - Debugging.Assert(() => inputOffsets.Count == 
output.Length, () => "inputOffsets.size()=" + inputOffsets.Count + " vs output.length()=" + output.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => inputOffsets.Count == output.Length, () => "inputOffsets.size()=" + inputOffsets.Count + " vs output.length()=" + output.Length); } else { diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs index e48cafa91f..6333c35fb3 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs @@ -211,7 +211,7 @@ private sealed class FactoryAnalyzer : Analyzer internal FactoryAnalyzer(TokenizerFactory tokenizer, TokenFilterFactory tokenfilter, CharFilterFactory charFilter) { - Debugging.Assert(() => tokenizer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => tokenizer != null); this.tokenizer = tokenizer; this.charFilter = charFilter; this.tokenfilter = tokenfilter; diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs index 4f66f90bb0..20ea9cefb5 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs @@ -172,7 +172,7 @@ public PredicateAnonymousInnerClassHelper2() public virtual bool Apply(object[] args) { - Debugging.Assert(() => args.Length == 3); + if (Debugging.AssertsEnabled) Debugging.Assert(() => args.Length == 3); return !((bool)args[2]); // args are broken if consumeAllTokens is false } } @@ -185,7 +185,7 @@ public PredicateAnonymousInnerClassHelper3() public virtual bool Apply(object[] args) { - Debugging.Assert(() => args.Length == 3); + if (Debugging.AssertsEnabled) Debugging.Assert(() => args.Length == 3); return !((bool)args[2]); // args are broken if consumeAllTokens is false } } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs index b8158af45e..48f9ea82a3 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs @@ -168,16 +168,16 @@ public virtual void Test() for (int i = 0; i < tests.Length; i += 3) { FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); - Debugging.Assert(() => f.Exists); + if (Debugging.AssertsEnabled) Debugging.Assert(() => f.Exists); using (Stream fileStream = f.OpenRead()) { using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) { ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - Debugging.Assert(() => dicEntry != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dicEntry != null); ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - Debugging.Assert(() => affEntry != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => affEntry != null); using (Stream dictionary = dicEntry.Open()) { @@ -208,16 +208,16 @@ public virtual void TestOneDictionary() if (tests[i].Equals(toTest, StringComparison.Ordinal)) { FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); - Debugging.Assert(() => f.Exists); + if (Debugging.AssertsEnabled) Debugging.Assert(() => f.Exists); using (Stream fileStream = f.OpenRead()) { using (ZipArchive zip = new 
ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) { ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - Debugging.Assert(() => dicEntry != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dicEntry != null); ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - Debugging.Assert(() => affEntry != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => affEntry != null); using (Stream dictionary = dicEntry.Open()) { diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs index e6c2d9765d..cce39a6f31 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs @@ -184,16 +184,16 @@ public virtual void Test() for (int i = 0; i < tests.Length; i += 3) { FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); - Debugging.Assert(() => f.Exists); + if (Debugging.AssertsEnabled) Debugging.Assert(() => f.Exists); using (Stream fileStream = f.OpenRead()) { using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) { ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - Debugging.Assert(() => dicEntry != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dicEntry != null); ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - Debugging.Assert(() => affEntry != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => affEntry != null); using (Stream dictionary = dicEntry.Open()) { @@ -226,16 +226,16 @@ public virtual void TestOneDictionary() if (tests[i].Equals(toTest, StringComparison.Ordinal)) { FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); - Debugging.Assert(() => f.Exists); + if (Debugging.AssertsEnabled) Debugging.Assert(() => f.Exists); using (Stream fileStream = f.OpenRead()) { using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) { ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - Debugging.Assert(() => dicEntry != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dicEntry != null); ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - Debugging.Assert(() => affEntry != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => affEntry != null); using (Stream dictionary = dicEntry.Open()) { diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs index 4ec4e7d65d..44f4ad2dbb 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs @@ -268,7 +268,7 @@ public virtual void TestBasic() private string GetRandomString(char start, int alphabetSize, int length) { - Debugging.Assert(() => alphabetSize <= 26); + if (Debugging.AssertsEnabled) Debugging.Assert(() => alphabetSize <= 26); char[] s = new char[2 * length]; for (int charIDX = 0; charIDX < length; charIDX++) { diff --git a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs index 7c7a4ef9f2..5c1afbfaff 100644 --- a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs +++ b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs @@ -154,7 +154,7 @@ protected internal virtual void SortTies(LabelAndValue[] labelValues) if (numInRow > 1) { 
                    Array.Sort(labelValues, i - numInRow, i - (i - numInRow), Comparer<LabelAndValue>.Create((a,b)=> {
-                        Debugging.Assert(() => (double)a.Value == (double)b.Value);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => (double)a.Value == (double)b.Value);
                        return (new BytesRef(a.Label)).CompareTo(new BytesRef(b.Label));
                    }));
                }
diff --git a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
index a28559ca95..a9d27ab18d 100644
--- a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
@@ -1197,7 +1197,7 @@ public DrillSidewaysAnonymousInnerClassHelper2(TestRangeFacetCounts outerInstanc
            protected override Facets BuildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, string[] drillSidewaysDims)
            {
-                Debugging.Assert(() => drillSideways.Length == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => drillSideways.Length == 1);
                return new DoubleRangeFacetCounts("field", vs, drillSideways[0], fastMatchFilter, ranges);
            }
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
index 6e8be41514..09c2ed7aa0 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
@@ -385,7 +385,7 @@ public ValueSourceAnonymousInnerClassHelper(TestTaxonomyFacetSumValueSource oute
            public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
            {
                Scorer scorer = (Scorer)context["scorer"];
-                Debugging.Assert(() => scorer != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => scorer != null);
                return new DoubleDocValuesAnonymousInnerClassHelper(this, scorer);
            }
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
index 115a1203e3..d89256a5a8 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
@@ -866,7 +866,7 @@ public virtual void SetScorer(Scorer scorer)
            public virtual void Collect(int doc)
            {
-                Debugging.Assert(() => doc > lastDocID);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => doc > lastDocID);
                lastDocID = doc;
            }
@@ -948,8 +948,8 @@ public virtual void Inc(int[] dims, int[] dims2)
            public virtual void Inc(int[] dims, int[] dims2, int onlyDim)
            {
-                Debugging.Assert(() => dims.Length == counts.Length);
-                Debugging.Assert(() => dims2.Length == counts.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => dims.Length == counts.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => dims2.Length == counts.Length);
                for (int dim = 0; dim < dims.Length; dim++)
                {
                    if (onlyDim == -1 || dim == onlyDim)
diff --git a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
index faa1519bf9..3329b699b6 100644
--- a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
+++ b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
@@ -559,7 +559,7 @@ private IndexContext CreateIndexContext(bool multipleFacetValuesPerDocument)
            Field[] facetFields;
            if (useDv)
            {
-                Debugging.Assert(() => !multipleFacetValuesPerDocument);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => !multipleFacetValuesPerDocument);
                facetFields = new Field[2];
                facetFields[0] = NewStringField("facet", "", Field.Store.NO);
                doc.Add(facetFields[0]);
@@ -816,7 +816,7 @@ private AbstractGroupFacetCollector CreateRandomCollector(string groupField, str
        {
            BytesRef facetPrefixBR = facetPrefix == null ? null : new BytesRef(facetPrefix);
            // DocValues cannot be multi-valued:
-            Debugging.Assert(() => !multipleFacetsPerDocument || !groupField.EndsWith("_dv", StringComparison.Ordinal));
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => !multipleFacetsPerDocument || !groupField.EndsWith("_dv", StringComparison.Ordinal));
            return TermGroupFacetCollector.CreateTermGroupFacetCollector(groupField, facetField, multipleFacetsPerDocument, facetPrefixBR, Random.nextInt(1024));
        }
diff --git a/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs b/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs
index 859bff449d..99189a1563 100644
--- a/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs
+++ b/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs
@@ -775,8 +775,8 @@ public LoadFieldValuesPostingsHighlighter(int maxLength, string text)
            protected override IList<string[]> LoadFieldValues(IndexSearcher searcher, string[] fields, int[] docids, int maxLength)
            {
-                Debugging.Assert(() => fields.Length == 1);
-                Debugging.Assert(() => docids.Length == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Length == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => docids.Length == 1);
                String[][] contents = RectangularArrays.ReturnRectangularArray<string>(1, 1); //= new String[1][1];
                contents[0][0] = text;
                return contents;
@@ -1179,7 +1179,7 @@ internal class GetMultiValuedSeparatorPostingsHighlighter : PostingsHighlighter
        {
            protected override char GetMultiValuedSeparator(string field)
            {
-                Debugging.Assert(() => field.Equals("body", StringComparison.Ordinal));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Equals("body", StringComparison.Ordinal));
                return '\u2029';
            }
        }
diff --git a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
index f79246dbad..09ea77f41c 100644
--- a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
+++ b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
@@ -1030,7 +1030,7 @@ private FixedBitSet CreateExpectedResult(string queryValue, bool from, IndexRead
                {
                    DocsEnum docsEnum = MultiFields.GetTermDocsEnum(topLevelReader, MultiFields.GetLiveDocs(topLevelReader), "id", new BytesRef(otherSideDoc.id), 0);
-                    Debugging.Assert(() => docsEnum != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => docsEnum != null);
                    int doc = docsEnum.NextDoc();
                    expectedResult.Set(doc);
                }
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index 10bffe2fde..32a1bce486 100644
--- a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -65,8 +65,8 @@ public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
        public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
        {
-            Debugging.Assert(() => cqpC != null, () => "Parameter must not be null");
-            Debugging.Assert(() => cqpC is QueryParser, () => "Parameter must be instance of QueryParser");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC != null, () => "Parameter must not be null");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser, () => "Parameter must be instance of QueryParser");
            QueryParser qp = (QueryParser)cqpC;
            return qp.Parse(query);
        }
@@ -83,35 +83,35 @@ public 
override bool IsQueryParserException(Exception exception) public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC) { - Debugging.Assert(() => cqpC is QueryParser); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.DefaultOperator = Operator.OR; } public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC) { - Debugging.Assert(() => cqpC is QueryParser); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.DefaultOperator = Operator.AND; } public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value) { - Debugging.Assert(() => cqpC is QueryParser); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.AnalyzeRangeTerms = (value); } public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value) { - Debugging.Assert(() => cqpC is QueryParser); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.AutoGeneratePhraseQueries = value; } public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, string field, DateTools.Resolution value) { - Debugging.Assert(() => cqpC is QueryParser); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser); QueryParser qp = (QueryParser)cqpC; qp.SetDateResolution(field, value); } diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs index c336603c51..f9d285517d 100644 --- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs +++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs @@ -59,8 +59,8 @@ public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a) public override Query GetQuery(String query, ICommonQueryParserConfiguration cqpC) { - Debugging.Assert(() => cqpC != null, () => "Parameter must not be null"); - Debugging.Assert(() => (cqpC is StandardQueryParser), () => "Parameter must be instance of StandardQueryParser"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC != null, () => "Parameter must not be null"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (cqpC is StandardQueryParser), () => "Parameter must be instance of StandardQueryParser"); StandardQueryParser qp = (StandardQueryParser)cqpC; return Parse(query, qp); } @@ -80,7 +80,7 @@ public override bool IsQueryParserException(Exception exception) public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC) { - Debugging.Assert(() => cqpC is StandardQueryParser); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is StandardQueryParser); StandardQueryParser qp = (StandardQueryParser)cqpC; qp.DefaultOperator = (Operator.OR); } @@ -88,7 +88,7 @@ public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC) public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC) { - Debugging.Assert(() => cqpC is StandardQueryParser); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is StandardQueryParser); StandardQueryParser qp = (StandardQueryParser)cqpC; qp.DefaultOperator = (Operator.AND); } @@ -111,7 +111,7 @@ public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguratio public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, 
string field, DateTools.Resolution value) { - Debugging.Assert(() => cqpC is StandardQueryParser); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is StandardQueryParser); StandardQueryParser qp = (StandardQueryParser)cqpC; qp.DateResolutionMap.Put(field, value); } diff --git a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs index b41b01a201..a6fe124b20 100644 --- a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs +++ b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs @@ -461,7 +461,7 @@ protected override void HandleUpdateException(Exception exception) { // count-down number of failures failures.DecrementAndGet(); - Debugging.Assert(() => failures >= 0, () => "handler failed too many times: " + failures); + if (Debugging.AssertsEnabled) Debugging.Assert(() => failures >= 0, () => "handler failed too many times: " + failures); if (Verbose) { if (failures == 0) diff --git a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs index 3c158bf2af..ee05bc1833 100644 --- a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs +++ b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs @@ -367,7 +367,7 @@ protected override void HandleUpdateException(Exception exception) { // count-down number of failures failures.DecrementAndGet(); - Debugging.Assert(() => failures >= 0, () => "handler failed too many times: " + failures); + if (Debugging.AssertsEnabled) Debugging.Assert(() => failures >= 0, () => "handler failed too many times: " + failures); if (Verbose) { if (failures == 0) diff --git a/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs b/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs index b57d7face6..6f54dd4405 100644 --- a/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs +++ b/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs @@ -152,7 +152,7 @@ protected virtual IRectangle randomRectangle() private double randomGaussianMinMeanMax(double min, double mean, double max) { - Debugging.Assert(() => mean > min); + if (Debugging.AssertsEnabled) Debugging.Assert(() => mean > min); return randomGaussianMeanMax(mean - min, max - min) + min; } @@ -166,7 +166,7 @@ private double randomGaussianMinMeanMax(double min, double mean, double max) private double randomGaussianMeanMax(double mean, double max) { // DWS: I verified the results empirically - Debugging.Assert(() => mean <= max && mean >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => mean <= max && mean >= 0); double g = randomGaussian(); double mean2 = mean; double flip = 1; @@ -180,7 +180,7 @@ private double randomGaussianMeanMax(double mean, double max) // 1 standard deviation alters the calculation double pivotMax = max - mean2; double pivot = Math.Min(mean2, pivotMax / 2);//from 0 to max-mean2 - Debugging.Assert(() => pivot >= 0 && pivotMax >= pivot && g >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pivot >= 0 && pivotMax >= pivot && g >= 0); double pivotResult; if (g <= 1) pivotResult = pivot * g; diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs index 016b78e80e..4b7301d8a5 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs @@ -651,7 +651,7 @@ public int 
CompareTo(TermFreq2 other)
            }
            else
            {
-                Debugging.Assert(() => false);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
                return 0;
            }
        }
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
index 5951a30590..4857d73d4c 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
@@ -603,7 +603,7 @@ public int CompareTo(TermFreqPayload2 other)
            }
            else
            {
-                Debugging.Assert(() => false);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
                return 0;
            }
        }
@@ -1313,7 +1313,7 @@ public int Compare(Lookup.LookupResult a, Lookup.LookupResult b)
                else
                {
                    int c = CHARSEQUENCE_COMPARER.Compare(a.Key, b.Key);
-                    Debugging.Assert(() => c != 0, () => "term=" + a.Key);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => c != 0, () => "term=" + a.Key);
                    return c;
                }
            }
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
index 2f160a6fd3..0780c825b9 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
@@ -680,7 +680,7 @@ private static string GetZipfToken(string[] tokens)
                    return tokens[k];
                }
            }
-            Debugging.Assert(() => false);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
            return null;
        }
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
index d835cf9a36..14251e72af 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
@@ -75,7 +75,7 @@ public class LookupBenchmarkTest : LuceneTestCase
        public override void SetUp()
        {
-            Debugging.Assert(() => false, () => "disable assertions before running benchmarks!");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "disable assertions before running benchmarks!");
            IList<Input> input = ReadTop50KWiki();
            input.Shuffle(Random);
            dictionaryInput = input.ToArray();
@@ -93,7 +93,7 @@ public static IList<Input> ReadTop50KWiki()
            List<Input> input = new List<Input>();
            var resource = typeof(LookupBenchmarkTest).FindAndGetManifestResourceStream("Top50KWiki.utf8");
-            Debugging.Assert(() => resource != null, () => "Resource missing: Top50KWiki.utf8");
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => resource != null, () => "Resource missing: Top50KWiki.utf8");
            string line = null;
            using (TextReader br = new StreamReader(resource, UTF_8))
diff --git a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
index 8aaa6b92d3..1322b74063 100644
--- a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
+++ b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
@@ -176,7 +176,7 @@ internal virtual void FillTokens()
                    pos += minPosLength;
                    offset = 2 * pos;
                }
-                Debugging.Assert(() => maxPos <= pos, () => "input string mal-formed: posLength>1 tokens hang over the end");
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => maxPos <= pos, () => "input string mal-formed: posLength>1 tokens hang over the end");
            }
        }
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
index 6a2347deac..40b002f3b8 100644
--- 
a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs +++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs @@ -296,7 +296,7 @@ private void AssertTermsSeeking(Terms leftTerms, Terms rightTerms) /// public virtual void AssertTermsStatistics(Terms leftTerms, Terms rightTerms) { - Debugging.Assert(() => leftTerms.Comparer == rightTerms.Comparer); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leftTerms.Comparer == rightTerms.Comparer); if (leftTerms.DocCount != -1 && rightTerms.DocCount != -1) { Assert.AreEqual(leftTerms.DocCount, rightTerms.DocCount); diff --git a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs index 42a0bd84b6..0a2e89e037 100644 --- a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs +++ b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs @@ -113,7 +113,7 @@ public virtual void TestTwoFieldsTwoFormats() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv1"); Assert.AreEqual(5, dv.Get(hits.ScoreDocs[i].Doc)); BinaryDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv2"); diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs index a82465ae4d..2ed15aae65 100644 --- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs +++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs @@ -434,7 +434,7 @@ public virtual void SearchIndex(Directory dir, string oldName) // true if this is a 4.2+ index bool is42Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("dvSortedSet") != null; - Debugging.Assert(() => is40Index); // NOTE: currently we can only do this on trunk! + if (Debugging.AssertsEnabled) Debugging.Assert(() => is40Index); // NOTE: currently we can only do this on trunk! 
IBits liveDocs = MultiFields.GetLiveDocs(reader); diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs index 8fd5b72436..417df9366e 100644 --- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs +++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs @@ -1039,7 +1039,7 @@ public virtual void TestNegativePositions() Assert.AreEqual(1, td.TotalHits); AtomicReader wrapper = SlowCompositeReaderWrapper.Wrap(ir); DocsAndPositionsEnum de = wrapper.GetTermPositionsEnum(new Term("field3", "broken")); - Debugging.Assert(() => de != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => de != null); Assert.AreEqual(0, de.NextDoc()); Assert.AreEqual(0, de.NextPosition()); ir.Dispose(); diff --git a/src/Lucene.Net.Tests/Index/TestCodecs.cs b/src/Lucene.Net.Tests/Index/TestCodecs.cs index 2b7fbe326c..2888663d84 100644 --- a/src/Lucene.Net.Tests/Index/TestCodecs.cs +++ b/src/Lucene.Net.Tests/Index/TestCodecs.cs @@ -446,7 +446,7 @@ public virtual void TestRandomPostings() for (int i = 0; i < NUM_TEST_THREADS - 1; i++) { threads[i].Join(); - Debugging.Assert(() => !threads[i].failed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !threads[i].failed); } } @@ -759,14 +759,14 @@ public virtual void _run() if (doc == DocIdSetIterator.NO_MORE_DOCS) { // skipped past last doc - Debugging.Assert(() => upto2 == term2.docs.Length - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto2 == term2.docs.Length - 1); ended = true; break; } else { // skipped to next doc - Debugging.Assert(() => upto2 < term2.docs.Length - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto2 < term2.docs.Length - 1); if (doc >= term2.docs[1 + upto2]) { upto2++; diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs index e195ed4d80..a2517f58ec 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs @@ -2140,14 +2140,14 @@ public virtual void TestNRTReaderVersion() r = w.GetReader(); long version2 = r.Version; r.Dispose(); - Debugging.Assert(() => version2 > version); + if (Debugging.AssertsEnabled) Debugging.Assert(() => version2 > version); w.DeleteDocuments(new Term("id", "0")); r = w.GetReader(); w.Dispose(); long version3 = r.Version; r.Dispose(); - Debugging.Assert(() => version3 > version2); + if (Debugging.AssertsEnabled) Debugging.Assert(() => version3 > version2); d.Dispose(); } diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs index a48d95448a..ffdcfdd778 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs @@ -315,7 +315,7 @@ public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMer } for (int i = 0; i < merge.Segments.Count; i++) { - Debugging.Assert(() => merge.Segments[i].Info.DocCount < 20); + if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.Segments[i].Info.DocCount < 20); } writer.Merge(merge); } diff --git a/src/Lucene.Net.Tests/Index/TestIndexableField.cs b/src/Lucene.Net.Tests/Index/TestIndexableField.cs index b71cdf2c7f..4308d1a932 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexableField.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexableField.cs @@ -317,7 +317,7 @@ public virtual void TestArbitraryFields() } else { - Debugging.Assert(() => stringValue != null); + if (Debugging.AssertsEnabled) 
Debugging.Assert(() => stringValue != null); Assert.AreEqual(stringValue, f.GetStringValue()); } } @@ -424,7 +424,7 @@ public bool MoveNext() return false; } - Debugging.Assert(() => fieldUpto < outerInstance.fieldCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldUpto < outerInstance.fieldCount); if (fieldUpto == 0) { fieldUpto = 1; diff --git a/src/Lucene.Net.Tests/Index/TestLongPostings.cs b/src/Lucene.Net.Tests/Index/TestLongPostings.cs index 118fcaaee9..c9859d7de3 100644 --- a/src/Lucene.Net.Tests/Index/TestLongPostings.cs +++ b/src/Lucene.Net.Tests/Index/TestLongPostings.cs @@ -462,9 +462,9 @@ public virtual void DoTestLongPostingsNoPositions(IndexOptions options) else { docs = postings = TestUtil.Docs(Random, r, "field", new BytesRef(term), null, null, DocsFlags.FREQS); - Debugging.Assert(() => postings != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => postings != null); } - Debugging.Assert(() => docs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docs != null); int docID = -1; while (docID < DocIdSetIterator.NO_MORE_DOCS) diff --git a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs index e2a79c0797..5dfadbfe0a 100644 --- a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs +++ b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs @@ -116,7 +116,7 @@ protected override void DoSearching(TaskScheduler es, long stopTime) protected override Directory GetDirectory(Directory @in) { - Debugging.Assert(() => @in is MockDirectoryWrapper); + if (Debugging.AssertsEnabled) Debugging.Assert(() => @in is MockDirectoryWrapper); if (!useNonNrtReaders) { ((MockDirectoryWrapper)@in).AssertNoDeleteOpenFile = true; diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs index 0f208af1dd..2129822db1 100644 --- a/src/Lucene.Net.Tests/Index/TestPayloads.cs +++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs @@ -310,7 +310,7 @@ private void GenerateRandomData(byte[] data) // this test needs the random data to be valid unicode string s = TestUtil.RandomFixedByteLengthUnicodeString(Random, data.Length); var b = s.GetBytes(utf8); - Debugging.Assert(() => b.Length == data.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b.Length == data.Length); System.Buffer.BlockCopy(b, 0, data, 0, b.Length); } diff --git a/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs index 5b7e149235..f38c9411da 100644 --- a/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs +++ b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs @@ -79,7 +79,7 @@ public virtual void TestMixupDocs() DirectoryReader reader = writer.GetReader(); Terms terms = reader.GetTermVector(1, "field"); - Debugging.Assert(() => terms != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => terms != null); TermsEnum termsEnum = terms.GetIterator(null); Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload"))); DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null); @@ -128,7 +128,7 @@ public virtual void TestMixupMultiValued() writer.AddDocument(doc); DirectoryReader reader = writer.GetReader(); Terms terms = reader.GetTermVector(0, "field"); - Debugging.Assert(() => terms != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => terms != null); TermsEnum termsEnum = terms.GetIterator(null); Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload"))); DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null); diff --git 
a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs index c621f33aed..6e968359da 100644 --- a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs +++ b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs @@ -177,9 +177,9 @@ public virtual void DoTestNumbers(bool withPayloads) { dp.NextPosition(); int start = dp.StartOffset; - Debugging.Assert(() => start >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => start >= 0); int end = dp.EndOffset; - Debugging.Assert(() => end >= 0 && end >= start); + if (Debugging.AssertsEnabled) Debugging.Assert(() => end >= 0 && end >= start); // check that the offsets correspond to the term in the src text Assert.IsTrue(storedNumbers.Substring(start, end - start).Equals(term, StringComparison.Ordinal)); if (withPayloads) @@ -208,9 +208,9 @@ public virtual void DoTestNumbers(bool withPayloads) string storedNumbers = reader.Document(doc).Get("numbers"); dp.NextPosition(); int start = dp.StartOffset; - Debugging.Assert(() => start >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => start >= 0); int end = dp.EndOffset; - Debugging.Assert(() => end >= 0 && end >= start); + if (Debugging.AssertsEnabled) Debugging.Assert(() => end >= 0 && end >= start); // check that the offsets correspond to the term in the src text Assert.IsTrue(storedNumbers.Substring(start, end - start).Equals("hundred", StringComparison.Ordinal)); if (withPayloads) diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs index aa7f52f876..b7d7a3c602 100644 --- a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs +++ b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs @@ -347,7 +347,7 @@ public virtual void VerifyEquals(DirectoryReader r1, DirectoryReader r2, string } if (r1.NumDocs != r2.NumDocs) { - Debugging.Assert(() => false, () => "r1.NumDocs=" + r1.NumDocs + " vs r2.NumDocs=" + r2.NumDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "r1.NumDocs=" + r1.NumDocs + " vs r2.NumDocs=" + r2.NumDocs); } bool hasDeletes = !(r1.MaxDoc == r2.MaxDoc && r1.NumDocs == r1.MaxDoc); @@ -682,7 +682,7 @@ public static void VerifyEquals(Document d1, Document d2) IIndexableField f2 = ff2[i]; if (f1.GetBinaryValue() != null) { - Debugging.Assert(() => f2.GetBinaryValue() != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => f2.GetBinaryValue() != null); } else { diff --git a/src/Lucene.Net.Tests/Index/TestStressNRT.cs b/src/Lucene.Net.Tests/Index/TestStressNRT.cs index ef4f7958f1..3154b56a2d 100644 --- a/src/Lucene.Net.Tests/Index/TestStressNRT.cs +++ b/src/Lucene.Net.Tests/Index/TestStressNRT.cs @@ -263,8 +263,8 @@ public override void Run() { // install the new reader if it's newest (and check the current version since another reader may have already been installed) //System.out.println(Thread.currentThread().getName() + ": newVersion=" + newReader.getVersion()); - Debugging.Assert(() => newReader.RefCount > 0); - Debugging.Assert(() => outerInstance.reader.RefCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newReader.RefCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.reader.RefCount > 0); if (newReader.Version > outerInstance.reader.Version) { if (Verbose) diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs index def59ed669..d8dada1fd5 100644 --- a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs +++ b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs @@ 
-829,7 +829,7 @@ private void TestRandomSeeks(IndexReader r, params string[] validTermStrings) } else { - Debugging.Assert(() => loc >= -validTerms.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => loc >= -validTerms.Length); Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, result); } } diff --git a/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs b/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs index e8f5f73a02..bd1216b985 100644 --- a/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs +++ b/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs @@ -110,7 +110,7 @@ public override bool SkipTo(int target) } int subIndex = ReaderUtil.SubIndex(target, leaves); - Debugging.Assert(() => subIndex >= leafOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(() => subIndex >= leafOrd); if (subIndex != leafOrd) { AtomicReaderContext ctx = leaves[subIndex]; diff --git a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs index d203efb694..b870992f16 100644 --- a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs +++ b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs @@ -119,7 +119,7 @@ private class BulkScorerAnonymousInnerClassHelper : BulkScorer public override bool Score(ICollector c, int maxDoc) { - Debugging.Assert(() => doc == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => doc == -1); doc = 3000; FakeScorer fs = new FakeScorer(); fs.doc = doc; diff --git a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs index a2aa38e536..8a244a0c9b 100644 --- a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs +++ b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs @@ -234,7 +234,7 @@ public virtual void TestQueryWrapperFilter() Filter filter = new QueryWrapperFilter(AssertingQuery.Wrap(Random, new TermQuery(new Term("field", "a")))); IndexSearcher s = NewSearcher(r); - Debugging.Assert(() => s is AssertingIndexSearcher); + if (Debugging.AssertsEnabled) Debugging.Assert(() => s is AssertingIndexSearcher); // this used to fail s.Search(new ConstantScoreQuery(filter), new TotalHitCountCollector()); diff --git a/src/Lucene.Net.Tests/Search/TestFieldCache.cs b/src/Lucene.Net.Tests/Search/TestFieldCache.cs index 3358372d96..7295a56140 100644 --- a/src/Lucene.Net.Tests/Search/TestFieldCache.cs +++ b/src/Lucene.Net.Tests/Search/TestFieldCache.cs @@ -372,7 +372,7 @@ public virtual void Test() break; } long ord = termOrds.NextOrd(); - Debugging.Assert(() => ord != SortedSetDocValues.NO_MORE_ORDS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord != SortedSetDocValues.NO_MORE_ORDS); BytesRef scratch = new BytesRef(); termOrds.LookupOrd(ord, scratch); Assert.AreEqual(v, scratch); diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs index 6157c9a804..de9b7be1d2 100644 --- a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs +++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs @@ -365,14 +365,14 @@ internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, Ind this.sims = new SimScorer[(int)dv.ValueCount]; foreach (BooleanClause clause in bq.GetClauses()) { - Debugging.Assert(() => !clause.IsProhibited); - Debugging.Assert(() => !clause.IsRequired); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !clause.IsProhibited); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !clause.IsRequired); Term term = ((TermQuery)clause.Query).Term; long ord = 
dv.LookupTerm(term.Bytes); if (ord >= 0) { bool success = ords.Add(ord); - Debugging.Assert(() => success); // no dups + if (Debugging.AssertsEnabled) Debugging.Assert(() => success); // no dups TermContext context = TermContext.Build(reader.Context, term); SimWeight w = weight.Similarity.ComputeWeight(1f, searcher.CollectionStatistics("field"), searcher.TermStatistics(term, context)); var dummy = w.GetValueForNormalization(); // ignored @@ -384,7 +384,7 @@ internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, Ind public override float GetScore() { - Debugging.Assert(() => score != 0, currentMatched.ToString); + if (Debugging.AssertsEnabled) Debugging.Assert(() => score != 0, currentMatched.ToString); return (float)score * ((BooleanWeight)m_weight).Coord(currentMatched, ((BooleanWeight)m_weight).MaxCoord); } @@ -394,7 +394,7 @@ public override float GetScore() public override int NextDoc() { - Debugging.Assert(() => currentDoc != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentDoc != NO_MORE_DOCS); for (currentDoc = currentDoc + 1; currentDoc < maxDoc; currentDoc++) { currentMatched = 0; diff --git a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs index f6c0ac7522..d1e61d861a 100644 --- a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs +++ b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs @@ -223,7 +223,7 @@ private void VerifyVectors(Fields vectors, int num) foreach (string field in vectors) { Terms terms = vectors.GetTerms(field); - Debugging.Assert(() => terms != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => terms != null); VerifyVector(terms.GetIterator(null), num); } } diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs index a7dc07eed7..050e28c6d2 100644 --- a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs +++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs @@ -474,13 +474,13 @@ public virtual void TestEmptyEnums() int count = 3000; int lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3); // test empty enum - Debugging.Assert(() => lower < upper); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lower < upper); Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, lower, upper, true, true))); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, upper, lower, true, true))); // test empty enum outside of bounds lower = distance * noDocs + startOffset; upper = 2 * lower; - Debugging.Assert(() => lower < upper); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lower < upper); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, lower, upper, true, true))); } diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs index 4e0ad53037..4262d3cbec 100644 --- a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs +++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs @@ -501,13 +501,13 @@ public virtual void TestEmptyEnums() int count = 3000; long lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3); // test empty enum - Debugging.Assert(() => lower < upper); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lower < upper); Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, lower, upper, true, 
true))); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, upper, lower, true, true))); // test empty enum outside of bounds lower = distance * noDocs + startOffset; upper = 2L * lower; - Debugging.Assert(() => lower < upper); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lower < upper); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, lower, upper, true, true))); } diff --git a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs index 6ebd6212d9..8118ca6410 100644 --- a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs +++ b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs @@ -418,7 +418,7 @@ public virtual void Collect(int doc) //#endif } - Debugging.Assert(() => docId >= 0, () => " base=" + docBase + " doc=" + doc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docId >= 0, () => " base=" + docBase + " doc=" + doc); bits.Set(docId); lastDocCollected = docId; } diff --git a/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs b/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs index b18aaa8e9f..5addb895ec 100644 --- a/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs +++ b/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs @@ -76,7 +76,7 @@ private void TestOne(Random r, ByteRunAutomaton a, int startCode, int endCode, i nonSurrogateCount = endCode - startCode + 1 - (UnicodeUtil.UNI_SUR_LOW_END - UnicodeUtil.UNI_SUR_HIGH_START + 1); } - Debugging.Assert(() => nonSurrogateCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nonSurrogateCount > 0); for (int iter = 0; iter < iters; iter++) { @@ -95,8 +95,8 @@ private void TestOne(Random r, ByteRunAutomaton a, int startCode, int endCode, i } } - Debugging.Assert(() => code >= startCode && code <= endCode, () => "code=" + code + " start=" + startCode + " end=" + endCode); - Debugging.Assert(() => !IsSurrogate(code)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => code >= startCode && code <= endCode, () => "code=" + code + " start=" + startCode + " end=" + endCode); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !IsSurrogate(code)); Assert.IsTrue(Matches(a, code), "DFA for range " + startCode + "-" + endCode + " failed to match code=" + code); } diff --git a/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs b/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs index df6777489e..b262eda0a0 100644 --- a/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs +++ b/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs @@ -571,7 +571,7 @@ public virtual void Run(int limit, bool verify, bool verifyByOutput) long tMid = Environment.TickCount; Console.WriteLine(((tMid - tStart) / 1000.0) + " sec to add all terms"); - Debugging.Assert(() => builder.TermCount == ord); + if (Debugging.AssertsEnabled) Debugging.Assert(() => builder.TermCount == ord); FST fst = builder.Finish(); long tEnd = Environment.TickCount; Console.WriteLine(((tEnd - tMid) / 1000.0) + " sec to finish/pack"); diff --git a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs index e7cb03c15a..007eee8ed0 100644 --- a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs +++ b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs @@ -52,7 +52,7 @@ public override int NextDoc() { doc = NO_MORE_DOCS; } - Debugging.Assert(() => doc < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => doc < numBits); return doc; } diff --git 
a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs index 35b2633a1e..9111a35aeb 100644 --- a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs +++ b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs @@ -91,7 +91,7 @@ private static void TstDecodeAllAdvanceToExpected(long[] values, EliasFanoDecode private static void TstDecodeAdvanceToMultiples(long[] values, EliasFanoDecoder efd, long m) { // test advancing to multiples of m - Debugging.Assert(() => m > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m > 0); long previousValue = -1L; long index = 0; long mm = m; @@ -120,7 +120,7 @@ private static void TstDecodeAdvanceToMultiples(long[] values, EliasFanoDecoder private static void TstDecodeBackToMultiples(long[] values, EliasFanoDecoder efd, long m) { // test backing to multiples of m - Debugging.Assert(() => m > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m > 0); efd.ToAfterSequence(); int index = values.Length - 1; if (index < 0) @@ -135,7 +135,7 @@ private static void TstDecodeBackToMultiples(long[] values, EliasFanoDecoder efd while (index >= 0) { expValue = values[index]; - Debugging.Assert(() => mm < previousValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => mm < previousValue); if (expValue <= mm) { long backValue_ = efd.BackToValue(mm); diff --git a/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs b/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs index 63a75b9948..8fad1f744e 100644 --- a/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs +++ b/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs @@ -57,7 +57,7 @@ public virtual void Test() r2.NextBytes(bytes); dataOutput.WriteBytes(bytes, bytes.Length); long fp = dataOutput.GetFilePointer(); - Debugging.Assert(() => fp == lastFP + numBytes); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fp == lastFP + numBytes); lastFP = fp; netBytes += numBytes; } diff --git a/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs b/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs index f31d3cee08..3c272c7f71 100644 --- a/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs +++ b/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs @@ -94,7 +94,7 @@ public virtual void TestUnion() /// Create a random set which has <paramref name="numBitsSet"/> of its bits set. 
protected static OpenBitSet RandomOpenSet(int numBits, int numBitsSet) { - Debugging.Assert(() => numBitsSet <= numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numBitsSet <= numBits); OpenBitSet set = new OpenBitSet(numBits); Random random = Random; if (numBitsSet == numBits) diff --git a/src/Lucene.Net/Analysis/NumericTokenStream.cs b/src/Lucene.Net/Analysis/NumericTokenStream.cs index fbff885d4b..ace2805b0d 100644 --- a/src/Lucene.Net/Analysis/NumericTokenStream.cs +++ b/src/Lucene.Net/Analysis/NumericTokenStream.cs @@ -178,7 +178,7 @@ public NumericTermAttribute() public void FillBytesRef() { - Debugging.Assert(() => ValueSize == 64 || ValueSize == 32); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ValueSize == 64 || ValueSize == 32); if (ValueSize == 64) { NumericUtils.Int64ToPrefixCoded(_value, Shift, _bytes); diff --git a/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs b/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs index 93d15f24c5..c70f8fee78 100644 --- a/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs +++ b/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs @@ -155,7 +155,7 @@ public virtual Automaton ToAutomaton(TokenStream @in) { posInc = 1; } - Debugging.Assert(() => pos > -1 || posInc > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos > -1 || posInc > 0); if (posInc > 0) { @@ -163,7 +163,7 @@ public virtual Automaton ToAutomaton(TokenStream @in) pos += posInc; posData = positions.Get(pos); - Debugging.Assert(() => posData.leaving == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => posData.leaving == null); if (posData.arriving == null) { diff --git a/src/Lucene.Net/Analysis/Tokenizer.cs b/src/Lucene.Net/Analysis/Tokenizer.cs index 4edf011b29..36acdea6a8 100644 --- a/src/Lucene.Net/Analysis/Tokenizer.cs +++ b/src/Lucene.Net/Analysis/Tokenizer.cs @@ -115,7 +115,7 @@ public void SetReader(TextReader input) throw new InvalidOperationException("TokenStream contract violation: Close() call missing"); } this.inputPending = input; - Debugging.Assert(SetReaderTestPoint); + if (Debugging.AssertsEnabled) Debugging.Assert(SetReaderTestPoint); } public override void Reset() diff --git a/src/Lucene.Net/Codecs/BlockTermState.cs b/src/Lucene.Net/Codecs/BlockTermState.cs index 28161158e8..13a8bd7a21 100644 --- a/src/Lucene.Net/Codecs/BlockTermState.cs +++ b/src/Lucene.Net/Codecs/BlockTermState.cs @@ -57,7 +57,7 @@ protected internal BlockTermState() public override void CopyFrom(TermState other) { - Debugging.Assert(() => other is BlockTermState, () => "can not copy from " + other.GetType().Name); + if (Debugging.AssertsEnabled) Debugging.Assert(() => other is BlockTermState, () => "can not copy from " + other.GetType().Name); BlockTermState other2 = (BlockTermState)other; base.CopyFrom(other); DocFreq = other2.DocFreq; diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs index 30064e44f5..a3f1591ba5 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs @@ -167,13 +167,13 @@ public BlockTreeTermsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo in { int field = @in.ReadVInt32(); long numTerms = @in.ReadVInt64(); - Debugging.Assert(() => numTerms >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms >= 0); int numBytes = @in.ReadVInt32(); BytesRef rootCode = new BytesRef(new byte[numBytes]); @in.ReadBytes(rootCode.Bytes, 0, numBytes); rootCode.Length = numBytes; FieldInfo fieldInfo = 
fieldInfos.FieldInfo(field); - Debugging.Assert(() => fieldInfo != null, () => "field=" + field); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo != null, () => "field=" + field); long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : @in.ReadVInt64(); long sumDocFreq = @in.ReadVInt64(); int docCount = @in.ReadVInt32(); @@ -291,7 +291,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); FieldReader ret; fields.TryGetValue(field, out ret); return ret; @@ -478,7 +478,7 @@ internal virtual void EndBlock(FieldReader.SegmentTermsEnum.Frame frame) } endBlockCount++; long otherBytes = frame.fpEnd - frame.fp - frame.suffixesReader.Length - frame.statsReader.Length; - Debugging.Assert(() => otherBytes > 0, () => "otherBytes=" + otherBytes + " frame.fp=" + frame.fp + " frame.fpEnd=" + frame.fpEnd); + if (Debugging.AssertsEnabled) Debugging.Assert(() => otherBytes > 0, () => "otherBytes=" + otherBytes + " frame.fp=" + frame.fp + " frame.fpEnd=" + frame.fpEnd); TotalBlockOtherBytes += otherBytes; } @@ -489,9 +489,12 @@ internal virtual void Term(BytesRef term) internal virtual void Finish() { - Debugging.Assert(() => startBlockCount == endBlockCount, () => "startBlockCount=" + startBlockCount + " endBlockCount=" + endBlockCount); - Debugging.Assert(() => TotalBlockCount == FloorSubBlockCount + NonFloorBlockCount, () => "floorSubBlockCount=" + FloorSubBlockCount + " nonFloorBlockCount=" + NonFloorBlockCount + " totalBlockCount=" + TotalBlockCount); - Debugging.Assert(() => TotalBlockCount == MixedBlockCount + TermsOnlyBlockCount + SubBlocksOnlyBlockCount, () => "totalBlockCount=" + TotalBlockCount + " mixedBlockCount=" + MixedBlockCount + " subBlocksOnlyBlockCount=" + SubBlocksOnlyBlockCount + " termsOnlyBlockCount=" + TermsOnlyBlockCount); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => startBlockCount == endBlockCount, () => "startBlockCount=" + startBlockCount + " endBlockCount=" + endBlockCount); + Debugging.Assert(() => TotalBlockCount == FloorSubBlockCount + NonFloorBlockCount, () => "floorSubBlockCount=" + FloorSubBlockCount + " nonFloorBlockCount=" + NonFloorBlockCount + " totalBlockCount=" + TotalBlockCount); + Debugging.Assert(() => TotalBlockCount == MixedBlockCount + TermsOnlyBlockCount + SubBlocksOnlyBlockCount, () => "totalBlockCount=" + TotalBlockCount + " mixedBlockCount=" + MixedBlockCount + " subBlocksOnlyBlockCount=" + SubBlocksOnlyBlockCount + " termsOnlyBlockCount=" + TermsOnlyBlockCount); + } } public override string ToString() @@ -529,7 +532,7 @@ public override string ToString() @out.AppendLine(" " + prefix.ToString().PadLeft(2, ' ') + ": " + blockCount); } } - Debugging.Assert(() => TotalBlockCount == total); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TotalBlockCount == total); } return @out.ToString(); } @@ -560,7 +563,7 @@ public sealed class FieldReader : Terms internal FieldReader(BlockTreeTermsReader outerInstance, FieldInfo fieldInfo, long numTerms, BytesRef rootCode, long sumTotalTermFreq, long sumDocFreq, int docCount, long indexStartFP, int longsSize, IndexInput indexIn) { this.outerInstance = outerInstance; - Debugging.Assert(() => numTerms > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms > 0); this.fieldInfo = fieldInfo; //DEBUG = BlockTreeTermsReader.DEBUG && fieldInfo.name.Equals("id", StringComparison.Ordinal); this.numTerms 
= numTerms; @@ -760,7 +763,7 @@ public Frame(BlockTreeTermsReader.FieldReader.IntersectEnum outerInstance, int o internal void LoadNextFloorBlock() { - Debugging.Assert(() => numFollowFloorBlocks > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numFollowFloorBlocks > 0); //if (DEBUG) System.out.println(" loadNextFoorBlock trans=" + transitions[transitionIndex]); do @@ -845,7 +848,7 @@ internal void Load(BytesRef frameIndexData) outerInstance.@in.Seek(fp); int code_ = outerInstance.@in.ReadVInt32(); entCount = (int)((uint)code_ >> 1); - Debugging.Assert(() => entCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => entCount > 0); isLastInFloor = (code_ & 1) != 0; // term suffixes: @@ -906,7 +909,7 @@ public bool Next() public bool NextLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; suffix = suffixesReader.ReadVInt32(); startBytePos = suffixesReader.Position; @@ -917,7 +920,7 @@ public bool NextLeaf() public bool NextNonLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; int code = suffixesReader.ReadVInt32(); suffix = (int)((uint)code >> 1); @@ -944,7 +947,7 @@ public void DecodeMetaData() // lazily catch up on metadata decode: int limit = TermBlockOrd; bool absolute = metaDataUpto == 0; - Debugging.Assert(() => limit > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => limit > 0); // TODO: better API would be "jump straight to term=N"??? while (metaDataUpto < limit) @@ -1021,7 +1024,7 @@ public IntersectEnum(BlockTreeTermsReader.FieldReader outerInstance, CompiledAut FST.Arc arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! 
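Nearly every hunk in this patch applies the same mechanical transformation: an unguarded Debugging.Assert(() => condition) gains an if (Debugging.AssertsEnabled) guard at the call site. A minimal sketch of the before/after shape (the condition is lifted from one of the hunks above; names are illustrative):

    // Before: even with asserts disabled, evaluating the argument builds
    // a delegate (plus a closure over any captured locals) on every call.
    Debugging.Assert(() => de != null);

    // After: when AssertsEnabled is false the whole statement is skipped,
    // so no delegate is ever constructed.
    if (Debugging.AssertsEnabled) Debugging.Assert(() => de != null);

The two-delegate overload is guarded the same way; because its message argument is a Func<string>, concatenations such as () => "nextEnt=" + nextEnt + " entCount=" + entCount stay deferred until an assertion actually fails.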
- Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); // Special pushFrame since it's the first one: Frame f = stack[0]; @@ -1033,7 +1036,7 @@ public IntersectEnum(BlockTreeTermsReader.FieldReader outerInstance, CompiledAut f.Load(outerInstance.rootCode); // for assert: - Debugging.Assert(() => SetSavedStartTerm(startTerm)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => SetSavedStartTerm(startTerm)); currentFrame = f; if (startTerm != null) @@ -1067,7 +1070,7 @@ private Frame GetFrame(int ord) } stack = next; } - Debugging.Assert(() => stack[ord].ord == ord); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stack[ord].ord == ord); return stack[ord]; } @@ -1101,7 +1104,7 @@ private Frame PushFrame(int state) // possible: FST.Arc arc = currentFrame.arc; int idx = currentFrame.prefix; - Debugging.Assert(() => currentFrame.suffix > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFrame.suffix > 0); BytesRef output = currentFrame.outputPrefix; while (idx < f.prefix) { @@ -1110,14 +1113,14 @@ private Frame PushFrame(int state) // case by using current arc as starting point, // passed to findTargetArc arc = outerInstance.index.FindTargetArc(target, arc, GetArc(1 + idx), fstReader); - Debugging.Assert(() => arc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc != null); output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); idx++; } f.arc = arc; f.outputPrefix = output; - Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); f.Load(outerInstance.outerInstance.fstOutputs.Add(output, arc.NextFinalOutput)); return f; } @@ -1168,7 +1171,7 @@ private int GetState() for (int idx = 0; idx < currentFrame.suffix; idx++) { state = runAutomaton.Step(state, currentFrame.suffixBytes[currentFrame.startBytePos + idx] & 0xff); - Debugging.Assert(() => state != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state != -1); } return state; } @@ -1180,13 +1183,13 @@ private int GetState() private void SeekToStartTerm(BytesRef target) { //if (DEBUG) System.out.println("seek to startTerm=" + target.utf8ToString()); - Debugging.Assert(() => currentFrame.ord == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFrame.ord == 0); if (term.Length < target.Length) { term.Bytes = ArrayUtil.Grow(term.Bytes, target.Length); } FST.Arc arc = arcs[0]; - Debugging.Assert(() => arc == currentFrame.arc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc == currentFrame.arc); for (int idx = 0; idx <= target.Length; idx++) { @@ -1264,7 +1267,7 @@ private void SeekToStartTerm(BytesRef target) } } - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); } public override BytesRef Next() @@ -1294,7 +1297,7 @@ public override BytesRef Next() } long lastFP = currentFrame.fpOrig; currentFrame = stack[currentFrame.ord - 1]; - Debugging.Assert(() => currentFrame.lastSubFP == lastFP); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFrame.lastSubFP == lastFP); //if (DEBUG) System.out.println("\n frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? 
"n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix); } } @@ -1348,7 +1351,7 @@ public override BytesRef Next() byte[] commonSuffixBytes = compiledAutomaton.CommonSuffixRef.Bytes; int lenInPrefix = compiledAutomaton.CommonSuffixRef.Length - currentFrame.suffix; - Debugging.Assert(() => compiledAutomaton.CommonSuffixRef.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => compiledAutomaton.CommonSuffixRef.Offset == 0); int suffixBytesPos; int commonSuffixBytesPos = 0; @@ -1359,7 +1362,7 @@ public override BytesRef Next() // test whether the prefix part matches: byte[] termBytes = term.Bytes; int termBytesPos = currentFrame.prefix - lenInPrefix; - Debugging.Assert(() => termBytesPos >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termBytesPos >= 0); int termBytesPosEnd = currentFrame.prefix; while (termBytesPos < termBytesPosEnd) { @@ -1427,7 +1430,7 @@ public override BytesRef Next() { CopyTerm(); //if (DEBUG) System.out.println(" term match to state=" + state + "; return term=" + brToString(term)); - Debugging.Assert(() => savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, () => "saveStartTerm=" + savedStartTerm.Utf8ToString() + " term=" + term.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, () => "saveStartTerm=" + savedStartTerm.Utf8ToString() + " term=" + term.Utf8ToString()); return term; } else @@ -1531,7 +1534,7 @@ public SegmentTermsEnum(BlockTreeTermsReader.FieldReader outerInstance) { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); } else { @@ -1579,7 +1582,7 @@ public Stats ComputeBlockStats() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); } else { @@ -1614,7 +1617,7 @@ public Stats ComputeBlockStats() } long lastFP = currentFrame.fpOrig; currentFrame = stack[currentFrame.ord - 1]; - Debugging.Assert(() => lastFP == currentFrame.lastSubFP); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastFP == currentFrame.lastSubFP); // if (DEBUG) { // System.out.println(" reset validIndexPrefix=" + validIndexPrefix); // } @@ -1654,7 +1657,7 @@ public Stats ComputeBlockStats() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! 
- Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); } else { @@ -1681,7 +1684,7 @@ private Frame GetFrame(int ord) } stack = next; } - Debugging.Assert(() => stack[ord].ord == ord); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stack[ord].ord == ord); return stack[ord]; } @@ -1740,7 +1743,7 @@ internal Frame PushFrame(FST.Arc arc, long fp, int length) // System.out.println(" skip rewind!"); // } } - Debugging.Assert(() => length == f.prefix); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length == f.prefix); } else { @@ -1786,7 +1789,7 @@ public override bool SeekExact(BytesRef target) term.Bytes = ArrayUtil.Grow(term.Bytes, 1 + target.Length); } - Debugging.Assert(() => ClearEOF()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ClearEOF()); FST.Arc arc; int targetUpto; @@ -1808,12 +1811,12 @@ public override bool SeekExact(BytesRef target) // } arc = arcs[0]; - Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); output = arc.Output; targetUpto = 0; Frame lastFrame = stack[0]; - Debugging.Assert(() => validIndexPrefix <= term.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => validIndexPrefix <= term.Length); int targetLimit = Math.Min(target.Length, validIndexPrefix); @@ -1837,7 +1840,7 @@ public override bool SeekExact(BytesRef target) //if (arc.label != (target.bytes[target.offset + targetUpto] & 0xFF)) { //System.out.println("FAIL: arc.label=" + (char) arc.label + " targetLabel=" + (char) (target.bytes[target.offset + targetUpto] & 0xFF)); //} - Debugging.Assert(() => arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -1904,7 +1907,7 @@ public override bool SeekExact(BytesRef target) else { // Target is exactly the same as current term - Debugging.Assert(() => term.Length == target.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Length == target.Length); if (termExists) { // if (DEBUG) { @@ -1929,8 +1932,11 @@ public override bool SeekExact(BytesRef target) arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output (block) in the index! 
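Runs of consecutive assertions, as in the hunk that follows, are consolidated under a single guard block instead of repeating the check per call:

    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(() => arc.IsFinal);
        Debugging.Assert(() => arc.Output != null);
    }

This reads AssertsEnabled once for the group and keeps the converted code close to the shape of the original back-to-back asserts.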
- Debugging.Assert(() => arc.IsFinal); - Debugging.Assert(() => arc.Output != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => arc.IsFinal); + Debugging.Assert(() => arc.Output != null); + } // if (DEBUG) { // System.out.println(" no seek state; push root frame"); @@ -2002,7 +2008,7 @@ public override bool SeekExact(BytesRef target) arc = nextArc; term.Bytes[targetUpto] = (byte)targetLabel; // Aggregate output as we go: - Debugging.Assert(() => arc.Output != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Output != null); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -2070,7 +2076,7 @@ public override SeekStatus SeekCeil(BytesRef target) term.Bytes = ArrayUtil.Grow(term.Bytes, 1 + target.Length); } - Debugging.Assert(ClearEOF); + if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF); //if (DEBUG) { //System.out.println("\nBTTR.seekCeil seg=" + segment + " target=" + fieldInfo.name + ":" + target.utf8ToString() + " " + target + " current=" + brToString(term) + " (exists?=" + termExists + ") validIndexPrefix= " + validIndexPrefix); @@ -2097,12 +2103,12 @@ public override SeekStatus SeekCeil(BytesRef target) //} arc = arcs[0]; - Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); output = arc.Output; targetUpto = 0; Frame lastFrame = stack[0]; - Debugging.Assert(() => validIndexPrefix <= term.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => validIndexPrefix <= term.Length); int targetLimit = Math.Min(target.Length, validIndexPrefix); @@ -2123,7 +2129,7 @@ public override SeekStatus SeekCeil(BytesRef target) break; } arc = arcs[1 + targetUpto]; - Debugging.Assert(() => arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); // TOOD: we could save the outputs in local // byte[][] instead of making new objs ever // seek; but, often the FST doesn't have any @@ -2192,7 +2198,7 @@ public override SeekStatus SeekCeil(BytesRef target) else { // Target is exactly the same as current term - Debugging.Assert(() => term.Length == target.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Length == target.Length); if (termExists) { //if (DEBUG) { @@ -2214,8 +2220,11 @@ public override SeekStatus SeekCeil(BytesRef target) arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output (block) in the index! 
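A handful of call sites pass a method group rather than a lambda — Debugging.Assert(ClearEOF) and Debugging.Assert(SetEOF) in the terms enum, and Debugging.Assert(SetReaderTestPoint) in Tokenizer. These follow the Java Lucene idiom of assertions with deliberate side effects: the helper mutates state, then returns true so the assertion itself never fires. A sketch of that idiom, assuming the helpers follow the usual convention (the body below is illustrative, not the actual implementation):

    private bool ClearEOF()
    {
        eof = false;   // side effect that should only happen under asserts
        return true;   // always passes, so Assert never throws
    }

    // The guard also skips the Func<bool> delegate conversion when
    // asserts are off:
    if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF);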
- Debugging.Assert(() => arc.IsFinal); - Debugging.Assert(() => arc.Output != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => arc.IsFinal); + Debugging.Assert(() => arc.Output != null); + } //if (DEBUG) { //System.out.println(" no seek state; push root frame"); @@ -2289,7 +2298,7 @@ public override SeekStatus SeekCeil(BytesRef target) term.Bytes[targetUpto] = (byte)targetLabel; arc = nextArc; // Aggregate output as we go: - Debugging.Assert(() => arc.Output != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Output != null); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -2359,7 +2368,7 @@ public override SeekStatus SeekCeil(BytesRef target) // while (true) // { // Frame f = GetFrame(ord); - // Debugging.Assert(f != null); + // if (Debugging.AssertsEnabled) Debugging.Assert(f != null); // BytesRef prefix = new BytesRef(term.Bytes, 0, f.Prefix); // if (f.NextEnt == -1) // { @@ -2371,7 +2380,7 @@ public override SeekStatus SeekCeil(BytesRef target) // } // if (OuterInstance.Index != null) // { - // Debugging.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame=" + isSeekFrame + " f.arc=" + f.Arc); + // if (Debugging.AssertsEnabled) Debugging.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame=" + isSeekFrame + " f.arc=" + f.Arc); // if (f.Prefix > 0 && isSeekFrame && f.Arc.Label != (term.Bytes[f.Prefix - 1] & 0xFF)) // { // @out.println(" broken seek state: arc.label=" + (char)f.Arc.Label + " vs term byte=" + (char)(term.Bytes[f.Prefix - 1] & 0xFF)); @@ -2422,7 +2431,7 @@ public override BytesRef Next() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); } else { @@ -2434,7 +2443,7 @@ public override BytesRef Next() targetBeforeCurrentLength = currentFrame.ord; - Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); //if (DEBUG) { //System.out.println("\nBTTR.next seg=" + segment + " term=" + brToString(term) + " termExists?=" + termExists + " field=" + fieldInfo.name + " termBlockOrd=" + currentFrame.state.termBlockOrd + " validIndexPrefix=" + validIndexPrefix); //printSeekState(); @@ -2450,7 +2459,7 @@ public override BytesRef Next() // works properly: //if (DEBUG) System.out.println(" re-seek to pending term=" + term.utf8ToString() + " " + term); bool result = SeekExact(term); - Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(() => result); } // Pop finished blocks @@ -2466,7 +2475,7 @@ public override BytesRef Next() if (currentFrame.ord == 0) { //if (DEBUG) System.out.println(" return null"); - Debugging.Assert(SetEOF); + if (Debugging.AssertsEnabled) Debugging.Assert(SetEOF); term.Length = 0; validIndexPrefix = 0; currentFrame.Rewind(); @@ -2520,7 +2529,7 @@ public override BytesRef Term { get { - Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); return term; } } @@ -2529,7 +2538,7 @@ public override int DocFreq { get { - Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); //if (DEBUG) System.out.println("BTR.docFreq"); currentFrame.DecodeMetaData(); //if (DEBUG) System.out.println(" return " + currentFrame.state.docFreq); @@ -2541,7 +2550,7 @@ public override long TotalTermFreq { get { - Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) 
Debugging.Assert(() => !eof); currentFrame.DecodeMetaData(); return currentFrame.state.TotalTermFreq; } @@ -2549,7 +2558,7 @@ public override long TotalTermFreq public override DocsEnum Docs(IBits skipDocs, DocsEnum reuse, DocsFlags flags) { - Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); //if (DEBUG) { //System.out.println("BTTR.docs seg=" + segment); //} @@ -2568,7 +2577,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits skipDocs, DocsAndPos return null; } - Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); currentFrame.DecodeMetaData(); return outerInstance.outerInstance.postingsReader.DocsAndPositions(outerInstance.fieldInfo, currentFrame.state, skipDocs, reuse, flags); } @@ -2578,15 +2587,15 @@ public override void SeekExact(BytesRef target, TermState otherState) // if (DEBUG) { // System.out.println("BTTR.seekExact termState seg=" + segment + " target=" + target.utf8ToString() + " " + target + " state=" + otherState); // } - Debugging.Assert(ClearEOF); + if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF); if (target.CompareTo(term) != 0 || !termExists) { - Debugging.Assert(() => otherState != null && otherState is BlockTermState); + if (Debugging.AssertsEnabled) Debugging.Assert(() => otherState != null && otherState is BlockTermState); currentFrame = staticFrame; currentFrame.state.CopyFrom(otherState); term.CopyBytes(target); currentFrame.metaDataUpto = currentFrame.TermBlockOrd; - Debugging.Assert(() => currentFrame.metaDataUpto > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFrame.metaDataUpto > 0); validIndexPrefix = 0; } else @@ -2599,7 +2608,7 @@ public override void SeekExact(BytesRef target, TermState otherState) public override TermState GetTermState() { - Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); currentFrame.DecodeMetaData(); TermState ts = (TermState)currentFrame.state.Clone(); //if (DEBUG) System.out.println("BTTR.termState seg=" + segment + " state=" + ts); @@ -2731,7 +2740,7 @@ internal void LoadNextFloorBlock() //if (DEBUG) { //System.out.println(" loadNextFloorBlock fp=" + fp + " fpEnd=" + fpEnd); //} - Debugging.Assert(() => arc == null || isFloor, () => "arc=" + arc + " isFloor=" + isFloor); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc == null || isFloor, () => "arc=" + arc + " isFloor=" + isFloor); fp = fpEnd; nextEnt = -1; LoadBlock(); @@ -2765,9 +2774,9 @@ internal void LoadBlock() outerInstance.@in.Seek(fp); int code = outerInstance.@in.ReadVInt32(); entCount = (int)((uint)code >> 1); - Debugging.Assert(() => entCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => entCount > 0); isLastInFloor = (code & 1) != 0; - Debugging.Assert(() => arc == null || (isLastInFloor || isFloor)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc == null || (isLastInFloor || isFloor)); // TODO: if suffixes were stored in random-access // array structure, then we could do binary search @@ -2888,7 +2897,7 @@ public bool Next() public bool NextLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; suffix = 
suffixesReader.ReadVInt32(); startBytePos = suffixesReader.Position; @@ -2906,7 +2915,7 @@ public bool NextLeaf() public bool NextNonLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; int code = suffixesReader.ReadVInt32(); suffix = (int)((uint)code >> 1); @@ -2965,7 +2974,7 @@ public void ScanToFloorFrame(BytesRef target) return; } - Debugging.Assert(() => numFollowFloorBlocks != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numFollowFloorBlocks != 0); long newFP = fpOrig; while (true) @@ -3025,7 +3034,7 @@ public void DecodeMetaData() // lazily catch up on metadata decode: int limit = TermBlockOrd; bool absolute = metaDataUpto == 0; - Debugging.Assert(() => limit > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => limit > 0); // TODO: better API would be "jump straight to term=N"??? while (metaDataUpto < limit) @@ -3081,7 +3090,7 @@ private bool PrefixMatches(BytesRef target) /// public void ScanToSubBlock(long subFP) { - Debugging.Assert(() => !isLeafBlock); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !isLeafBlock); //if (DEBUG) System.out.println(" scanToSubBlock fp=" + fp + " subFP=" + subFP + " entCount=" + entCount + " lastSubFP=" + lastSubFP); //assert nextEnt == 0; if (lastSubFP == subFP) @@ -3089,12 +3098,12 @@ public void ScanToSubBlock(long subFP) //if (DEBUG) System.out.println(" already positioned"); return; } - Debugging.Assert(() => subFP < fp, () => "fp=" + fp + " subFP=" + subFP); + if (Debugging.AssertsEnabled) Debugging.Assert(() => subFP < fp, () => "fp=" + fp + " subFP=" + subFP); long targetSubCode = fp - subFP; //if (DEBUG) System.out.println(" targetSubCode=" + targetSubCode); while (true) { - Debugging.Assert(() => nextEnt < entCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt < entCount); nextEnt++; int code = suffixesReader.ReadVInt32(); suffixesReader.SkipBytes(isLeafBlock ? 
code : (int)((uint)code >> 1)); @@ -3133,7 +3142,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) { // if (DEBUG) System.out.println(" scanToTermLeaf: block fp=" + fp + " prefix=" + prefix + " nextEnt=" + nextEnt + " (of " + entCount + ") target=" + brToString(target) + " term=" + brToString(term)); - Debugging.Assert(() => nextEnt != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1); outerInstance.termExists = true; subCode = 0; @@ -3147,7 +3156,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) return SeekStatus.END; } - Debugging.Assert(() => PrefixMatches(target)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => PrefixMatches(target)); // Loop over each entry (term or sub-block) in this block: //nextTerm: while(nextEnt < entCount) { @@ -3186,7 +3195,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) } else { - Debugging.Assert(() => targetPos == targetLimit); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetPos == targetLimit); cmp = termLen - target.Length; stop = true; } @@ -3242,7 +3251,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) // would have followed the index to this // sub-block from the start: - Debugging.Assert(() => outerInstance.termExists); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.termExists); FillTerm(); //if (DEBUG) System.out.println(" found!"); return SeekStatus.FOUND; @@ -3279,7 +3288,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) { //if (DEBUG) System.out.println(" scanToTermNonLeaf: block fp=" + fp + " prefix=" + prefix + " nextEnt=" + nextEnt + " (of " + entCount + ") target=" + brToString(target) + " term=" + brToString(term)); - Debugging.Assert(() => nextEnt != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1); if (nextEnt == entCount) { @@ -3291,7 +3300,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) return SeekStatus.END; } - Debugging.Assert(() => PrefixMatches(target)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => PrefixMatches(target)); // Loop over each entry (term or sub-block) in this block: //nextTerm: while(nextEnt < entCount) { @@ -3341,7 +3350,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) } else { - Debugging.Assert(() => targetPos == targetLimit); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetPos == targetLimit); cmp = termLen - target.Length; stop = true; } @@ -3398,7 +3407,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) // would have followed the index to this // sub-block from the start: - Debugging.Assert(() => outerInstance.termExists); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.termExists); FillTerm(); //if (DEBUG) System.out.println(" found!"); return SeekStatus.FOUND; diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs index 2e8fc86343..7f5065ac29 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs @@ -259,9 +259,9 @@ private class FieldMetaData public FieldMetaData(FieldInfo fieldInfo, BytesRef rootCode, long numTerms, long indexStartFP, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize) { - Debugging.Assert(() => numTerms > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms > 0); this.FieldInfo = fieldInfo; - Debugging.Assert(() => rootCode != null, () => 
"field=" + fieldInfo.Name + " numTerms=" + numTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rootCode != null, () => "field=" + fieldInfo.Name + " numTerms=" + numTerms); this.RootCode = rootCode; this.IndexStartFP = indexStartFP; this.NumTerms = numTerms; @@ -368,14 +368,14 @@ public override TermsConsumer AddField(FieldInfo field) { //DEBUG = field.name.Equals("id", StringComparison.Ordinal); //if (DEBUG) System.out.println("\nBTTW.addField seg=" + segment + " field=" + field.name); - Debugging.Assert(() => currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); currentField = field; return new TermsWriter(this, field); } internal static long EncodeOutput(long fp, bool hasTerms, bool isFloor) { - Debugging.Assert(() => fp < (1L << 62)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fp < (1L << 62)); return (fp << 2) | (uint)(hasTerms ? OUTPUT_FLAG_HAS_TERMS : 0) | (uint)(isFloor ? OUTPUT_FLAG_IS_FLOOR : 0); } @@ -480,11 +480,11 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc { // LUCENENET specific - we use a custom wrapper function to display floorBlocks, since // it might contain garbage that cannot be converted into text. - Debugging.Assert( + if (Debugging.AssertsEnabled) Debugging.Assert( () => (IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null), () => "isFloor=" + IsFloor + " floorBlocks=" + ToString(floorBlocks)); - Debugging.Assert(() => scratchBytes.GetFilePointer() == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchBytes.GetFilePointer() == 0); // TODO: try writing the leading vLong in MSB order // (opposite of what Lucene does today), for better @@ -495,12 +495,12 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc scratchBytes.WriteVInt32(floorBlocks.Count); foreach (PendingBlock sub in floorBlocks) { - Debugging.Assert(() => sub.FloorLeadByte != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sub.FloorLeadByte != -1); //if (DEBUG) { // System.out.println(" write floorLeadByte=" + Integer.toHexString(sub.floorLeadByte&0xff)); //} scratchBytes.WriteByte((byte)(sbyte)sub.FloorLeadByte); - Debugging.Assert(() => sub.Fp > Fp); + if (Debugging.AssertsEnabled) Debugging.Assert(() => sub.Fp > Fp); scratchBytes.WriteVInt64((sub.Fp - Fp) << 1 | (uint)(sub.HasTerms ? 
                     1 : 0));
             }
         }
 
@@ -508,7 +508,7 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc
             ByteSequenceOutputs outputs = ByteSequenceOutputs.Singleton;
             Builder<BytesRef> indexBuilder = new Builder<BytesRef>(FST.INPUT_TYPE.BYTE1, 0, 0, true, false, int.MaxValue, outputs, null, false, PackedInt32s.COMPACT, true, 15);
             var bytes = new byte[(int)scratchBytes.GetFilePointer()];
-            Debugging.Assert(() => bytes.Length > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes.Length > 0);
             scratchBytes.WriteTo(bytes, 0);
             indexBuilder.Add(Util.ToInt32sRef(Prefix, scratchIntsRef), new BytesRef(bytes, 0, bytes.Length));
             scratchBytes.Reset();
@@ -727,8 +727,11 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun
                         // Suffix is 0, ie prefix 'foo' and term is
                         // 'foo' so the term has empty string suffix
                         // in this block
-                        Debugging.Assert(() => lastSuffixLeadLabel == -1);
-                        Debugging.Assert(() => numSubs == 0);
+                        if (Debugging.AssertsEnabled)
+                        {
+                            Debugging.Assert(() => lastSuffixLeadLabel == -1);
+                            Debugging.Assert(() => numSubs == 0);
+                        }
                         suffixLeadLabel = -1;
                     }
                     else
@@ -739,7 +742,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun
                 else
                 {
                     PendingBlock block = (PendingBlock)ent;
-                    Debugging.Assert(() => block.Prefix.Length > prefixLength);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => block.Prefix.Length > prefixLength);
                     suffixLeadLabel = block.Prefix.Bytes[block.Prefix.Offset + prefixLength] & 0xff;
                 }
 
@@ -860,7 +863,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun
                         //System.out.println("        = " + pendingCount);
                         pendingCount = 0;
 
-                        Debugging.Assert(() => outerInstance.minItemsInBlock == 1 || subCount > 1, () => "minItemsInBlock=" + outerInstance.minItemsInBlock + " subCount=" + subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.minItemsInBlock == 1 || subCount > 1, () => "minItemsInBlock=" + outerInstance.minItemsInBlock + " subCount=" + subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength);
                         subCount = 0;
                         startLabel = subBytes[sub + 1];
 
@@ -875,8 +878,11 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun
                     // block.  NOTE that this may be too small (<
                     // minItemsInBlock); need a true segmenter
                     // here
-                    Debugging.Assert(() => startLabel != -1);
-                    Debugging.Assert(() => firstBlock != null);
+                    if (Debugging.AssertsEnabled)
+                    {
+                        Debugging.Assert(() => startLabel != -1);
+                        Debugging.Assert(() => firstBlock != null);
+                    }
                     prevTerm.Int32s[prevTerm.Offset + prefixLength] = startLabel;
                     //System.out.println("  final " + (numSubs-sub-1) + " subs");
                     /*
@@ -896,7 +902,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun
 
             prevTerm.Int32s[prevTerm.Offset + prefixLength] = savLabel;
 
-            Debugging.Assert(() => firstBlock != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => firstBlock != null);
             firstBlock.CompileIndex(floorBlocks, outerInstance.scratchBytes);
 
             pending.Add(firstBlock);
@@ -925,11 +931,11 @@ private string ToString(BytesRef b)
         // block:
         private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexPrefixLength, int startBackwards, int length, int futureTermCount, bool isFloor, int floorLeadByte, bool isLastInFloor)
         {
-            Debugging.Assert(() => length > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => length > 0);
 
             int start = pending.Count - startBackwards;
 
-            Debugging.Assert(() => start >= 0, () => "pending.Count=" + pending.Count + " startBackwards=" + startBackwards + " length=" + length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => start >= 0, () => "pending.Count=" + pending.Count + " startBackwards=" + startBackwards + " length=" + length);
 
             IList<PendingEntry> slice = pending.SubList(start, start + length);
 
@@ -988,7 +994,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP
                     subIndices = null;
                     foreach (PendingEntry ent in slice)
                     {
-                        Debugging.Assert(() => ent.IsTerm);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => ent.IsTerm);
                         PendingTerm term = (PendingTerm)ent;
                         BlockTermState state = term.State;
                         int suffix = term.Term.Length - prefixLength;
@@ -1006,7 +1012,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP
                         statsWriter.WriteVInt32(state.DocFreq);
                         if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY)
                         {
-                            Debugging.Assert(() => state.TotalTermFreq >= state.DocFreq, () => state.TotalTermFreq + " vs " + state.DocFreq);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => state.TotalTermFreq >= state.DocFreq, () => state.TotalTermFreq + " vs " + state.DocFreq);
                             statsWriter.WriteVInt64(state.TotalTermFreq - state.DocFreq);
                         }
 
@@ -1014,7 +1020,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP
                         outerInstance.postingsWriter.EncodeTerm(longs, bytesWriter, fieldInfo, state, absolute);
                         for (int pos = 0; pos < longsSize; pos++)
                         {
-                            Debugging.Assert(() => longs[pos] >= 0);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => longs[pos] >= 0);
                             metaWriter.WriteVInt64(longs[pos]);
                         }
                         bytesWriter.WriteTo(metaWriter);
@@ -1049,7 +1055,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP
                             statsWriter.WriteVInt32(state.DocFreq);
                             if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY)
                             {
-                                Debugging.Assert(() => state.TotalTermFreq >= state.DocFreq);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(() => state.TotalTermFreq >= state.DocFreq);
                                 statsWriter.WriteVInt64(state.TotalTermFreq - state.DocFreq);
                             }
 
@@ -1065,7 +1071,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP
                             outerInstance.postingsWriter.EncodeTerm(longs, bytesWriter, fieldInfo, state, absolute);
                             for (int pos = 0; pos < longsSize; pos++)
                             {
-                                Debugging.Assert(() => longs[pos] >= 0);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(() => longs[pos] >= 0);
                                 metaWriter.WriteVInt64(longs[pos]);
                             }
                             bytesWriter.WriteTo(metaWriter);
@@ -1079,13 +1085,13 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP
                         PendingBlock block = (PendingBlock)ent;
                         int suffix = block.Prefix.Length - prefixLength;
 
-                        Debugging.Assert(() => suffix > 0);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => suffix > 0);
 
                         // For non-leaf block we borrow 1 bit to record
                         // if entry is term or sub-block
                         suffixWriter.WriteVInt32((suffix << 1) | 1);
                         suffixWriter.WriteBytes(block.Prefix.Bytes, prefixLength, suffix);
 
-                        Debugging.Assert(() => block.Fp < startFP);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => block.Fp < startFP);
 
                         // if (DEBUG) {
                         //   BytesRef suffixBytes = new BytesRef(suffix);
@@ -1099,7 +1105,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP
                     }
                 }
 
-                Debugging.Assert(() => subIndices.Count != 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => subIndices.Count != 0);
             }
 
             // TODO: we could block-write the term suffix pointers;
@@ -1178,7 +1184,7 @@ public override PostingsConsumer StartTerm(BytesRef text)
 
             public override void FinishTerm(BytesRef text, TermStats stats)
             {
-                Debugging.Assert(() => stats.DocFreq > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq > 0);
                 //if (DEBUG) System.out.println("BTTW.finishTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment + " df=" + stats.docFreq);
 
                 blockBuilder.Add(Util.ToInt32sRef(text, scratchIntsRef), noOutputs.NoOutput);
@@ -1200,10 +1206,13 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount
                     blockBuilder.Finish();
 
                     // We better have one final "root" block:
-                    Debugging.Assert(() => pending.Count == 1 && !pending[0].IsTerm, () => "pending.size()=" + pending.Count + " pending=" + pending);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => pending.Count == 1 && !pending[0].IsTerm, () => "pending.size()=" + pending.Count + " pending=" + pending);
                     PendingBlock root = (PendingBlock)pending[0];
-                    Debugging.Assert(() => root.Prefix.Length == 0);
-                    Debugging.Assert(() => root.Index.EmptyOutput != null);
+                    if (Debugging.AssertsEnabled)
+                    {
+                        Debugging.Assert(() => root.Prefix.Length == 0);
+                        Debugging.Assert(() => root.Index.EmptyOutput != null);
+                    }
 
                     this.sumTotalTermFreq = sumTotalTermFreq;
                     this.sumDocFreq = sumDocFreq;
@@ -1226,9 +1235,12 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount
                 }
                 else
                 {
-                    Debugging.Assert(() => sumTotalTermFreq == 0 || fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY && sumTotalTermFreq == -1);
-                    Debugging.Assert(() => sumDocFreq == 0);
-                    Debugging.Assert(() => docCount == 0);
+                    if (Debugging.AssertsEnabled)
+                    {
+                        Debugging.Assert(() => sumTotalTermFreq == 0 || fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY && sumTotalTermFreq == -1);
+                        Debugging.Assert(() => sumDocFreq == 0);
+                        Debugging.Assert(() => docCount == 0);
+                    }
                 }
             }
 
diff --git a/src/Lucene.Net/Codecs/CodecUtil.cs b/src/Lucene.Net/Codecs/CodecUtil.cs
index 648b032464..66980d925d 100644
--- a/src/Lucene.Net/Codecs/CodecUtil.cs
+++ b/src/Lucene.Net/Codecs/CodecUtil.cs
@@ -268,7 +268,7 @@ public static long ChecksumEntireFile(IndexInput input)
             IndexInput clone = (IndexInput)input.Clone();
             clone.Seek(0);
             ChecksumIndexInput @in = new BufferedChecksumIndexInput(clone);
-            Debugging.Assert(() => @in.GetFilePointer() == 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.GetFilePointer() == 0);
             @in.Seek(@in.Length - FooterLength());
             return CheckFooter(@in);
         }
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
index 05fd3e8fb2..a87b9187d3 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs
@@ -106,7 +106,7 @@ private void Reset()
 
         private void WriteBlock()
         {
-            Debugging.Assert(() => blockChunks > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => blockChunks > 0);
             fieldsIndexOut.WriteVInt32(blockChunks);
 
             // The trick here is that we only store the difference from the average start
@@ -144,7 +144,7 @@ private void WriteBlock()
             for (int i = 0; i < blockChunks; ++i)
             {
                 long delta = docBase - avgChunkDocs * i;
-                Debugging.Assert(() => PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue);
                 writer.Add(MoveSignToLowOrderBit(delta));
                 docBase += docBaseDeltas[i];
             }
@@ -179,7 +179,7 @@ private void WriteBlock()
             {
                 startPointer += startPointerDeltas[i];
                 long delta = startPointer - avgChunkSize * i;
-                Debugging.Assert(() => PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue);
                 writer.Add(MoveSignToLowOrderBit(delta));
             }
             writer.Finish();
@@ -197,7 +197,7 @@ internal void WriteIndex(int numDocs, long startPointer)
             {
                 firstStartPointer = maxStartPointer = startPointer;
             }
-            Debugging.Assert(() => firstStartPointer > 0 && startPointer >= firstStartPointer);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => firstStartPointer > 0 && startPointer >= firstStartPointer);
 
             docBaseDeltas[blockChunks] = numDocs;
             startPointerDeltas[blockChunks] = startPointer - maxStartPointer;
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
index 46eb4f4b28..b741c66f18 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
@@ -101,7 +101,7 @@ public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segment
                 indexStream = d.OpenChecksumInput(indexStreamFN, context);
                 string codecNameIdx = formatName + CompressingStoredFieldsWriter.CODEC_SFX_IDX;
                 version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT);
-                Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer());
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer());
                 indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
 
                 long maxPointer = -1;
@@ -140,7 +140,7 @@ public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segment
                 {
                     throw new CorruptIndexException("Version mismatch between stored fields index and data: " + version + " != " + fieldsVersion);
                 }
-                Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer());
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer());
 
                 if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS)
                 {
@@ -333,8 +333,11 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor)
             DataInput documentInput;
             if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS && totalLength >= 2 * chunkSize)
             {
-                Debugging.Assert(() => chunkSize > 0);
-                Debugging.Assert(() => offset < chunkSize);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => chunkSize > 0);
+                    Debugging.Assert(() => offset < chunkSize);
+                }
 
                 decompressor.Decompress(fieldsStream, chunkSize, offset, Math.Min(length, chunkSize - offset), bytes);
                 documentInput = new DataInputAnonymousInnerClassHelper(this, offset, length);
@@ -343,7 +346,7 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor)
             {
                 BytesRef bytes = totalLength <= BUFFER_REUSE_THRESHOLD ? this.bytes : new BytesRef();
                 decompressor.Decompress(fieldsStream, totalLength, offset, length, bytes);
-                Debugging.Assert(() => bytes.Length == length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes.Length == length);
                 documentInput = new ByteArrayDataInput(bytes.Bytes, bytes.Offset, bytes.Length);
             }
 
@@ -354,7 +357,7 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor)
                 FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
 
                 int bits = (int)(infoAndBits & CompressingStoredFieldsWriter.TYPE_MASK);
-                Debugging.Assert(() => bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE, () => "bits=" + bits.ToString("x"));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE, () => "bits=" + bits.ToString("x"));
 
                 switch (visitor.NeedsField(fieldInfo))
                 {
@@ -391,7 +394,7 @@ public DataInputAnonymousInnerClassHelper(CompressingStoredFieldsReader outerIns
 
             internal virtual void FillBuffer()
             {
-                Debugging.Assert(() => decompressed <= length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => decompressed <= length);
                 if (decompressed == length)
                 {
                     throw new Exception();
@@ -489,7 +492,7 @@ internal int ChunkSize()
             ///
             internal void Next(int doc)
             {
-                Debugging.Assert(() => doc >= this.docBase + this.chunkDocs, () => doc + " " + this.docBase + " " + this.chunkDocs);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => doc >= this.docBase + this.chunkDocs, () => doc + " " + this.docBase + " " + this.chunkDocs);
                 fieldsStream.Seek(outerInstance.indexReader.GetStartPointer(doc));
 
                 int docBase = fieldsStream.ReadVInt32();
@@ -588,7 +591,7 @@ internal void Decompress()
             ///
             internal void CopyCompressedData(DataOutput @out)
             {
-                Debugging.Assert(() => outerInstance.Version == CompressingStoredFieldsWriter.VERSION_CURRENT);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.Version == CompressingStoredFieldsWriter.VERSION_CURRENT);
                 long chunkEnd = docBase + chunkDocs == outerInstance.numDocs ? outerInstance.maxPointer : outerInstance.indexReader.GetStartPointer(docBase + chunkDocs);
                 @out.CopyBytes(fieldsStream, chunkEnd - fieldsStream.GetFilePointer());
             }
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs
index b9a390067b..025efa393f 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs
@@ -91,7 +91,7 @@ public sealed class CompressingStoredFieldsWriter : StoredFieldsWriter
         /// Sole constructor.
public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize) { - Debugging.Assert(() => directory != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => directory != null); this.directory = directory; this.segment = si.Name; this.segmentSuffix = segmentSuffix; @@ -114,8 +114,11 @@ public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string string codecNameDat = formatName + CODEC_SFX_DAT; CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT); CodecUtil.WriteHeader(fieldsStream, codecNameDat, VERSION_CURRENT); - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + } indexWriter = new CompressingStoredFieldsIndexWriter(indexStream); indexStream = null; @@ -178,7 +181,7 @@ public override void FinishDocument() /// private static void SaveInt32s(int[] values, int length, DataOutput @out) { - Debugging.Assert(() => length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length > 0); if (length == 1) { @out.WriteVInt32(values[0]); @@ -246,7 +249,7 @@ private void Flush() for (int i = numBufferedDocs - 1; i > 0; --i) { lengths[i] = endOffsets[i] - endOffsets[i - 1]; - Debugging.Assert(() => lengths[i] >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lengths[i] >= 0); } WriteHeader(docBase, numBufferedDocs, numStoredFields, lengths); @@ -374,7 +377,7 @@ public override void Finish(FieldInfos fis, int numDocs) } else { - Debugging.Assert(() => bufferedDocs.Length == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferedDocs.Length == 0); } if (docBase != numDocs) { @@ -382,7 +385,7 @@ public override void Finish(FieldInfos fis, int numDocs) } indexWriter.Finish(numDocs, fieldsStream.GetFilePointer()); CodecUtil.WriteFooter(fieldsStream); - Debugging.Assert(() => bufferedDocs.Length == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferedDocs.Length == 0); } [MethodImpl(MethodImplOptions.NoInlining)] @@ -443,7 +446,7 @@ public override int Merge(MergeState mergeState) if (numBufferedDocs == 0 && startOffsets[it.chunkDocs - 1] < chunkSize && startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1] >= chunkSize && NextDeletedDoc(it.docBase, liveDocs, it.docBase + it.chunkDocs) == it.docBase + it.chunkDocs) // no deletion in the chunk - chunk is large enough - chunk is small enough - starting a new chunk { - Debugging.Assert(() => docID == it.docBase); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID == it.docBase); // no need to decompress, just copy data indexWriter.WriteIndex(it.chunkDocs, fieldsStream.GetFilePointer()); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs index c8e4a25ce0..b1dd11a1d4 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs @@ -78,7 +78,7 @@ public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentS indexStream = d.OpenChecksumInput(indexStreamFN, context); string 
codecNameIdx = formatName + CompressingTermVectorsWriter.CODEC_SFX_IDX; version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingTermVectorsWriter.VERSION_START, CompressingTermVectorsWriter.VERSION_CURRENT); - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); indexReader = new CompressingStoredFieldsIndexReader(indexStream, si); if (version >= CompressingTermVectorsWriter.VERSION_CHECKSUM) @@ -104,7 +104,7 @@ public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentS { throw new Exception("Version mismatch between stored fields index and data: " + version + " != " + version2); } - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); packedIntsVersion = vectorsStream.ReadVInt32(); chunkSize = vectorsStream.ReadVInt32(); @@ -216,7 +216,7 @@ public override Fields Get(int doc) int[] fieldNums; { int token = vectorsStream.ReadByte() & 0xFF; - Debugging.Assert(() => token != 0); // means no term vectors, cannot happen since we checked for numFields == 0 + if (Debugging.AssertsEnabled) Debugging.Assert(() => token != 0); // means no term vectors, cannot happen since we checked for numFields == 0 int bitsPerFieldNum = token & 0x1F; int totalDistinctFields = (int)((uint)token >> 5); if (totalDistinctFields == 0x07) @@ -246,7 +246,7 @@ public override Fields Get(int doc) for (int i = 0; i < totalFields; ++i) { int fieldNumOff = (int)allFieldNumOffs.Get(i); - Debugging.Assert(() => fieldNumOff >= 0 && fieldNumOff < fieldNums.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldNumOff >= 0 && fieldNumOff < fieldNums.Length); int fgs = (int)fieldFlags.Get(fieldNumOff); f.Set(i, fgs); } @@ -383,7 +383,7 @@ public override Fields Get(int doc) totalPayloads += freq; } } - Debugging.Assert(() => i != totalFields - 1 || termIndex == totalTerms, () => termIndex + " " + totalTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => i != totalFields - 1 || termIndex == totalTerms, () => termIndex + " " + totalTerms); } int[][] positionIndex = PositionIndex(skip, numFields, numTerms, termFreqs); @@ -516,7 +516,7 @@ public override Fields Get(int doc) ++posIdx; } } - Debugging.Assert(() => posIdx == totalFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => posIdx == totalFreq); } termIndex += termCount; } @@ -538,7 +538,7 @@ public override Fields Get(int doc) } termIndex += termCount; } - Debugging.Assert(() => termIndex == totalTerms, () => termIndex + " " + totalTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termIndex == totalTerms, () => termIndex + " " + totalTerms); } // decompress data @@ -577,7 +577,7 @@ public override Fields Get(int doc) } } - Debugging.Assert(() => Sum(fieldLengths) == docLen, () => Sum(fieldLengths) + " != " + docLen); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Sum(fieldLengths) == docLen, () => Sum(fieldLengths) + " != " + docLen); return new TVFields(this, fieldNums, FieldFlags, fieldNumOffs, fieldNumTerms, fieldLengths, prefixLengths, suffixLengths, fieldTermFreqs, positionIndex, positions, startOffsets, lengths, payloadBytes, payloadIndex, suffixBytes); } @@ -732,7 +732,7 @@ public override Terms GetTerms(string field) break; } } - Debugging.Assert(() 
=> fieldLen >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldLen >= 0); return new TVTerms(outerInstance, numTerms[idx], fieldFlags[idx], prefixLengths[idx], suffixLengths[idx], termFreqs[idx], positionIndex[idx], positions[idx], startOffsets[idx], lengths[idx], payloadIndex[idx], payloadBytes, new BytesRef(suffixBytes.Bytes, suffixBytes.Offset + fieldOff, fieldLen)); } @@ -843,7 +843,7 @@ public override BytesRef Next() } else { - Debugging.Assert(() => ord < numTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord < numTerms); ++ord; } diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs index 4c2d15a71f..04ea6cf52c 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs @@ -245,7 +245,7 @@ internal virtual void AddPosition(int position, int startOffset, int length, int /// Sole constructor. public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize) { - Debugging.Assert(() => directory != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => directory != null); this.directory = directory; this.segment = si.Name; this.segmentSuffix = segmentSuffix; @@ -269,8 +269,11 @@ public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string string codecNameDat = formatName + CODEC_SFX_DAT; CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT); CodecUtil.WriteHeader(vectorsStream, codecNameDat, VERSION_CURRENT); - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); + Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + } indexWriter = new CompressingStoredFieldsIndexWriter(indexStream); indexStream = null; @@ -351,7 +354,7 @@ public override void FinishField() public override void StartTerm(BytesRef term, int freq) { - Debugging.Assert(() => freq >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => freq >= 1); int prefix = StringHelper.BytesDifference(lastTerm, term); curField.AddTerm(freq, prefix, term.Length - prefix); termSuffixes.WriteBytes(term.Bytes, term.Offset + prefix, term.Length - prefix); @@ -367,7 +370,7 @@ public override void StartTerm(BytesRef term, int freq) public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - Debugging.Assert(() => curField.flags != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => curField.flags != 0); curField.AddPosition(position, startOffset, endOffset - startOffset, payload == null ? 
0 : payload.Length); if (curField.hasPayloads && payload != null) { @@ -384,7 +387,7 @@ private bool TriggerFlush() private void Flush() { int chunkDocs = pendingDocs.Count; - Debugging.Assert(() => chunkDocs > 0, chunkDocs.ToString); + if (Debugging.AssertsEnabled) Debugging.Assert(() => chunkDocs > 0, chunkDocs.ToString); // write the index file indexWriter.WriteIndex(chunkDocs, vectorsStream.GetFilePointer()); @@ -464,7 +467,7 @@ private int[] FlushFieldNums() } int numDistinctFields = fieldNums.Count; - Debugging.Assert(() => numDistinctFields > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDistinctFields > 0); int bitsRequired = PackedInt32s.BitsRequired(fieldNums.Max); int token = (Math.Min(numDistinctFields - 1, 0x07) << 5) | bitsRequired; vectorsStream.WriteByte((byte)(sbyte)token); @@ -496,7 +499,7 @@ private void FlushFields(int totalFields, int[] fieldNums) foreach (FieldData fd in dd.fields) { int fieldNumIndex = Array.BinarySearch(fieldNums, fd.fieldNum); - Debugging.Assert(() => fieldNumIndex >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldNumIndex >= 0); writer.Add(fieldNumIndex); } } @@ -516,7 +519,7 @@ private void FlushFlags(int totalFields, int[] fieldNums) foreach (FieldData fd in dd.fields) { int fieldNumOff = Array.BinarySearch(fieldNums, fd.fieldNum); - Debugging.Assert(() => fieldNumOff >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldNumOff >= 0); if (fieldFlags[fieldNumOff] == -1) { fieldFlags[fieldNumOff] = fd.flags; @@ -538,10 +541,10 @@ private void FlushFlags(int totalFields, int[] fieldNums) PackedInt32s.Writer writer = PackedInt32s.GetWriterNoHeader(vectorsStream, PackedInt32s.Format.PACKED, fieldFlags.Length, FLAGS_BITS, 1); foreach (int flags in fieldFlags) { - Debugging.Assert(() => flags >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => flags >= 0); writer.Add(flags); } - Debugging.Assert(() => writer.Ord == fieldFlags.Length - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => writer.Ord == fieldFlags.Length - 1); writer.Finish(); } else @@ -556,7 +559,7 @@ private void FlushFlags(int totalFields, int[] fieldNums) writer.Add(fd.flags); } } - Debugging.Assert(() => writer.Ord == totalFields - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => writer.Ord == totalFields - 1); writer.Finish(); } } @@ -581,7 +584,7 @@ private void FlushNumTerms(int totalFields) writer.Add(fd.numTerms); } } - Debugging.Assert(() => writer.Ord == totalFields - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => writer.Ord == totalFields - 1); writer.Finish(); } @@ -649,7 +652,7 @@ private void FlushPositions() previousPosition = position; } } - Debugging.Assert(() => pos == fd.totalPositions); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos == fd.totalPositions); } } } @@ -685,7 +688,7 @@ private void FlushOffsets(int[] fieldNums) ++pos; } } - Debugging.Assert(() => pos == fd.totalPositions); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos == fd.totalPositions); } } } @@ -753,7 +756,7 @@ private void FlushOffsets(int[] fieldNums) writer.Add(lengthsBuf[fd.offStart + pos++] - fd.prefixLengths[i] - fd.suffixLengths[i]); } } - Debugging.Assert(() => pos == fd.totalPositions); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos == fd.totalPositions); } } } @@ -797,8 +800,11 @@ public override void Finish(FieldInfos fis, int numDocs) public override void AddProx(int numProx, DataInput positions, DataInput offsets) { - Debugging.Assert(() => (curField.hasPositions) == 
(positions != null)); - Debugging.Assert(() => (curField.hasOffsets) == (offsets != null)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => (curField.hasPositions) == (positions != null)); + Debugging.Assert(() => (curField.hasOffsets) == (offsets != null)); + } if (curField.hasPositions) { @@ -920,7 +926,7 @@ public override int Merge(MergeState mergeState) { int docBase = vectorsStream.ReadVInt32(); int chunkDocs = vectorsStream.ReadVInt32(); - Debugging.Assert(() => docBase + chunkDocs <= matchingSegmentReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docBase + chunkDocs <= matchingSegmentReader.MaxDoc); if (docBase + chunkDocs < matchingSegmentReader.MaxDoc && NextDeletedDoc(docBase, liveDocs, docBase + chunkDocs) == docBase + chunkDocs) { long chunkEnd = index.GetStartPointer(docBase + chunkDocs); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs index fe3e2267dd..0801ce3100 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs @@ -152,7 +152,7 @@ public DecompressorAnonymousInnerClassHelper() public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes) { - Debugging.Assert(() => offset + length <= originalLength); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offset + length <= originalLength); // add 7 padding bytes, this is not necessary but can help decompression run faster if (bytes.Bytes.Length < originalLength + 7) { @@ -212,7 +212,7 @@ internal DeflateDecompressor() public override void Decompress(DataInput input, int originalLength, int offset, int length, BytesRef bytes) { - Debugging.Assert(() => offset + length <= originalLength); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offset + length <= originalLength); if (length == 0) { bytes.Length = 0; @@ -275,7 +275,7 @@ public override void Compress(byte[] bytes, int off, int len, DataOutput output) if (resultArray.Length == 0) { - Debugging.Assert(() => len == 0, len.ToString); + if (Debugging.AssertsEnabled) Debugging.Assert(() => len == 0, len.ToString); output.WriteVInt32(0); return; } diff --git a/src/Lucene.Net/Codecs/Compressing/LZ4.cs b/src/Lucene.Net/Codecs/Compressing/LZ4.cs index cf1c493536..f48d040325 100644 --- a/src/Lucene.Net/Codecs/Compressing/LZ4.cs +++ b/src/Lucene.Net/Codecs/Compressing/LZ4.cs @@ -76,7 +76,7 @@ private static bool ReadInt32Equals(byte[] buf, int i, int j) private static int CommonBytes(byte[] b, int o1, int o2, int limit) { - Debugging.Assert(() => o1 < o2); + if (Debugging.AssertsEnabled) Debugging.Assert(() => o1 < o2); int count = 0; while (o2 < limit && b[o1++] == b[o2++]) { @@ -135,7 +135,7 @@ public static int Decompress(DataInput compressed, int decompressedLen, byte[] d var byte1 = compressed.ReadByte(); var byte2 = compressed.ReadByte(); int matchDec = (byte1 & 0xFF) | ((byte2 & 0xFF) << 8); - Debugging.Assert(() => matchDec > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => matchDec > 0); int matchLen = token & 0x0F; if (matchLen == 0x0F) @@ -203,14 +203,14 @@ private static void EncodeLastLiterals(byte[] bytes, int anchor, int literalLen, private static void EncodeSequence(byte[] bytes, int anchor, int matchRef, int matchOff, int matchLen, DataOutput @out) { int literalLen = matchOff - anchor; - Debugging.Assert(() => matchLen >= 4); + if (Debugging.AssertsEnabled) Debugging.Assert(() => matchLen >= 4); // encode token int token = 
(Math.Min(literalLen, 0x0F) << 4) | Math.Min(matchLen - 4, 0x0F); EncodeLiterals(bytes, token, anchor, literalLen, @out); // encode match dec int matchDec = matchOff - matchRef; - Debugging.Assert(() => matchDec > 0 && matchDec < 1 << 16); + if (Debugging.AssertsEnabled) Debugging.Assert(() => matchDec > 0 && matchDec < 1 << 16); @out.WriteByte((byte)(sbyte)matchDec); @out.WriteByte((byte)(sbyte)((int)((uint)matchDec >> 8))); @@ -275,7 +275,7 @@ public static void Compress(byte[] bytes, int off, int len, DataOutput @out, Has int v = ReadInt32(bytes, off); int h = Hash(v, hashLog); @ref = @base + (int)hashTable.Get(h); - Debugging.Assert(() => PackedInt32s.BitsRequired(off - @base) <= hashTable.BitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => PackedInt32s.BitsRequired(off - @base) <= hashTable.BitsPerValue); hashTable.Set(h, off - @base); if (off - @ref < MAX_DISTANCE && ReadInt32(bytes, @ref) == v) { @@ -297,7 +297,7 @@ public static void Compress(byte[] bytes, int off, int len, DataOutput @out, Has // last literals int literalLen = end - anchor; - Debugging.Assert(() => literalLen >= LAST_LITERALS || literalLen == len); + if (Debugging.AssertsEnabled) Debugging.Assert(() => literalLen >= LAST_LITERALS || literalLen == len); EncodeLastLiterals(bytes, anchor, end - anchor, @out); } @@ -365,7 +365,7 @@ private void AddHash(byte[] bytes, int off) int v = ReadInt32(bytes, off); int h = HashHC(v); int delta = off - hashTable[h]; - Debugging.Assert(() => delta > 0, delta.ToString); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delta > 0, delta.ToString); if (delta >= MAX_DISTANCE) { delta = MAX_DISTANCE - 1; @@ -513,7 +513,7 @@ public static void CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou while (true) { - Debugging.Assert(() => match1.start >= anchor); + if (Debugging.AssertsEnabled) Debugging.Assert(() => match1.start >= anchor); if (match1.End() >= mfLimit || !ht.InsertAndFindWiderMatch(src, match1.End() - 2, match1.start + 1, matchLimit, match1.len, match2)) { // no better match @@ -529,7 +529,7 @@ public static void CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou CopyTo(match0, match1); } } - Debugging.Assert(() => match2.start > match1.start); + if (Debugging.AssertsEnabled) Debugging.Assert(() => match2.start > match1.start); if (match2.start - match1.start < 3) // First Match too small : removed { diff --git a/src/Lucene.Net/Codecs/DocValuesConsumer.cs b/src/Lucene.Net/Codecs/DocValuesConsumer.cs index 8cbd4a53e9..d1ac158bce 100644 --- a/src/Lucene.Net/Codecs/DocValuesConsumer.cs +++ b/src/Lucene.Net/Codecs/DocValuesConsumer.cs @@ -486,7 +486,7 @@ private IEnumerable GetMergeSortedSetValuesEnumerable(OrdinalMap map, if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpto)) { - Debugging.Assert(() => docIDUpto < currentReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docIDUpto < currentReader.MaxDoc); SortedSetDocValues dv = dvs[readerUpto]; dv.SetDocument(docIDUpto); ordUpto = ordLength = 0; @@ -516,7 +516,7 @@ internal class BitsFilteredTermsEnum : FilteredTermsEnum internal BitsFilteredTermsEnum(TermsEnum @in, Int64BitSet liveTerms) : base(@in, false) { - Debugging.Assert(() => liveTerms != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => liveTerms != null); this.liveTerms = liveTerms; } diff --git a/src/Lucene.Net/Codecs/FieldsConsumer.cs b/src/Lucene.Net/Codecs/FieldsConsumer.cs index 5c791189ab..a39d96f7b8 100644 --- a/src/Lucene.Net/Codecs/FieldsConsumer.cs +++ 
b/src/Lucene.Net/Codecs/FieldsConsumer.cs @@ -87,7 +87,7 @@ public virtual void Merge(MergeState mergeState, Fields fields) foreach (string field in fields) { FieldInfo info = mergeState.FieldInfos.FieldInfo(field); - Debugging.Assert(() => info != null, () => "FieldInfo for field is null: " + field); + if (Debugging.AssertsEnabled) Debugging.Assert(() => info != null, () => "FieldInfo for field is null: " + field); Terms terms = fields.GetTerms(field); if (terms != null) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs index 94db36b0b8..e4ffdbf322 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs @@ -164,7 +164,7 @@ public override int Count { get { - Debugging.Assert(() => preTerms.Count == fields.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => preTerms.Count == fields.Count); return fields.Count; } } @@ -249,7 +249,7 @@ public override bool HasOffsets get { // preflex doesn't support this - Debugging.Assert(() => fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) < 0); return false; } } @@ -304,11 +304,11 @@ private bool SeekToNonBMP(SegmentTermEnum te, BytesRef term, int pos) { int savLength = term.Length; - Debugging.Assert(() => term.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Offset == 0); // The 3 bytes starting at downTo make up 1 // unicode character: - Debugging.Assert(() => IsHighBMPChar(term.Bytes, pos)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsHighBMPChar(term.Bytes, pos)); // NOTE: we cannot make this assert, because // AutomatonQuery legitimately sends us malformed UTF8 @@ -361,7 +361,7 @@ private bool SeekToNonBMP(SegmentTermEnum te, BytesRef term, int pos) // Now test if prefix is identical and we found // a non-BMP char at the same position: BytesRef b2 = t2.Bytes; - Debugging.Assert(() => b2.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b2.Offset == 0); bool matches; if (b2.Length >= term.Length && IsNonBMPChar(b2.Bytes, pos)) @@ -465,8 +465,11 @@ private bool DoPop() Console.WriteLine(" try pop"); } - Debugging.Assert(() => newSuffixStart <= prevTerm.Length); - Debugging.Assert(() => newSuffixStart < scratchTerm.Length || newSuffixStart == 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => newSuffixStart <= prevTerm.Length); + Debugging.Assert(() => newSuffixStart < scratchTerm.Length || newSuffixStart == 0); + } if (prevTerm.Length > newSuffixStart && IsNonBMPChar(prevTerm.Bytes, newSuffixStart) && IsHighBMPChar(scratchTerm.Bytes, newSuffixStart)) { @@ -495,7 +498,7 @@ private bool DoPop() } BytesRef b2 = t2.Bytes; - Debugging.Assert(() => b2.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b2.Offset == 0); // Set newSuffixStart -- we can't use // termEnum's since the above seek may have @@ -596,8 +599,11 @@ private void SurrogateDance() // this code assumes TermInfosReader/SegmentTermEnum // always use BytesRef.offset == 0 - Debugging.Assert(() => prevTerm.Offset == 0); - Debugging.Assert(() => scratchTerm.Offset == 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => prevTerm.Offset == 0); + Debugging.Assert(() => scratchTerm.Offset == 0); + } // Need to loop here because we may need to do multiple // pops, and possibly a 
continue in the end, ie: @@ -650,7 +656,7 @@ private void DoPushes() if (IsNonBMPChar(scratchTerm.Bytes, upTo) && (upTo > newSuffixStart || (upTo >= prevTerm.Length || (!IsNonBMPChar(prevTerm.Bytes, upTo) && !IsHighBMPChar(prevTerm.Bytes, upTo))))) { // A non-BMP char (4 bytes UTF8) starts here: - Debugging.Assert(() => scratchTerm.Length >= upTo + 4); + if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchTerm.Length >= upTo + 4); int savLength = scratchTerm.Length; scratch[0] = (sbyte)scratchTerm.Bytes[upTo]; @@ -698,7 +704,7 @@ private void DoPushes() if (t2 != null && t2.Field == internedFieldName) { BytesRef b2 = t2.Bytes; - Debugging.Assert(() => b2.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b2.Offset == 0); if (b2.Length >= upTo + 3 && IsHighBMPChar(b2.Bytes, upTo)) { matches = true; @@ -823,7 +829,7 @@ public override SeekStatus SeekCeil(BytesRef term) TermInfosReader tis = outerInstance.TermsDict; Term t0 = new Term(fieldInfo.Name, term); - Debugging.Assert(() => termEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termEnum != null); tis.SeekEnum(termEnum, t0, false); @@ -854,7 +860,7 @@ public override SeekStatus SeekCeil(BytesRef term) // find an E, try swapping in S, backwards: scratchTerm.CopyBytes(term); - Debugging.Assert(() => scratchTerm.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchTerm.Offset == 0); for (int i = scratchTerm.Length - 1; i >= 0; i--) { @@ -903,7 +909,7 @@ public override SeekStatus SeekCeil(BytesRef term) } BytesRef br = t.Bytes; - Debugging.Assert(() => br.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => br.Offset == 0); SetNewSuffixStart(term, br); @@ -913,14 +919,14 @@ public override SeekStatus SeekCeil(BytesRef term) if (t2 == null || t2.Field != internedFieldName) { // PreFlex codec interns field names; verify: - Debugging.Assert(() => t2 == null || !t2.Field.Equals(internedFieldName, StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => t2 == null || !t2.Field.Equals(internedFieldName, StringComparison.Ordinal)); current = null; return SeekStatus.END; } else { current = t2.Bytes; - Debugging.Assert(() => !unicodeSortOrder || term.CompareTo(current) < 0, () => "term=" + UnicodeUtil.ToHexString(term.Utf8ToString()) + " vs current=" + UnicodeUtil.ToHexString(current.Utf8ToString())); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !unicodeSortOrder || term.CompareTo(current) < 0, () => "term=" + UnicodeUtil.ToHexString(term.Utf8ToString()) + " vs current=" + UnicodeUtil.ToHexString(current.Utf8ToString())); return SeekStatus.NOT_FOUND; } } @@ -996,7 +1002,7 @@ public override BytesRef Next() if (t == null || t.Field != internedFieldName) { // PreFlex codec interns field names; verify: - Debugging.Assert(() => t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); current = null; } else @@ -1021,7 +1027,7 @@ public override BytesRef Next() if (t == null || t.Field != internedFieldName) { // PreFlex codec interns field names; verify: - Debugging.Assert(() => t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); return null; } else @@ -1190,7 +1196,7 @@ public override int Advance(int target) public override int 
NextPosition() { - Debugging.Assert(() => docID != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID != NO_MORE_DOCS); return pos.NextPosition(); } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs index 5e2c5d09de..36cb64e3e5 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs @@ -136,7 +136,7 @@ public Lucene3xNormsProducer(Directory dir, SegmentInfo info, FieldInfos fields, } } // TODO: change to a real check? see LUCENE-3619 - Debugging.Assert(() => singleNormStream == null || nextNormSeek == singleNormStream.Length, () => singleNormStream != null ? "len: " + singleNormStream.Length + " expected: " + nextNormSeek : "null"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => singleNormStream == null || nextNormSeek == singleNormStream.Length, () => singleNormStream != null ? "len: " + singleNormStream.Length + " expected: " + nextNormSeek : "null"); success = true; } finally @@ -188,7 +188,7 @@ private static bool HasSeparateNorms(SegmentInfo info, int number) } else { - Debugging.Assert(() => Convert.ToInt64(v, CultureInfo.InvariantCulture) != SegmentInfo.NO); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Convert.ToInt64(v, CultureInfo.InvariantCulture) != SegmentInfo.NO); return true; } } @@ -258,7 +258,7 @@ public override long Get(int docID) public override NumericDocValues GetNumeric(FieldInfo field) { var dv = norms[field.Name]; - Debugging.Assert(() => dv != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dv != null); return dv.Instance; } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs index 9b8ee1528a..9b12c54f16 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs @@ -193,7 +193,7 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index //System.out.println("version=" + version + " name=" + name + " docCount=" + docCount + " delGen=" + delGen + " dso=" + docStoreOffset + " dss=" + docStoreSegment + " dssCFs=" + docStoreIsCompoundFile + " b=" + b + " format=" + format); - Debugging.Assert(() => 1 == b, () => "expected 1 but was: " + b + " format: " + format); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 1 == b, () => "expected 1 but was: " + b + " format: " + format); int numNormGen = input.ReadInt32(); IDictionary normGen; if (numNormGen == SegmentInfo.NO) @@ -211,7 +211,7 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index bool isCompoundFile = input.ReadByte() == SegmentInfo.YES; int delCount = input.ReadInt32(); - Debugging.Assert(() => delCount <= docCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount <= docCount); bool hasProx = input.ReadByte() == 1; @@ -283,7 +283,7 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index else { // We should have already hit indexformat too old exception - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); } } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs index a9eb3e5fbe..95649dd0a3 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs +++ 
b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs @@ -193,7 +193,7 @@ public Lucene3xStoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IO // Verify the file is long enough to hold all of our // docs - Debugging.Assert(() => ((int)(indexSize / 8)) >= size + this.docStoreOffset, () => "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ((int)(indexSize / 8)) >= size + this.docStoreOffset, () => "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset); } else { @@ -273,7 +273,7 @@ public override sealed void VisitDocument(int n, StoredFieldVisitor visitor) FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber); int bits = fieldsStream.ReadByte() & 0xFF; - Debugging.Assert(() => bits <= (FIELD_IS_NUMERIC_MASK | FIELD_IS_BINARY), () => "bits=" + bits.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bits <= (FIELD_IS_NUMERIC_MASK | FIELD_IS_BINARY), () => "bits=" + bits.ToString("x")); switch (visitor.NeedsField(fieldInfo)) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs index 4822cd5958..fd12bbaf81 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs @@ -140,8 +140,11 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn tvf = d.OpenInput(fn, context); int tvfFormat = CheckValidFormat(tvf); - Debugging.Assert(() => format == tvdFormat); - Debugging.Assert(() => format == tvfFormat); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => format == tvdFormat); + Debugging.Assert(() => format == tvfFormat); + } numTotalDocs = (int)(tvx.Length >> 4); @@ -149,7 +152,7 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn { this.docStoreOffset = 0; this.size = numTotalDocs; - Debugging.Assert(() => size == 0 || numTotalDocs == size); + if (Debugging.AssertsEnabled) Debugging.Assert(() => size == 0 || numTotalDocs == size); } else { @@ -157,7 +160,7 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn this.size = size; // Verify the file is long enough to hold all of our // docs - Debugging.Assert(() => numTotalDocs >= size + docStoreOffset, () => "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTotalDocs >= size + docStoreOffset, () => "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset); } this.fieldInfos = fieldInfos; @@ -233,7 +236,7 @@ public TVFields(Lucene3xTermVectorsReader outerInstance, int docID) outerInstance.tvd.Seek(outerInstance.tvx.ReadInt64()); int fieldCount = outerInstance.tvd.ReadVInt32(); - Debugging.Assert(() => fieldCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount >= 0); if (fieldCount != 0) { fieldNumbers = new int[fieldCount]; @@ -686,7 +689,7 @@ public override int Freq } else { - Debugging.Assert(() => startOffsets != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffsets != null); return startOffsets.Length; } } @@ -737,7 +740,7 @@ public override BytesRef GetPayload() public override int NextPosition() { - Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); + if (Debugging.AssertsEnabled) 
Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); if (positions != null) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs index 691e1d9145..80fe7fa61e 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs @@ -171,7 +171,7 @@ public virtual bool Next() else { freq = m_freqStream.ReadVInt32(); // else read freq - Debugging.Assert(() => freq != 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => freq != 1); } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs index af78aa04f3..5bf10aa3ea 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs @@ -105,8 +105,11 @@ public SegmentTermEnum(IndexInput i, FieldInfos fis, bool isi) indexInterval = input.ReadInt32(); skipInterval = input.ReadInt32(); maxSkipLevels = input.ReadInt32(); - Debugging.Assert(() => indexInterval > 0, () => "indexInterval=" + indexInterval + " is negative; must be > 0"); - Debugging.Assert(() => skipInterval > 0, () => "skipInterval=" + skipInterval + " is negative; must be > 0"); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => indexInterval > 0, () => "indexInterval=" + indexInterval + " is negative; must be > 0"); + Debugging.Assert(() => skipInterval > 0, () => "skipInterval=" + skipInterval + " is negative; must be > 0"); + } } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs index 87d74684f5..c7dd0e2d21 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs @@ -169,7 +169,7 @@ protected internal override void SkipProx(long proxPointer, int payloadLength) private void SkipPositions(int n) { - Debugging.Assert(() => m_indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); for (int f = n; f > 0; f--) // skip unread positions { ReadDeltaPosition(); diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs b/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs index 045c149608..de422f4775 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs @@ -70,7 +70,7 @@ public void Read(IndexInput input, FieldInfos fieldInfos) newSuffixStart = input.ReadVInt32(); int length = input.ReadVInt32(); int totalLength = newSuffixStart + length; - Debugging.Assert(() => totalLength <= ByteBlockPool.BYTE_BLOCK_SIZE - 2, () => "termLength=" + totalLength + ",resource=" + input); + if (Debugging.AssertsEnabled) Debugging.Assert(() => totalLength <= ByteBlockPool.BYTE_BLOCK_SIZE - 2, () => "termLength=" + totalLength + ",resource=" + input); if (bytes.Bytes.Length < totalLength) { bytes.Grow(totalLength); @@ -88,14 +88,14 @@ public void Read(IndexInput input, FieldInfos fieldInfos) } else { - Debugging.Assert(() => fieldInfos.FieldInfo(currentFieldNumber) != null, currentFieldNumber.ToString); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfos.FieldInfo(currentFieldNumber) != null, currentFieldNumber.ToString); field = fieldInfos.FieldInfo(currentFieldNumber).Name.Intern(); } } else { - Debugging.Assert(() => 
field.Equals(fieldInfos.FieldInfo(fieldNumber).Name, StringComparison.Ordinal), + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Equals(fieldInfos.FieldInfo(fieldNumber).Name, StringComparison.Ordinal), () => "currentFieldNumber=" + currentFieldNumber + " field=" + field + " vs " + fieldInfos.FieldInfo(fieldNumber) == null ? "null" : fieldInfos.FieldInfo(fieldNumber).Name); diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs index 82adf13cdd..fd148496ac 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs @@ -65,7 +65,7 @@ public sealed class TermInfoAndOrd : TermInfo public TermInfoAndOrd(TermInfo ti, long termOrd) : base(ti) { - Debugging.Assert(() => termOrd >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termOrd >= 0); this.termOrd = termOrd; } } @@ -291,7 +291,7 @@ internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, TermInfoAndOrd termsCache.Put(new CloneableTerm(DeepCopyOf(term)), new TermInfoAndOrd(ti, enumerator.position)); } } - else + else if (Debugging.AssertsEnabled) { Debugging.Assert(() => SameTermInfo(ti, tiOrd, enumerator)); Debugging.Assert(() => (int)enumerator.position == tiOrd.termOrd); @@ -333,7 +333,7 @@ internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, TermInfoAndOrd termsCache.Put(new CloneableTerm(DeepCopyOf(term)), new TermInfoAndOrd(ti_, enumerator.position)); } } - else + else if (Debugging.AssertsEnabled) { Debugging.Assert(() => SameTermInfo(ti_, tiOrd, enumerator)); Debugging.Assert(() => enumerator.position == tiOrd.termOrd); diff --git a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs index f9409ae88f..c68d8e489e 100644 --- a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs +++ b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs @@ -126,7 +126,7 @@ public bool GetAndSet(int bit) if (count != -1) { count++; - Debugging.Assert(() => count <= size); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count <= size); } return false; } @@ -163,7 +163,7 @@ public bool GetAndClear(int bit) if (count != -1) { count--; - Debugging.Assert(() => count >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count >= 0); } return true; } @@ -175,7 +175,7 @@ public bool GetAndClear(int bit) /// public bool Get(int bit) { - Debugging.Assert(() => bit >= 0 && bit < size, () => "bit " + bit + " is out of bounds 0.." + (size - 1)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bit >= 0 && bit < size, () => "bit " + bit + " is out of bounds 0.." 
+ (size - 1)); return (bits[bit >> 3] & (1 << (bit & 7))) != 0; } @@ -215,7 +215,7 @@ public int Count() // LUCENENET TODO: API - make into a property } count = c; } - Debugging.Assert(() => count <= size, () => "count=" + count + " size=" + size); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count <= size, () => "count=" + count + " size=" + size); return count; } @@ -259,7 +259,7 @@ public int GetRecomputedCount() /// public void Write(Directory d, string name, IOContext context) { - Debugging.Assert(() => !(d is CompoundFileDirectory)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !(d is CompoundFileDirectory)); IndexOutput output = d.CreateOutput(name, context); try { @@ -275,7 +275,7 @@ public void Write(Directory d, string name, IOContext context) WriteBits(output); } CodecUtil.WriteFooter(output); - Debugging.Assert(VerifyCount); + if (Debugging.AssertsEnabled) Debugging.Assert(VerifyCount); } finally { @@ -351,7 +351,7 @@ private void WriteClearedDgaps(IndexOutput output) output.WriteByte(bits[i]); last = i; numCleared -= (8 - BitUtil.BitCount(bits[i])); - Debugging.Assert(() => numCleared >= 0 || (i == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numCleared >= 0 || (i == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); } } } @@ -460,7 +460,7 @@ public BitVector(Directory d, string name, IOContext context) CodecUtil.CheckEOF(input); #pragma warning restore 612, 618 } - Debugging.Assert(VerifyCount); + if (Debugging.AssertsEnabled) Debugging.Assert(VerifyCount); } finally { @@ -471,10 +471,10 @@ public BitVector(Directory d, string name, IOContext context) // asserts only private bool VerifyCount() { - Debugging.Assert(() => count != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count != -1); int countSav = count; count = -1; - Debugging.Assert(() => countSav == Count(), () => "saved count was " + countSav + " but recomputed count is " + count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => countSav == Count(), () => "saved count was " + countSav + " but recomputed count is " + count); return true; } @@ -501,7 +501,7 @@ private void ReadSetDgaps(IndexInput input) last += input.ReadVInt32(); bits[last] = input.ReadByte(); n -= BitUtil.BitCount(bits[last]); - Debugging.Assert(() => n >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => n >= 0); } } @@ -524,7 +524,7 @@ private void ReadClearedDgaps(IndexInput input) last += input.ReadVInt32(); bits[last] = input.ReadByte(); numCleared -= 8 - BitUtil.BitCount(bits[last]); - Debugging.Assert(() => numCleared >= 0 || (last == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numCleared >= 0 || (last == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); } } } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs index 98dc1a6c2a..f0ef72a5c2 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs @@ -92,8 +92,11 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont { string filename = IndexFileNames.FileNameFromGeneration(info.Info.Name, DELETES_EXTENSION, info.DelGen); BitVector liveDocs = new BitVector(dir, filename, context); - Debugging.Assert(() => liveDocs.Count() == info.Info.DocCount - info.DelCount, () => "liveDocs.count()=" + liveDocs.Count() + " info.docCount=" + 
info.Info.DocCount + " info.getDelCount()=" + info.DelCount); - Debugging.Assert(() => liveDocs.Length == info.Info.DocCount); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => liveDocs.Count() == info.Info.DocCount - info.DelCount, () => "liveDocs.count()=" + liveDocs.Count() + " info.docCount=" + info.Info.DocCount + " info.getDelCount()=" + info.DelCount); + Debugging.Assert(() => liveDocs.Length == info.Info.DocCount); + } return liveDocs; } @@ -102,8 +105,11 @@ public override void WriteLiveDocs(IMutableBits bits, Directory dir, SegmentComm { string filename = IndexFileNames.FileNameFromGeneration(info.Info.Name, DELETES_EXTENSION, info.NextDelGen); BitVector liveDocs = (BitVector)bits; - Debugging.Assert(() => liveDocs.Count() == info.Info.DocCount - info.DelCount - newDelCount); - Debugging.Assert(() => liveDocs.Length == info.Info.DocCount); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => liveDocs.Count() == info.Info.DocCount - info.DelCount - newDelCount); + Debugging.Assert(() => liveDocs.Length == info.Info.DocCount); + } liveDocs.Write(dir, filename, context); } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs index b19545dc3a..65c6b31471 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs @@ -235,7 +235,7 @@ private Lucene40PostingsFormat(int minBlockSize, int maxBlockSize) : base() { this.m_minBlockSize = minBlockSize; - Debugging.Assert(() => minBlockSize > 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minBlockSize > 1); this.m_maxBlockSize = maxBlockSize; } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs index c2a5f96a00..b6d75b11d5 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs @@ -193,13 +193,13 @@ public override void DecodeTerm(long[] longs, DataInput @in, FieldInfo fieldInfo System.out.println(" freqFP=" + termState2.freqOffset); } */ - Debugging.Assert(() => termState2.freqOffset < freqIn.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState2.freqOffset < freqIn.Length); if (termState2.DocFreq >= skipMinimum) { termState2.skipOffset = @in.ReadVInt64(); // if (DEBUG) System.out.println(" skipOffset=" + termState2.skipOffset + " vs freqIn.length=" + freqIn.length()); - Debugging.Assert(() => termState2.freqOffset + termState2.skipOffset < freqIn.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState2.freqOffset + termState2.skipOffset < freqIn.Length); } else { @@ -356,7 +356,7 @@ internal virtual DocsEnum Reset(FieldInfo fieldInfo, StandardTermState termState // cases freqIn.Seek(termState.freqOffset); m_limit = termState.DocFreq; - Debugging.Assert(() => m_limit > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_limit > 0); m_ord = 0; m_doc = -1; m_accum = 0; @@ -546,7 +546,7 @@ internal AllDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInput : base(outerInstance, startFreqIn, null) { this.outerInstance = outerInstance; - Debugging.Assert(() => m_liveDocs == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_liveDocs == null); } public override int NextDoc() @@ -639,7 +639,7 @@ internal LiveDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInpu : base(outerInstance, startFreqIn, liveDocs) { this.outerInstance = 
                 this.outerInstance = outerInstance;
-                Debugging.Assert(() => liveDocs != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => liveDocs != null);
             }

             public override int NextDoc()
@@ -783,8 +783,11 @@ public SegmentDocsAndPositionsEnum(Lucene40PostingsReader outerInstance, IndexIn

             public SegmentDocsAndPositionsEnum Reset(FieldInfo fieldInfo, StandardTermState termState, IBits liveDocs)
             {
-                Debugging.Assert(() => fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
-                Debugging.Assert(() => !fieldInfo.HasPayloads);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+                    Debugging.Assert(() => !fieldInfo.HasPayloads);
+                }

                 this.liveDocs = liveDocs;
@@ -795,7 +798,7 @@ public SegmentDocsAndPositionsEnum Reset(FieldInfo fieldInfo, StandardTermState
                 lazyProxPointer = termState.proxOffset;

                 limit = termState.DocFreq;
-                Debugging.Assert(() => limit > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => limit > 0);

                 ord = 0;
                 doc = -1;
@@ -930,7 +933,7 @@ public override int NextPosition()

                 posPendingCount--;

-                Debugging.Assert(() => posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount);

                 return position;
             }
@@ -1003,8 +1006,11 @@ public virtual SegmentFullPositionsEnum Reset(FieldInfo fieldInfo, StandardTermS
             {
                 storeOffsets = fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
                 storePayloads = fieldInfo.HasPayloads;
-                Debugging.Assert(() => fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0);
-                Debugging.Assert(() => storePayloads || storeOffsets);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0);
+                    Debugging.Assert(() => storePayloads || storeOffsets);
+                }
                 if (payload == null)
                 {
                     payload = new BytesRef();
@@ -1160,9 +1166,9 @@ public override int NextPosition()
                         {
                             // new payload length
                             payloadLength = proxIn.ReadVInt32();
-                            Debugging.Assert(() => payloadLength >= 0);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength >= 0);
                         }
-                        Debugging.Assert(() => payloadLength != -1);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength != -1);
                     }

                     if (storeOffsets)
@@ -1200,9 +1206,9 @@ public override int NextPosition()
                     {
                         // new payload length
                         payloadLength = proxIn.ReadVInt32();
-                        Debugging.Assert(() => payloadLength >= 0);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength >= 0);
                     }
-                    Debugging.Assert(() => payloadLength != -1);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength != -1);

                     payloadPending = true;
                     code_ = (int)((uint)code_ >> 1);
@@ -1222,7 +1228,7 @@ public override int NextPosition()

                 posPendingCount--;

-                Debugging.Assert(() => posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount);

                 //System.out.println("StandardR.D&PE nextPos return pos=" + position);
                 return position;
@@ -1244,8 +1250,11 @@ public override BytesRef GetPayload()
                 {
                     return null;
                 }
-                Debugging.Assert(() => lazyProxPointer == -1);
-                Debugging.Assert(() => posPendingCount < freq);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => lazyProxPointer == -1);
+                    Debugging.Assert(() => posPendingCount < freq);
+                }
                 if (payloadPending)
                 {
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
index 7057d681d7..d5d158cd1f 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
@@ -92,8 +92,11 @@ public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IO
                 CodecUtil.CheckHeader(indexStream, Lucene40StoredFieldsWriter.CODEC_NAME_IDX, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
                 CodecUtil.CheckHeader(fieldsStream, Lucene40StoredFieldsWriter.CODEC_NAME_DAT, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
-                Debugging.Assert(() => Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == fieldsStream.GetFilePointer());
-                Debugging.Assert(() => Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == indexStream.GetFilePointer());
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == fieldsStream.GetFilePointer());
+                    Debugging.Assert(() => Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == indexStream.GetFilePointer());
+                }
                 long indexSize = indexStream.Length - Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX;
                 this.size = (int)(indexSize >> 3);
                 // Verify two sources of "maxDoc" agree:
@@ -174,7 +177,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor)
                 FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);

                 int bits = fieldsStream.ReadByte() & 0xFF;
-                Debugging.Assert(() => bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), () => "bits=" + bits.ToString("x"));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), () => "bits=" + bits.ToString("x"));

                 switch (visitor.NeedsField(fieldInfo))
                 {
@@ -281,7 +284,7 @@ public IndexInput RawDocs(int[] lengths, int startDocID, int numDocs)
             {
                 long offset;
                 int docID = startDocID + count + 1;
-                Debugging.Assert(() => docID <= numTotalDocs);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => docID <= numTotalDocs);
                 if (docID < numTotalDocs)
                 {
                     offset = indexStream.ReadInt64();
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
index 4e39ff108b..d4e1ada0ee 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
@@ -91,7 +91,7 @@ public sealed class Lucene40StoredFieldsWriter : StoredFieldsWriter
         /// Sole constructor.
         public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext context)
         {
-            Debugging.Assert(() => directory != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => directory != null);
             this.directory = directory;
             this.segment = segment;
@@ -103,8 +103,11 @@ public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext
                 CodecUtil.WriteHeader(fieldsStream, CODEC_NAME_DAT, VERSION_CURRENT);
                 CodecUtil.WriteHeader(indexStream, CODEC_NAME_IDX, VERSION_CURRENT);
-                Debugging.Assert(() => HEADER_LENGTH_DAT == fieldsStream.GetFilePointer());
-                Debugging.Assert(() => HEADER_LENGTH_IDX == indexStream.GetFilePointer());
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => HEADER_LENGTH_DAT == fieldsStream.GetFilePointer());
+                    Debugging.Assert(() => HEADER_LENGTH_IDX == indexStream.GetFilePointer());
+                }
                 success = true;
             }
             finally
@@ -263,7 +266,7 @@ public void AddRawDocuments(IndexInput stream, int[] lengths, int numDocs)
                 position += lengths[i];
             }
             fieldsStream.CopyBytes(stream, position - start);
-            Debugging.Assert(() => fieldsStream.GetFilePointer() == position);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldsStream.GetFilePointer() == position);
         }

         public override void Finish(FieldInfos fis, int numDocs)
@@ -325,7 +328,7 @@ private int CopyFieldsWithDeletions(MergeState mergeState, AtomicReader reader,
             int docCount = 0;
             int maxDoc = reader.MaxDoc;
             IBits liveDocs = reader.LiveDocs;
-            Debugging.Assert(() => liveDocs != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => liveDocs != null);
             if (matchingFieldsReader != null)
             {
                 // We can bulk-copy because the fieldInfos are "congruent"
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
index 8251d2e075..9684342c01 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
@@ -120,16 +120,19 @@ public Lucene40TermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn
                 fn = IndexFileNames.SegmentFileName(segment, "", VECTORS_FIELDS_EXTENSION);
                 tvf = d.OpenInput(fn, context);
                 int tvfVersion = CodecUtil.CheckHeader(tvf, CODEC_NAME_FIELDS, VERSION_START, VERSION_CURRENT);
-                Debugging.Assert(() => HEADER_LENGTH_INDEX == tvx.GetFilePointer());
-                Debugging.Assert(() => HEADER_LENGTH_DOCS == tvd.GetFilePointer());
-                Debugging.Assert(() => HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
-                Debugging.Assert(() => tvxVersion == tvdVersion);
-                Debugging.Assert(() => tvxVersion == tvfVersion);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => HEADER_LENGTH_INDEX == tvx.GetFilePointer());
+                    Debugging.Assert(() => HEADER_LENGTH_DOCS == tvd.GetFilePointer());
+                    Debugging.Assert(() => HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
+                    Debugging.Assert(() => tvxVersion == tvdVersion);
+                    Debugging.Assert(() => tvxVersion == tvfVersion);
+                }

                 numTotalDocs = (int)(tvx.Length - HEADER_LENGTH_INDEX >> 4);

                 this.size = numTotalDocs;
-                Debugging.Assert(() => size == 0 || numTotalDocs == size);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => size == 0 || numTotalDocs == size);

                 this.fieldInfos = fieldInfos;
                 success = true;
@@ -200,7 +203,7 @@ internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int startDocID, int nu
             while (count < numDocs)
             {
                 int docID = startDocID + count + 1;
-                Debugging.Assert(() => docID <= numTotalDocs);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => docID <= numTotalDocs);
                 if (docID < numTotalDocs)
                 {
                     tvdPosition = tvx.ReadInt64();
@@ -210,7 +213,7 @@ internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int startDocID, int nu
                 {
                     tvdPosition = tvd.Length;
                     tvfPosition = tvf.Length;
-                    Debugging.Assert(() => count == numDocs - 1);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => count == numDocs - 1);
                 }
                 tvdLengths[count] = (int)(tvdPosition - lastTvdPosition);
                 tvfLengths[count] = (int)(tvfPosition - lastTvfPosition);
@@ -248,7 +251,7 @@ public TVFields(Lucene40TermVectorsReader outerInstance, int docID)
                 outerInstance.tvd.Seek(outerInstance.tvx.ReadInt64());

                 int fieldCount = outerInstance.tvd.ReadVInt32();
-                Debugging.Assert(() => fieldCount >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount >= 0);
                 if (fieldCount != 0)
                 {
                     fieldNumbers = new int[fieldCount];
@@ -518,7 +521,7 @@ public override BytesRef Next()
                         }
                         payloadOffsets[posUpto] = totalPayloadLength;
                         totalPayloadLength += lastPayloadLength;
-                        Debugging.Assert(() => totalPayloadLength >= 0);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => totalPayloadLength >= 0);
                     }
                     payloadData = new byte[totalPayloadLength];
                     tvf.ReadBytes(payloadData, 0, payloadData.Length);
@@ -668,7 +671,7 @@ public override int Freq
                 }
                 else
                 {
-                    Debugging.Assert(() => startOffsets != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffsets != null);
                    return startOffsets.Length;
                 }
             }
@@ -730,7 +733,7 @@ public override BytesRef GetPayload()

             public override int NextPosition()
             {
-                Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
                 if (positions != null)
                 {
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
index 0a6e736377..dc72a6b78d 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
@@ -78,9 +78,12 @@ public Lucene40TermVectorsWriter(Directory directory, string segment, IOContext
                 CodecUtil.WriteHeader(tvd, Lucene40TermVectorsReader.CODEC_NAME_DOCS, Lucene40TermVectorsReader.VERSION_CURRENT);
                 tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
                 CodecUtil.WriteHeader(tvf, Lucene40TermVectorsReader.CODEC_NAME_FIELDS, Lucene40TermVectorsReader.VERSION_CURRENT);
-                Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.GetFilePointer());
-                Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.GetFilePointer());
-                Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.GetFilePointer());
+                    Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.GetFilePointer());
+                    Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
+                }
                 success = true;
             }
             finally
@@ -110,7 +113,7 @@ public override void StartDocument(int numVectorFields)

         public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
         {
-            Debugging.Assert(() => lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName);
             lastFieldName = info.Name;
             this.positions = positions;
             this.offsets = offsets;
@@ -139,7 +142,7 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo
         [MethodImpl(MethodImplOptions.NoInlining)]
         public override void FinishDocument()
         {
-            Debugging.Assert(() => fieldCount == numVectorFields);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == numVectorFields);
             for (int i = 1; i < fieldCount; i++)
             {
                 tvd.WriteVInt64(fps[i] - fps[i - 1]);
@@ -268,8 +271,11 @@ public override void FinishTerm()
             if (bufferedIndex > 0)
             {
                 // dump buffer
-                Debugging.Assert(() => positions && (offsets || payloads));
-                Debugging.Assert(() => bufferedIndex == bufferedFreq);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => positions && (offsets || payloads));
+                    Debugging.Assert(() => bufferedIndex == bufferedFreq);
+                }
                 if (payloads)
                 {
                     tvf.WriteBytes(payloadData.Bytes, payloadData.Offset, payloadData.Length);
@@ -357,8 +363,11 @@ private void AddRawDocuments(Lucene40TermVectorsReader reader, int[] tvdLengths,
             }
             tvd.CopyBytes(reader.TvdStream, tvdPosition - tvdStart);
             tvf.CopyBytes(reader.TvfStream, tvfPosition - tvfStart);
-            Debugging.Assert(() => tvd.GetFilePointer() == tvdPosition);
-            Debugging.Assert(() => tvf.GetFilePointer() == tvfPosition);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => tvd.GetFilePointer() == tvdPosition);
+                Debugging.Assert(() => tvf.GetFilePointer() == tvfPosition);
+            }
         }

         [MethodImpl(MethodImplOptions.NoInlining)]
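Where two or more asserts run back to back, the hunks above hoist a single guard around the group, so the flag is tested once for the whole block. The two forms are equivalent, as the sketch below shows; the constants and parameter names are stand-ins invented for the example:

    using Lucene.Net.Diagnostics;

    internal static class GroupedGuardSketch
    {
        private const long HEADER_LENGTH_DAT = 16; // stand-in value
        private const long HEADER_LENGTH_IDX = 16; // stand-in value

        internal static void VerifyHeaders(long fieldsPointer, long indexPointer)
        {
            // Per-statement form: tests AssertsEnabled once per assert.
            if (Debugging.AssertsEnabled) Debugging.Assert(() => HEADER_LENGTH_DAT == fieldsPointer);
            if (Debugging.AssertsEnabled) Debugging.Assert(() => HEADER_LENGTH_IDX == indexPointer);

            // Grouped form used for adjacent asserts: one test covers the block.
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(() => HEADER_LENGTH_DAT == fieldsPointer);
                Debugging.Assert(() => HEADER_LENGTH_IDX == indexPointer);
            }
        }
    }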
diff --git a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
index cb39d5e34b..871c2b06c7 100644
--- a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
@@ -88,7 +88,7 @@ private static int ComputeIterations(PackedInt32s.IDecoder decoder)
         private static int EncodedSize(PackedInt32s.Format format, int packedIntsVersion, int bitsPerValue)
         {
             long byteCount = format.ByteCount(packedIntsVersion, Lucene41PostingsFormat.BLOCK_SIZE, bitsPerValue);
-            Debugging.Assert(() => byteCount >= 0 && byteCount <= int.MaxValue, byteCount.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => byteCount >= 0 && byteCount <= int.MaxValue, byteCount.ToString);
             return (int)byteCount;
         }
@@ -111,8 +111,11 @@ internal ForUtil(float acceptableOverheadRatio, DataOutput @out)
             for (int bpv = 1; bpv <= 32; ++bpv)
             {
                 PackedInt32s.FormatAndBits formatAndBits = PackedInt32s.FastestFormatAndBits(Lucene41PostingsFormat.BLOCK_SIZE, bpv, acceptableOverheadRatio);
-                Debugging.Assert(() => formatAndBits.Format.IsSupported(formatAndBits.BitsPerValue));
-                Debugging.Assert(() => formatAndBits.BitsPerValue <= 32);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => formatAndBits.Format.IsSupported(formatAndBits.BitsPerValue));
+                    Debugging.Assert(() => formatAndBits.BitsPerValue <= 32);
+                }
                 encodedSizes[bpv] = EncodedSize(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
                 encoders[bpv] = PackedInt32s.GetEncoder(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
                 decoders[bpv] = PackedInt32s.GetDecoder(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
@@ -141,7 +144,7 @@ internal ForUtil(DataInput @in)
                 var bitsPerValue = (code & 31) + 1;

                 PackedInt32s.Format format = PackedInt32s.Format.ById(formatId);
-                Debugging.Assert(() => format.IsSupported(bitsPerValue));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => format.IsSupported(bitsPerValue));
                 encodedSizes[bpv] = EncodedSize(format, packedIntsVersion, bitsPerValue);
                 encoders[bpv] = PackedInt32s.GetEncoder(format, packedIntsVersion, bitsPerValue);
                 decoders[bpv] = PackedInt32s.GetDecoder(format, packedIntsVersion, bitsPerValue);
@@ -166,12 +169,12 @@ internal void WriteBlock(int[] data, byte[] encoded, IndexOutput @out)
             }

             int numBits = BitsRequired(data);
-            Debugging.Assert(() => numBits > 0 && numBits <= 32, numBits.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numBits > 0 && numBits <= 32, numBits.ToString);
             PackedInt32s.IEncoder encoder = encoders[numBits];
             int iters = iterations[numBits];
-            Debugging.Assert(() => iters * encoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => iters * encoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
             int encodedSize = encodedSizes[numBits];
-            Debugging.Assert(() => iters * encoder.ByteBlockCount >= encodedSize);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => iters * encoder.ByteBlockCount >= encodedSize);

             @out.WriteByte((byte)numBits);
@@ -189,7 +192,7 @@ internal void WriteBlock(int[] data, byte[] encoded, IndexOutput @out)
         internal void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)
         {
             int numBits = @in.ReadByte();
-            Debugging.Assert(() => numBits <= 32, numBits.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numBits <= 32, numBits.ToString);

             if (numBits == ALL_VALUES_EQUAL)
             {
@@ -203,7 +206,7 @@ internal void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)

             PackedInt32s.IDecoder decoder = decoders[numBits];
             int iters = iterations[numBits];
-            Debugging.Assert(() => iters * decoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => iters * decoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);

             decoder.Decode(encoded, 0, decoded, 0, iters);
         }
@@ -221,7 +224,7 @@ internal void SkipBlock(IndexInput @in)
                 @in.ReadVInt32();
                 return;
             }
-            Debugging.Assert(() => numBits > 0 && numBits <= 32, numBits.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => numBits > 0 && numBits <= 32, numBits.ToString);
             int encodedSize = encodedSizes[numBits];
             @in.Seek(@in.GetFilePointer() + encodedSize);
         }
@@ -248,7 +251,7 @@ private static int BitsRequired(int[] data)
             long or = 0;
             for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE; ++i)
             {
-                Debugging.Assert(() => data[i] >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => data[i] >= 0);
                 or |= (uint)data[i];
             }
             return PackedInt32s.BitsRequired(or);
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
index 02115c90b5..f589769535 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
@@ -392,9 +392,9 @@ public Lucene41PostingsFormat(int minTermBlockSize, int maxTermBlockSize)
             : base()
         {
             this.minTermBlockSize = minTermBlockSize;
-            Debugging.Assert(() => minTermBlockSize > 1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => minTermBlockSize > 1);
             this.maxTermBlockSize = maxTermBlockSize;
-            Debugging.Assert(() => minTermBlockSize <= maxTermBlockSize);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => minTermBlockSize <= maxTermBlockSize);
         }

         public override string ToString()
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
index 0ef9de60ab..52c0859822 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
@@ -402,7 +402,7 @@ public DocsEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32BlockTermState
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                Debugging.Assert(() => left > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -511,7 +511,7 @@ public override int Advance(int target)

                     if (!skipped)
                     {
-                        Debugging.Assert(() => skipOffset != -1);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         skipper.Init(docTermStartFP + skipOffset, docTermStartFP, 0, 0, docFreq);
@@ -528,7 +528,7 @@ public override int Advance(int target)
                     // if (DEBUG) {
                     //   System.out.println("skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer());
                     // }
-                    Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                     docUpto = newDocUpto;

                     // Force to read next block
@@ -731,7 +731,7 @@ public DocsAndPositionsEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32Bl
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                Debugging.Assert(() => left > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -875,7 +875,7 @@ public override int Advance(int target)

                     if (!skipped)
                     {
-                        Debugging.Assert(() => skipOffset != -1);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         // if (DEBUG) {
@@ -894,7 +894,7 @@ public override int Advance(int target)
                     //   System.out.println("  skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer() + " pos.fp=" + skipper.getPosPointer() + " pos.bufferUpto=" + skipper.getPosBufferUpto());
                     // }
-                    Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                     docUpto = newDocUpto;

                     // Force to read next block
@@ -983,7 +983,7 @@ private void SkipPositions()
                     // if (DEBUG) {
                     //   System.out.println("        skip whole block @ fp=" + posIn.getFilePointer());
                     // }
-                    Debugging.Assert(() => posIn.GetFilePointer() != lastPosBlockFP);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => posIn.GetFilePointer() != lastPosBlockFP);
                     outerInstance.forUtil.SkipBlock(posIn);
                     toSkip -= Lucene41PostingsFormat.BLOCK_SIZE;
                 }
@@ -1236,7 +1236,7 @@ public EverythingEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32BlockTer
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                Debugging.Assert(() => left > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -1450,7 +1450,7 @@ public override int Advance(int target)

                     if (!skipped)
                     {
-                        Debugging.Assert(() => skipOffset != -1);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         // if (DEBUG) {
@@ -1468,7 +1468,7 @@ public override int Advance(int target)
                     // if (DEBUG) {
                     //   System.out.println("  skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer() + " pos.fp=" + skipper.getPosPointer() + " pos.bufferUpto=" + skipper.getPosBufferUpto() + " pay.fp=" + skipper.getPayPointer() + " lastStartOffset=" + lastStartOffset);
                     // }
-                    Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                     docUpto = newDocUpto;

                     // Force to read next block
@@ -1568,7 +1568,7 @@ private void SkipPositions()
                 // if (DEBUG) {
                 //   System.out.println("        skip whole block @ fp=" + posIn.getFilePointer());
                 // }
-                Debugging.Assert(() => posIn.GetFilePointer() != lastPosBlockFP);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => posIn.GetFilePointer() != lastPosBlockFP);
                 outerInstance.forUtil.SkipBlock(posIn);

                 if (indexHasPayloads)
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
index a60d951774..305ab0d766 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
@@ -377,8 +377,11 @@ public override void AddPosition(int position, BytesRef payload, int startOffset

             if (fieldHasOffsets)
             {
-                Debugging.Assert(() => startOffset >= lastStartOffset);
-                Debugging.Assert(() => endOffset >= startOffset);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => startOffset >= lastStartOffset);
+                    Debugging.Assert(() => endOffset >= startOffset);
+                }
                 offsetStartDeltaBuffer[posBufferUpto] = startOffset - lastStartOffset;
                 offsetLengthBuffer[posBufferUpto] = endOffset - startOffset;
                 lastStartOffset = startOffset;
@@ -439,11 +442,11 @@ public override void FinishDoc()
         public override void FinishTerm(BlockTermState state)
         {
             Int32BlockTermState state2 = (Int32BlockTermState)state;
-            Debugging.Assert(() => state2.DocFreq > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state2.DocFreq > 0);

             // TODO: wasteful we are counting this (counting # docs
             // for this term) in two places?
-            Debugging.Assert(() => state2.DocFreq == docCount, () => state2.DocFreq + " vs " + docCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state2.DocFreq == docCount, () => state2.DocFreq + " vs " + docCount);

             // if (DEBUG) {
             //   System.out.println("FPW.finishTerm docFreq=" + state2.docFreq);
@@ -498,7 +501,7 @@ public override void FinishTerm(BlockTermState state)

             // totalTermFreq is just total number of positions(or payloads, or offsets)
             // associated with current term.
-            Debugging.Assert(() => state2.TotalTermFreq != -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => state2.TotalTermFreq != -1);

             if (state2.TotalTermFreq > Lucene41PostingsFormat.BLOCK_SIZE)
             {
                 // record file offset for last pos in last block
@@ -576,7 +579,7 @@ public override void FinishTerm(BlockTermState state)

             if (fieldHasPayloads)
             {
-                Debugging.Assert(() => payloadBytesReadUpto == payloadByteUpto);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadBytesReadUpto == payloadByteUpto);
                 payloadByteUpto = 0;
             }
         }
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
index 6b459ce7df..16474f8d44 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
@@ -129,7 +129,7 @@ public void Init(long skipPointer, long docBasePointer, long posBasePointer, lon
             }
             else
             {
-                Debugging.Assert(() => posBasePointer == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => posBasePointer == 0);
             }
         }
diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
index c32c67633b..db8a5f6e1d 100644
--- a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
+++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
@@ -93,7 +93,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable values)
             long count = 0;
             foreach (long? nv in values)
             {
-                Debugging.Assert(() => nv != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => nv != null);
                 long v = nv.Value;

                 if (gcd != 1)
@@ -127,7 +127,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable values)
                 ++count;
             }
-            Debugging.Assert(() => count == maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc);
             }

             if (uniqueValues != null)
diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
index 49deb1b413..c8491c3364 100644
--- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
+++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
@@ -495,13 +495,13 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va
                 }
                 else
                 {
-                    Debugging.Assert(() => current == 1);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => current == 1);
                     ordsIter.MoveNext();
                     yield return ordsIter.Current;
                 }
             }

-            Debugging.Assert(() => !ordsIter.MoveNext());
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => !ordsIter.MoveNext());
         }

         protected override void Dispose(bool disposing)
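The message parameter is deferred for the same reason as the condition: Debugging.Assert takes a factory returning the string, so concatenation runs only after the condition factory has already returned false and the exception is about to be thrown. A rough sketch of the difference, with illustrative names; the eager line is shown only for contrast and is not an overload added by this patch:

    using Lucene.Net.Diagnostics;

    internal static class MessageFactorySketch
    {
        internal static void CheckUpto(int upto, int limit)
        {
            // Eager message (for contrast only): the string would be built on
            // every call, even when the assert passes or asserts are disabled.
            //string message = "upto=" + upto + " limit=" + limit;

            // Deferred message: the factory runs only on failure.
            if (Debugging.AssertsEnabled)
                Debugging.Assert(() => upto <= limit, () => "upto=" + upto + " limit=" + limit);
        }
    }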
diff --git a/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs b/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
index 52be418aba..5b692e8589 100644
--- a/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
@@ -73,7 +73,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     if (fi.IsIndexed)
                     {
                         bits |= Lucene46FieldInfosFormat.IS_INDEXED;
-                        Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                         if (indexOptions == IndexOptions.DOCS_ONLY)
                         {
                             bits |= Lucene46FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
@@ -94,7 +94,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     // pack the DV types in one byte
                     var dv = DocValuesByte(fi.DocValuesType);
                     var nrm = DocValuesByte(fi.NormType);
-                    Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
                     var val = (byte)(0xff & ((nrm << 4) | (byte)dv));
                     output.WriteByte(val);
                     output.WriteInt64(fi.DocValuesGen);
diff --git a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
index ecdb5bdc23..4462d2fd74 100644
--- a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
+++ b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
@@ -106,7 +106,7 @@ public override int NextDoc()
                     current = subs[upto].DocsEnum;
                     currentBase = mergeState.DocBase[reader];
                     currentMap = mergeState.DocMaps[reader];
-                    Debugging.Assert(() => currentMap.MaxDoc == subs[upto].Slice.Length, () => "readerIndex=" + reader + " subs.len=" + subs.Length + " len1=" + currentMap.MaxDoc + " vs " + subs[upto].Slice.Length);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => currentMap.MaxDoc == subs[upto].Slice.Length, () => "readerIndex=" + reader + " subs.len=" + subs.Length + " len1=" + currentMap.MaxDoc + " vs " + subs[upto].Slice.Length);
                 }
             }
diff --git a/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs b/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
index c6520340d4..40a675b8fb 100644
--- a/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
+++ b/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
@@ -253,7 +253,7 @@ public virtual void Init(long skipPointer, int df)
         {
             this.skipPointer[0] = skipPointer;
             this.docCount = df;
-            Debugging.Assert(() => skipPointer >= 0 && skipPointer <= skipStream[0].Length, () => "invalid skip pointer: " + skipPointer + ", length=" + skipStream[0].Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => skipPointer >= 0 && skipPointer <= skipStream[0].Length, () => "invalid skip pointer: " + skipPointer + ", length=" + skipStream[0].Length);
             Array.Clear(m_skipDoc, 0, m_skipDoc.Length);
             Array.Clear(numSkipped, 0, numSkipped.Length);
             Array.Clear(childPointer, 0, childPointer.Length);
diff --git a/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs b/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
index beb24e422a..81713c72b8 100644
--- a/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
+++ b/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
@@ -148,7 +148,7 @@ public virtual void ResetSkip()
         /// If an I/O error occurs.
         public virtual void BufferSkip(int df)
         {
-            Debugging.Assert(() => df % skipInterval == 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => df % skipInterval == 0);
             int numLevels = 1;
             df /= skipInterval;
diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
index d9904c6ef3..4ef99e0b12 100644
--- a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
+++ b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
@@ -155,7 +155,7 @@ internal virtual DocValuesConsumer GetInstance(FieldInfo field)

             string formatName_ = format.Name;

             string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName_);
-            Debugging.Assert(() => field.DocValuesGen != -1 || previousValue == null, () => "formatName=" + formatName_ + " prevValue=" + previousValue);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesGen != -1 || previousValue == null, () => "formatName=" + formatName_ + " prevValue=" + previousValue);
             int? suffix = null;

@@ -199,12 +199,12 @@ internal virtual DocValuesConsumer GetInstance(FieldInfo field)
             else
             {
                 // we've already seen this format, so just grab its suffix
-                Debugging.Assert(() => suffixes.ContainsKey(formatName_));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => suffixes.ContainsKey(formatName_));
                 suffix = consumer.Suffix;
             }

             previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
-            Debugging.Assert(() => field.DocValuesGen != -1 || previousValue == null, () => "suffix=" + Convert.ToString(suffix, CultureInfo.InvariantCulture) + " prevValue=" + previousValue);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesGen != -1 || previousValue == null, () => "suffix=" + Convert.ToString(suffix, CultureInfo.InvariantCulture) + " prevValue=" + previousValue);

             // TODO: we should only provide the "slice" of FIS
             // that this DVF actually sees ...
@@ -265,7 +265,7 @@ public FieldsReader(PerFieldDocValuesFormat outerInstance, SegmentReadState read
                         {
                             // null formatName means the field is in fieldInfos, but has no docvalues!
                             string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
-                            Debugging.Assert(() => suffix != null);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => suffix != null);
                             DocValuesFormat format = DocValuesFormat.ForName(formatName);
                             string segmentSuffix = GetFullSegmentSuffix(readState.SegmentSuffix, GetSuffix(formatName, suffix));
                             // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
@@ -306,7 +306,7 @@ internal FieldsReader(PerFieldDocValuesFormat outerInstance, FieldsReader other)
                 {
                     DocValuesProducer producer;
                     oldToNew.TryGetValue(ent.Value, out producer);
-                    Debugging.Assert(() => producer != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => producer != null);
                     fields[ent.Key] = producer;
                 }
             }
diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
index e5dbc96318..4f3a7ea473 100644
--- a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
+++ b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
@@ -116,7 +116,7 @@ public override TermsConsumer AddField(FieldInfo field)
             string formatName = format.Name;

             string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName);
-            Debugging.Assert(() => previousValue == null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => previousValue == null);

             int? suffix;

@@ -147,12 +147,12 @@ public override TermsConsumer AddField(FieldInfo field)
             else
             {
                 // we've already seen this format, so just grab its suffix
-                Debugging.Assert(() => suffixes.ContainsKey(formatName));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => suffixes.ContainsKey(formatName));
                 suffix = consumer.Suffix;
             }

             previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
-            Debugging.Assert(() => previousValue == null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => previousValue == null);

             // TODO: we should only provide the "slice" of FIS
             // that this PF actually sees ... then stuff like
@@ -219,7 +219,7 @@ public FieldsReader(PerFieldPostingsFormat outerInstance, SegmentReadState readS
                         {
                             // null formatName means the field is in fieldInfos, but has no postings!
                             string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
-                            Debugging.Assert(() => suffix != null);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => suffix != null);
                             PostingsFormat format = PostingsFormat.ForName(formatName);
                             string segmentSuffix = GetSuffix(formatName, suffix);
                             // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
diff --git a/src/Lucene.Net/Codecs/PostingsConsumer.cs b/src/Lucene.Net/Codecs/PostingsConsumer.cs
index aff9f0e810..3213742a16 100644
--- a/src/Lucene.Net/Codecs/PostingsConsumer.cs
+++ b/src/Lucene.Net/Codecs/PostingsConsumer.cs
@@ -151,7 +151,7 @@ public virtual TermStats Merge(MergeState mergeState, IndexOptions indexOptions,
             }
             else
             {
-                Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
                 var postingsEnum = (DocsAndPositionsEnum)postings;
                 while (true)
                 {
diff --git a/src/Lucene.Net/Codecs/TermVectorsWriter.cs b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
index 028e99ddbe..0f1ac1131e 100644
--- a/src/Lucene.Net/Codecs/TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
@@ -286,7 +286,7 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                 fieldCount++;
                 FieldInfo fieldInfo = mergeState.FieldInfos.FieldInfo(fieldName);

-                Debugging.Assert(() => lastFieldName == null || fieldName.CompareToOrdinal(lastFieldName) > 0, () => "lastFieldName=" + lastFieldName + " fieldName=" + fieldName);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => lastFieldName == null || fieldName.CompareToOrdinal(lastFieldName) > 0, () => "lastFieldName=" + lastFieldName + " fieldName=" + fieldName);
                 lastFieldName = fieldName;

                 Terms terms = vectors.GetTerms(fieldName);
@@ -299,7 +299,7 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                 bool hasPositions = terms.HasPositions;
                 bool hasOffsets = terms.HasOffsets;
                 bool hasPayloads = terms.HasPayloads;
-                Debugging.Assert(() => !hasPayloads || hasPositions);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasPayloads || hasPositions);

                 int numTerms = (int)terms.Count;
                 if (numTerms == -1)
@@ -328,11 +328,14 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                     if (hasPositions || hasOffsets)
                     {
                         docsAndPositionsEnum = termsEnum.DocsAndPositions(null, docsAndPositionsEnum);
-                        Debugging.Assert(() => docsAndPositionsEnum != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => docsAndPositionsEnum != null);

                         int docID = docsAndPositionsEnum.NextDoc();
-                        Debugging.Assert(() => docID != DocIdSetIterator.NO_MORE_DOCS);
-                        Debugging.Assert(() => docsAndPositionsEnum.Freq == freq);
+                        if (Debugging.AssertsEnabled)
+                        {
+                            Debugging.Assert(() => docID != DocIdSetIterator.NO_MORE_DOCS);
+                            Debugging.Assert(() => docsAndPositionsEnum.Freq == freq);
+                        }

                         for (int posUpto = 0; posUpto < freq; posUpto++)
                         {
@@ -342,16 +345,16 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)

                             BytesRef payload = docsAndPositionsEnum.GetPayload();

-                            Debugging.Assert(() => !hasPositions || pos >= 0);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasPositions || pos >= 0);
                             AddPosition(pos, startOffset, endOffset, payload);
                         }
                     }

                     FinishTerm();
                 }
-                Debugging.Assert(() => termCount == numTerms);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => termCount == numTerms);
                 FinishField();
             }
-            Debugging.Assert(() => fieldCount == numFields);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == numFields);

             FinishDocument();
         }
diff --git a/src/Lucene.Net/Codecs/TermsConsumer.cs b/src/Lucene.Net/Codecs/TermsConsumer.cs
index 08f4c4a780..926f92e562 100644
--- a/src/Lucene.Net/Codecs/TermsConsumer.cs
+++ b/src/Lucene.Net/Codecs/TermsConsumer.cs
@@ -101,7 +101,7 @@ protected internal TermsConsumer()
         public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, TermsEnum termsEnum)
         {
             BytesRef term;
-            Debugging.Assert(() => termsEnum != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null);
             long sumTotalTermFreq = 0;
             long sumDocFreq = 0;
             long sumDFsinceLastAbortCheck = 0;
@@ -157,7 +157,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         docsAndFreqsEnumIn = (MultiDocsEnum)termsEnum.Docs(null, docsAndFreqsEnumIn);
-                        Debugging.Assert(() => docsAndFreqsEnumIn != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => docsAndFreqsEnumIn != null);
                         docsAndFreqsEnum.Reset(docsAndFreqsEnumIn);
                         PostingsConsumer postingsConsumer = StartTerm(term);
                         TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, docsAndFreqsEnum, visitedDocs);
@@ -188,7 +188,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn, DocsAndPositionsFlags.PAYLOADS);
-                        Debugging.Assert(() => postingsEnumIn != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => postingsEnumIn != null);
                         postingsEnum.Reset(postingsEnumIn);

                         PostingsConsumer postingsConsumer = StartTerm(term);
@@ -209,7 +209,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                 }
                 else
                 {
-                    Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
                     if (postingsEnum == null)
                     {
                         postingsEnum = new MappingMultiDocsAndPositionsEnum();
@@ -221,7 +221,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn);
-                        Debugging.Assert(() => postingsEnumIn != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => postingsEnumIn != null);
                         postingsEnum.Reset(postingsEnumIn);

                         PostingsConsumer postingsConsumer = StartTerm(term);
diff --git a/src/Lucene.Net/Index/AtomicReader.cs b/src/Lucene.Net/Index/AtomicReader.cs
index 8639d42a53..d890e7f8d8 100644
--- a/src/Lucene.Net/Index/AtomicReader.cs
+++ b/src/Lucene.Net/Index/AtomicReader.cs
@@ -211,8 +211,11 @@ public Terms GetTerms(string field) // LUCENENET specific: Renamed from Terms()
         public DocsEnum GetTermDocsEnum(Term term) // LUCENENET specific: Renamed from TermDocsEnum()
         {
-            Debugging.Assert(() => term.Field != null);
-            Debugging.Assert(() => term.Bytes != null);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => term.Field != null);
+                Debugging.Assert(() => term.Bytes != null);
+            }
             Fields fields = Fields;
             if (fields != null)
             {
@@ -236,8 +239,8 @@ public DocsEnum GetTermDocsEnum(Term term) // LUCENENET specific: Renamed from T
         ///
         public DocsAndPositionsEnum GetTermPositionsEnum(Term term) // LUCENENET specific: Renamed from TermPositionsEnum()
         {
-            Debugging.Assert(() => term.Field != null);
-            Debugging.Assert(() => term.Bytes != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Field != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Bytes != null);
             Fields fields = Fields;
             if (fields != null)
             {
diff --git a/src/Lucene.Net/Index/AtomicReaderContext.cs b/src/Lucene.Net/Index/AtomicReaderContext.cs
index ab0a60c479..28caa90224 100644
--- a/src/Lucene.Net/Index/AtomicReaderContext.cs
+++ b/src/Lucene.Net/Index/AtomicReaderContext.cs
@@ -63,7 +63,7 @@ public override IList Leaves
                 {
                     throw new NotSupportedException("this is not a top-level context.");
                 }
-                Debugging.Assert(() => leaves != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => leaves != null);
                 return leaves;
             }
         }
diff --git a/src/Lucene.Net/Index/AutomatonTermsEnum.cs b/src/Lucene.Net/Index/AutomatonTermsEnum.cs
index 553e0be2dd..88978fe709 100644
--- a/src/Lucene.Net/Index/AutomatonTermsEnum.cs
+++ b/src/Lucene.Net/Index/AutomatonTermsEnum.cs
@@ -91,7 +91,7 @@ public AutomatonTermsEnum(TermsEnum tenum, CompiledAutomaton compiled)
         {
             this.finite = compiled.Finite;
             this.runAutomaton = compiled.RunAutomaton;
-            Debugging.Assert(() => this.runAutomaton != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => this.runAutomaton != null);

             this.commonSuffixRef = compiled.CommonSuffixRef;
             this.allTransitions = compiled.SortedTransitions;
@@ -129,7 +129,7 @@ protected override BytesRef NextSeekTerm(BytesRef term)
             //System.out.println("ATE.nextSeekTerm term=" + term);
             if (term == null)
             {
-                Debugging.Assert(() => seekBytesRef.Length == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => seekBytesRef.Length == 0);
                 // return the empty term, as its valid
                 if (runAutomaton.IsAccept(runAutomaton.InitialState))
                 {
@@ -159,14 +159,14 @@ protected override BytesRef NextSeekTerm(BytesRef term)
         ///
         private void SetLinear(int position)
         {
-            Debugging.Assert(() => linear == false);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => linear == false);
             int state = runAutomaton.InitialState;

             int maxInterval = 0xff;
             for (int i = 0; i < position; i++)
             {
                 state = runAutomaton.Step(state, seekBytesRef.Bytes[i] & 0xff);
-                Debugging.Assert(() => state >= 0, () => "state=" + state);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => state >= 0, () => "state=" + state);
             }
             for (int i = 0; i < allTransitions[state].Length; i++)
             {
diff --git a/src/Lucene.Net/Index/BitsSlice.cs b/src/Lucene.Net/Index/BitsSlice.cs
index 965b23432e..1b3caa3956 100644
--- a/src/Lucene.Net/Index/BitsSlice.cs
+++ b/src/Lucene.Net/Index/BitsSlice.cs
@@ -40,7 +40,7 @@ public BitsSlice(IBits parent, ReaderSlice slice)
             this.parent = parent;
             this.start = slice.Start;
             this.length = slice.Length;
-            Debugging.Assert(() => length >= 0, () => "length=" + length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "length=" + length);
         }

         public bool Get(int doc)
@@ -49,7 +49,7 @@ public bool Get(int doc)
             {
                 throw new Exception("doc " + doc + " is out of bounds 0 .. " + (length - 1));
             }
-            Debugging.Assert(() => doc < length, () => "doc=" + doc + " length=" + length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => doc < length, () => "doc=" + doc + " length=" + length);
             return parent.Get(doc + start);
         }
diff --git a/src/Lucene.Net/Index/BufferedUpdatesStream.cs b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
index f3eedccbb1..58acdd26e2 100644
--- a/src/Lucene.Net/Index/BufferedUpdatesStream.cs
+++ b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
@@ -88,10 +88,13 @@ public virtual long Push(FrozenBufferedUpdates packet)
              * since deletes are applied to the wrong segments.
              */
            packet.DelGen = nextGen++;
-            Debugging.Assert(packet.Any);
-            Debugging.Assert(CheckDeleteStats);
-            Debugging.Assert(() => packet.DelGen < nextGen);
-            Debugging.Assert(() => updates.Count == 0 || updates[updates.Count - 1].DelGen < packet.DelGen, () => "Delete packets must be in order");
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(packet.Any);
+                Debugging.Assert(CheckDeleteStats);
+                Debugging.Assert(() => packet.DelGen < nextGen);
+                Debugging.Assert(() => updates.Count == 0 || updates[updates.Count - 1].DelGen < packet.DelGen, () => "Delete packets must be in order");
+            }
             updates.Add(packet);
             numTerms.AddAndGet(packet.numTermDeletes);
             bytesUsed.AddAndGet(packet.bytesUsed);
@@ -99,7 +102,7 @@ public virtual long Push(FrozenBufferedUpdates packet)
             {
                 infoStream.Message("BD", "push deletes " + packet + " delGen=" + packet.DelGen + " packetCount=" + updates.Count + " totBytesUsed=" + bytesUsed);
             }
-            Debugging.Assert(CheckDeleteStats);
+            if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
             return packet.DelGen;
         }
     }
@@ -175,7 +178,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                 return new ApplyDeletesResult(false, nextGen++, null);
             }

-            Debugging.Assert(CheckDeleteStats);
+            if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);

             if (!Any())
             {
@@ -236,11 +239,11 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                     }
                     else if (packet != null && segGen == packet.DelGen)
                     {
-                        Debugging.Assert(() => packet.isSegmentPrivate, () => "Packet and Segments deletegen can only match on a segment private del packet gen=" + segGen);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => packet.isSegmentPrivate, () => "Packet and Segments deletegen can only match on a segment private del packet gen=" + segGen);
                         //System.out.println("  eq");

                         // Lock order: IW -> BD -> RP
-                        Debugging.Assert(() => readerPool.InfoIsLive(info));
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => readerPool.InfoIsLive(info));
                         ReadersAndUpdates rld = readerPool.Get(info, true);
                         SegmentReader reader = rld.GetReader(IOContext.READ);
                         int delCount = 0;
@@ -267,7 +270,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                             rld.WriteFieldUpdates(info.Info.Dir, dvUpdates);
                         }
                         int fullDelCount = rld.Info.DelCount + rld.PendingDeleteCount;
-                        Debugging.Assert(() => fullDelCount <= rld.Info.Info.DocCount);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => fullDelCount <= rld.Info.Info.DocCount);
                         segAllDeletes = fullDelCount == rld.Info.Info.DocCount;
                     }
                     finally
@@ -312,7 +315,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                     if (coalescedUpdates != null)
                     {
                         // Lock order: IW -> BD -> RP
-                        Debugging.Assert(() => readerPool.InfoIsLive(info));
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => readerPool.InfoIsLive(info));
                         ReadersAndUpdates rld = readerPool.Get(info, true);
                         SegmentReader reader = rld.GetReader(IOContext.READ);
                         int delCount = 0;
@@ -329,7 +332,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                             rld.WriteFieldUpdates(info.Info.Dir, dvUpdates);
                         }
                         int fullDelCount = rld.Info.DelCount + rld.PendingDeleteCount;
-                        Debugging.Assert(() => fullDelCount <= rld.Info.Info.DocCount);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(() => fullDelCount <= rld.Info.Info.DocCount);
                         segAllDeletes = fullDelCount == rld.Info.Info.DocCount;
                     }
                     finally
@@ -359,7 +362,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                 }
             }

-            Debugging.Assert(CheckDeleteStats);
+            if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
             if (infoStream.IsEnabled("BD"))
             {
                 infoStream.Message("BD", "applyDeletes took " + (Environment.TickCount - t0) + " msec");
@@ -389,7 +392,7 @@ public virtual void Prune(SegmentInfos segmentInfos)
         {
             lock (this)
             {
-                Debugging.Assert(CheckDeleteStats);
+                if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
                 long minGen = long.MaxValue;
                 foreach (SegmentCommitInfo info in segmentInfos.Segments)
                 {
@@ -406,15 +409,18 @@ public virtual void Prune(SegmentInfos segmentInfos)
                     if (updates[delIDX].DelGen >= minGen)
                     {
                         Prune(delIDX);
-                        Debugging.Assert(CheckDeleteStats);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
                         return;
                     }
                 }

                 // All deletes pruned
                 Prune(limit);
-                Debugging.Assert(() => !Any());
-                Debugging.Assert(CheckDeleteStats);
+                if (Debugging.AssertsEnabled)
+                {
+                    Debugging.Assert(() => !Any());
+                    Debugging.Assert(CheckDeleteStats);
+                }
             }
         }
@@ -432,9 +438,9 @@ private void Prune(int count)
             {
                 FrozenBufferedUpdates packet = updates[delIDX];
                 numTerms.AddAndGet(-packet.numTermDeletes);
-                Debugging.Assert(() => numTerms >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms >= 0);
                 bytesUsed.AddAndGet(-packet.bytesUsed);
-                Debugging.Assert(() => bytesUsed >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed >= 0);
             }
             updates.SubList(0, count).Clear();
         }
@@ -459,7 +465,7 @@ private long ApplyTermDeletes(IEnumerable termsIter, ReadersAndUpdates rld
             string currentField = null;
             DocsEnum docs = null;

-            Debugging.Assert(() => CheckDeleteTerm(null));
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => CheckDeleteTerm(null));

             bool any = false;

@@ -471,7 +477,7 @@ private long ApplyTermDeletes(IEnumerable termsIter, ReadersAndUpdates rld
                 // forwards
                 if (!string.Equals(term.Field, currentField, StringComparison.Ordinal))
                 {
-                    Debugging.Assert(() => currentField == null || currentField.CompareToOrdinal(term.Field) < 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => currentField == null || currentField.CompareToOrdinal(term.Field) < 0);
                     currentField = term.Field;
                     Terms terms = fields.GetTerms(currentField);
                     if (terms != null)
@@ -488,7 +494,7 @@ private long ApplyTermDeletes(IEnumerable termsIter, ReadersAndUpdates rld
                 {
                     continue;
                 }
-                Debugging.Assert(() => CheckDeleteTerm(term));
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => CheckDeleteTerm(term));

                 // System.out.println("  term=" + term);
@@ -682,7 +688,7 @@ private bool CheckDeleteTerm(Term term)
         {
             if (term != null)
             {
-                Debugging.Assert(() => lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0, () => "lastTerm=" + lastDeleteTerm + " vs term=" + term);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0, () => "lastTerm=" + lastDeleteTerm + " vs term=" + term);
             }

             // TODO: we re-use term now in our merged iterable, but we shouldn't clone, instead copy for this assert
             lastDeleteTerm = term == null ? null : new Term(term.Field, BytesRef.DeepCopyOf(term.Bytes));
@@ -699,8 +705,11 @@ private bool CheckDeleteStats()
                 numTerms2 += packet.numTermDeletes;
                 bytesUsed2 += packet.bytesUsed;
             }
-            Debugging.Assert(() => numTerms2 == numTerms, () => "numTerms2=" + numTerms2 + " vs " + numTerms);
-            Debugging.Assert(() => bytesUsed2 == bytesUsed, () => "bytesUsed2=" + bytesUsed2 + " vs " + bytesUsed);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => numTerms2 == numTerms, () => "numTerms2=" + numTerms2 + " vs " + numTerms);
+                Debugging.Assert(() => bytesUsed2 == bytesUsed, () => "bytesUsed2=" + bytesUsed2 + " vs " + bytesUsed);
+            }
             return true;
         }
     }
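Call sites such as Debugging.Assert(CheckDeleteStats) above pass a method group instead of a lambda: the method re-derives an invariant, returns true so the assert passes, and is skipped entirely when asserts are off, keeping its re-count out of release paths. The guard still pays off here, since even a method-group conversion allocates a delegate per call. A minimal sketch of that shape; the class and fields are invented for illustration:

    using Lucene.Net.Diagnostics;

    internal sealed class StatsSketch
    {
        private long numTerms;
        private readonly long[] packets = new long[16];

        internal void Add(int index, long termDeletes)
        {
            packets[index] += termDeletes;
            numTerms += termDeletes;
            // Method-group conversion to Func<bool>; runs only with asserts on.
            if (Debugging.AssertsEnabled) Debugging.Assert(CheckStats);
        }

        // Runs only from asserts: recomputes the running total from scratch
        // and returns true so it can sit inside an Assert call.
        private bool CheckStats()
        {
            long expected = 0;
            foreach (long packet in packets)
            {
                expected += packet;
            }
            if (Debugging.AssertsEnabled) Debugging.Assert(() => expected == numTerms, () => "expected=" + expected + " vs " + numTerms);
            return true;
        }
    }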
diff --git a/src/Lucene.Net/Index/ByteSliceReader.cs b/src/Lucene.Net/Index/ByteSliceReader.cs
index 5380c5d7f3..961f9731f9 100644
--- a/src/Lucene.Net/Index/ByteSliceReader.cs
+++ b/src/Lucene.Net/Index/ByteSliceReader.cs
@@ -48,9 +48,12 @@ internal ByteSliceReader() { } // LUCENENET specific - made constructor internal
         public void Init(ByteBlockPool pool, int startIndex, int endIndex)
         {
-            Debugging.Assert(() => endIndex - startIndex >= 0);
-            Debugging.Assert(() => startIndex >= 0);
-            Debugging.Assert(() => endIndex >= 0);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => endIndex - startIndex >= 0);
+                Debugging.Assert(() => startIndex >= 0);
+                Debugging.Assert(() => endIndex >= 0);
+            }

             this.pool = pool;
             this.EndIndex = endIndex;
@@ -76,14 +79,17 @@ public void Init(ByteBlockPool pool, int startIndex, int endIndex)

         public bool Eof()
         {
-            Debugging.Assert(() => upto + BufferOffset <= EndIndex);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto + BufferOffset <= EndIndex);
             return upto + BufferOffset == EndIndex;
         }

         public override byte ReadByte()
         {
-            Debugging.Assert(() => !Eof());
-            Debugging.Assert(() => upto <= limit);
+            if (Debugging.AssertsEnabled)
+            {
+                Debugging.Assert(() => !Eof());
+                Debugging.Assert(() => upto <= limit);
+            }
             if (upto == limit)
             {
                 NextSlice();
@@ -98,7 +104,7 @@ public long WriteTo(DataOutput @out)
             {
                 if (limit + BufferOffset == EndIndex)
                 {
-                    Debugging.Assert(() => EndIndex - BufferOffset >= upto);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(() => EndIndex - BufferOffset >= upto);
                     @out.WriteBytes(buffer, upto, limit - upto);
                     size += limit - upto;
                     break;
@@ -131,7 +137,7 @@ public void NextSlice()
             if (nextIndex + newSize >= EndIndex)
             {
                 // We are advancing to the final slice
-                Debugging.Assert(() => EndIndex - nextIndex > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(() => EndIndex - nextIndex > 0);
                 limit = EndIndex - BufferOffset;
             }
             else
diff --git a/src/Lucene.Net/Index/ByteSliceWriter.cs b/src/Lucene.Net/Index/ByteSliceWriter.cs
index e51789e669..d242d1496e 100644
--- a/src/Lucene.Net/Index/ByteSliceWriter.cs
+++ b/src/Lucene.Net/Index/ByteSliceWriter.cs
@@ -47,26 +47,26 @@ public ByteSliceWriter(ByteBlockPool pool)
         public void Init(int address)
         {
             slice = pool.Buffers[address >> ByteBlockPool.BYTE_BLOCK_SHIFT];
-            Debugging.Assert(() => slice != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => slice != null);
             upto = address & ByteBlockPool.BYTE_BLOCK_MASK;
             offset0 = address;
-            Debugging.Assert(() => upto < slice.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto < slice.Length);
         }

         ///
         /// Write byte into byte slice stream
         public override void WriteByte(byte b)
         {
-            Debugging.Assert(() => slice != null);
slice != null); if (slice[upto] != 0) { upto = pool.AllocSlice(slice, upto); slice = pool.Buffer; offset0 = pool.ByteOffset; - Debugging.Assert(() => slice != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => slice != null); } slice[upto++] = (byte)b; - Debugging.Assert(() => upto != slice.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto != slice.Length); } public override void WriteBytes(byte[] b, int offset, int len) @@ -83,7 +83,7 @@ public override void WriteBytes(byte[] b, int offset, int len) } slice[upto++] = (byte)b[offset++]; - Debugging.Assert(() => upto != slice.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto != slice.Length); } } diff --git a/src/Lucene.Net/Index/CheckIndex.cs b/src/Lucene.Net/Index/CheckIndex.cs index e2e30fdf5b..db614b59c7 100644 --- a/src/Lucene.Net/Index/CheckIndex.cs +++ b/src/Lucene.Net/Index/CheckIndex.cs @@ -949,7 +949,7 @@ public static Status.FieldNormStatus TestFieldNorms(AtomicReader reader, TextWri if (info.HasNorms) { #pragma warning disable 612, 618 - Debugging.Assert(() => reader.HasNorms(info.Name)); // deprecated path + if (Debugging.AssertsEnabled) Debugging.Assert(() => reader.HasNorms(info.Name)); // deprecated path #pragma warning restore 612, 618 CheckNorms(info, reader, infoStream); ++status.TotFields; @@ -957,7 +957,7 @@ public static Status.FieldNormStatus TestFieldNorms(AtomicReader reader, TextWri else { #pragma warning disable 612, 618 - Debugging.Assert(() => !reader.HasNorms(info.Name)); // deprecated path + if (Debugging.AssertsEnabled) Debugging.Assert(() => !reader.HasNorms(info.Name)); // deprecated path #pragma warning restore 612, 618 if (reader.GetNormValues(info.Name) != null) { @@ -1102,7 +1102,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs, break; } - Debugging.Assert(term.IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid); // make sure terms arrive in order according to // the comp @@ -1238,7 +1238,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs, // LUCENENET specific - restructured to reduce number of checks in production if (!(payload is null)) { - Debugging.Assert(payload.IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(payload.IsValid); if (payload.Length < 1) { throw new Exception("term " + term + ": doc " + doc + ": pos " + pos + " payload length is out of bounds " + payload.Length); @@ -1456,7 +1456,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs, if (fieldTerms is BlockTreeTermsReader.FieldReader) { BlockTreeTermsReader.Stats stats = ((BlockTreeTermsReader.FieldReader)fieldTerms).ComputeStats(); - Debugging.Assert(() => stats != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stats != null); if (status.BlockTreeStats == null) { status.BlockTreeStats = new Dictionary(); @@ -1811,7 +1811,7 @@ private static void CheckBinaryDocValues(string fieldName, AtomicReader reader, for (int i = 0; i < reader.MaxDoc; i++) { dv.Get(i, scratch); - Debugging.Assert(scratch.IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid); if (docsWithField.Get(i) == false && scratch.Length > 0) { throw new Exception("dv for field: " + fieldName + " is missing but has value=" + scratch + " for doc: " + i); @@ -1862,7 +1862,7 @@ private static void CheckSortedDocValues(string fieldName, AtomicReader reader, for (int i = 0; i <= maxOrd; i++) { dv.LookupOrd(i, scratch); - Debugging.Assert(scratch.IsValid); + if 
(Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid); if (lastValue != null) { if (scratch.CompareTo(lastValue) <= 0) @@ -1954,7 +1954,7 @@ private static void CheckSortedSetDocValues(string fieldName, AtomicReader reade for (long i = 0; i <= maxOrd; i++) { dv.LookupOrd(i, scratch); - Debugging.Assert(scratch.IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid); if (lastValue != null) { if (scratch.CompareTo(lastValue) <= 0) @@ -2157,25 +2157,25 @@ public static Status.TermVectorStatus TestTermVectors(AtomicReader reader, TextW if (hasProx) { postings = termsEnum.DocsAndPositions(null, postings); - Debugging.Assert(() => postings != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => postings != null); docs = null; } else { docs = termsEnum.Docs(null, docs); - Debugging.Assert(() => docs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docs != null); postings = null; } DocsEnum docs2; if (hasProx) { - Debugging.Assert(() => postings != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => postings != null); docs2 = postings; } else { - Debugging.Assert(() => docs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docs != null); docs2 = docs; } @@ -2273,12 +2273,12 @@ public static Status.TermVectorStatus TestTermVectors(AtomicReader reader, TextW if (payload != null) { - Debugging.Assert(() => vectorsHasPayload); + if (Debugging.AssertsEnabled) Debugging.Assert(() => vectorsHasPayload); } if (postingsHasPayload && vectorsHasPayload) { - Debugging.Assert(() => postingsPostings != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => postingsPostings != null); if (payload == null) { @@ -2368,7 +2368,7 @@ public virtual void FixIndex(Status result) //private static bool AssertsOn() //{ - // Debugging.Assert(TestAsserts); + // if (Debugging.AssertsEnabled) Debugging.Assert(TestAsserts); // return assertsOn; //} diff --git a/src/Lucene.Net/Index/CompositeReader.cs b/src/Lucene.Net/Index/CompositeReader.cs index 430efbab54..d22e6c2fcf 100644 --- a/src/Lucene.Net/Index/CompositeReader.cs +++ b/src/Lucene.Net/Index/CompositeReader.cs @@ -83,7 +83,7 @@ public override string ToString() } buffer.Append('('); var subReaders = GetSequentialSubReaders(); - Debugging.Assert(() => subReaders != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => subReaders != null); if (subReaders.Count > 0) { buffer.Append(subReaders[0]); @@ -115,7 +115,7 @@ public override sealed IndexReaderContext Context // lazy init without thread safety for perf reasons: Building the readerContext twice does not hurt! 
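// A note on the lazy init above: it is a deliberately benign data race. Two threads may
// both observe null and both build a context; the results are equivalent, so the last
// write wins and the only cost is duplicated work. Where exactly-one-publication ever
// mattered, the usual .NET alternative is LazyInitializer. A minimal sketch under that
// assumption (illustrative shape, not part of this patch; requires System.Threading):

private CompositeReaderContext readerContext; // no volatile, no lock

public override sealed IndexReaderContext Context =>
    LazyInitializer.EnsureInitialized(ref readerContext,
        () => CompositeReaderContext.Create(this)); // factory may race, but exactly one instance is published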
if (readerContext == null) { - Debugging.Assert(() => GetSequentialSubReaders() != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => GetSequentialSubReaders() != null); readerContext = CompositeReaderContext.Create(this); } return readerContext; diff --git a/src/Lucene.Net/Index/CompositeReaderContext.cs b/src/Lucene.Net/Index/CompositeReaderContext.cs index e4188dcd95..18adfac759 100644 --- a/src/Lucene.Net/Index/CompositeReaderContext.cs +++ b/src/Lucene.Net/Index/CompositeReaderContext.cs @@ -70,7 +70,7 @@ public override IList Leaves { throw new NotSupportedException("this is not a top-level context."); } - Debugging.Assert(() => leaves != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => leaves != null); return leaves; } } @@ -126,7 +126,7 @@ internal IndexReaderContext Build(CompositeReaderContext parent, IndexReader rea children[i] = Build(newParent, r, i, newDocBase); newDocBase += r.MaxDoc; } - Debugging.Assert(() => newDocBase == cr.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newDocBase == cr.MaxDoc); return newParent; } } diff --git a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs index c6d0913d06..915e973c62 100644 --- a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs +++ b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs @@ -395,7 +395,7 @@ public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMer { lock (this) { - Debugging.Assert(() => !Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !Monitor.IsEntered(writer)); this.m_writer = writer; diff --git a/src/Lucene.Net/Index/DirectoryReader.cs b/src/Lucene.Net/Index/DirectoryReader.cs index 4bb5997e4c..759c8f0bce 100644 --- a/src/Lucene.Net/Index/DirectoryReader.cs +++ b/src/Lucene.Net/Index/DirectoryReader.cs @@ -171,7 +171,7 @@ public abstract class DirectoryReader : BaseCompositeReader public static DirectoryReader OpenIfChanged(DirectoryReader oldReader) { DirectoryReader newReader = oldReader.DoOpenIfChanged(); - Debugging.Assert(() => newReader != oldReader); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newReader != oldReader); return newReader; } @@ -184,7 +184,7 @@ public static DirectoryReader OpenIfChanged(DirectoryReader oldReader) public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexCommit commit) { DirectoryReader newReader = oldReader.DoOpenIfChanged(commit); - Debugging.Assert(() => newReader != oldReader); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newReader != oldReader); return newReader; } @@ -251,7 +251,7 @@ public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexComm public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexWriter writer, bool applyAllDeletes) { DirectoryReader newReader = oldReader.DoOpenIfChanged(writer, applyAllDeletes); - Debugging.Assert(() => newReader != oldReader); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newReader != oldReader); return newReader; } diff --git a/src/Lucene.Net/Index/DocFieldProcessor.cs b/src/Lucene.Net/Index/DocFieldProcessor.cs index 177b450185..fa1e820a7d 100644 --- a/src/Lucene.Net/Index/DocFieldProcessor.cs +++ b/src/Lucene.Net/Index/DocFieldProcessor.cs @@ -79,7 +79,7 @@ public override void Flush(SegmentWriteState state) childFields[f.FieldInfo.Name] = f; } - Debugging.Assert(() => fields.Count == totalFieldCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Count == totalFieldCount); 
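// The rewrite applied in every hunk of this patch has a single purpose: the old form
// built a Func<bool> delegate (usually with a closure over locals or 'this') on every
// call, even in Release builds with asserts off. Hoisting the AssertsEnabled check makes
// the disabled path allocation-free. Side by side, using the assert just above:

// before: lambda and closure are constructed unconditionally, then thrown away
Debugging.Assert(() => fields.Count == totalFieldCount);

// after: when AssertsEnabled is false, no delegate is ever created
if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Count == totalFieldCount);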
storedConsumer.Flush(state); consumer.Flush(childFields, state); @@ -166,14 +166,14 @@ public ICollection Fields() field = field.next; } } - Debugging.Assert(() => fields.Count == totalFieldCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Count == totalFieldCount); return fields; } private void Rehash() { int newHashSize = (fieldHash.Length * 2); - Debugging.Assert(() => newHashSize > fieldHash.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newHashSize > fieldHash.Length); DocFieldProcessorPerField[] newHashArray = new DocFieldProcessorPerField[newHashSize]; @@ -246,7 +246,7 @@ public override void ProcessDocument(FieldInfos.Builder fieldInfos) // need to addOrUpdate so that FieldInfos can update globalFieldNumbers // with the correct DocValue type (LUCENE-5192) FieldInfo fi = fieldInfos.AddOrUpdate(fieldName, field.IndexableFieldType); - Debugging.Assert(() => fi == fp.fieldInfo, () => "should only have updated an existing FieldInfo instance"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == fp.fieldInfo, () => "should only have updated an existing FieldInfo instance"); } if (thisFieldGen != fp.lastGen) diff --git a/src/Lucene.Net/Index/DocTermOrds.cs b/src/Lucene.Net/Index/DocTermOrds.cs index 79121c45d3..68f948b354 100644 --- a/src/Lucene.Net/Index/DocTermOrds.cs +++ b/src/Lucene.Net/Index/DocTermOrds.cs @@ -752,7 +752,7 @@ public OrdWrappedTermsEnum(DocTermOrds outerInstance, AtomicReader reader) this.outerInstance = outerInstance; InitializeInstanceFields(); - Debugging.Assert(() => outerInstance.m_indexedTermsArray != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_indexedTermsArray != null); termsEnum = reader.Fields.GetTerms(outerInstance.m_field).GetIterator(null); } @@ -804,10 +804,10 @@ public override SeekStatus SeekCeil(BytesRef target) { // we hit the term exactly... lucky us! 
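// The ord arithmetic in the branches below relies on DocTermOrds indexing every
// (1 << indexIntervalBits)-th term: indexed entry i corresponds to ordinal
// i << indexIntervalBits, and an ordinal maps back to its index block with the inverse
// shift. A small self-contained illustration (the interval of 128 is an example value,
// not something this patch asserts):

int indexIntervalBits = 7;                                    // 1 << 7 = 128 terms per indexed block
long OrdOfIndexedTerm(int idx) => (long)idx << indexIntervalBits;
int BlockOfOrd(long ord) => (int)(ord >> indexIntervalBits);
// e.g. OrdOfIndexedTerm(3) == 384, and BlockOfOrd(384 + 100) == 3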
TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(target); - Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); ord = startIdx << outerInstance.indexIntervalBits; SetTerm(); - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); return SeekStatus.FOUND; } @@ -818,10 +818,10 @@ public override SeekStatus SeekCeil(BytesRef target) { // our target occurs *before* the first term TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(target); - Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.NOT_FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.NOT_FOUND); ord = 0; SetTerm(); - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); return SeekStatus.NOT_FOUND; } @@ -837,10 +837,10 @@ public override SeekStatus SeekCeil(BytesRef target) { // seek to the right block TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(outerInstance.m_indexedTermsArray[startIdx]); - Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); ord = startIdx << outerInstance.indexIntervalBits; SetTerm(); - Debugging.Assert(() => term != null); // should be non-null since it's in the index + if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); // should be non-null since it's in the index } while (term != null && term.CompareTo(target) < 0) @@ -874,7 +874,7 @@ public override void SeekExact(long targetOrd) ord = idx << outerInstance.indexIntervalBits; delta = (int)(targetOrd - ord); TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(@base); - Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); } else { @@ -886,14 +886,14 @@ public override void SeekExact(long targetOrd) BytesRef br = termsEnum.Next(); if (br == null) { - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); return; } ord++; } SetTerm(); - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); } private BytesRef SetTerm() diff --git a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs index 983914a8fd..9f6d5d8ee1 100644 --- a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs +++ b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs @@ -114,14 +114,14 @@ internal virtual DocValuesFieldUpdates NewUpdates(string field, DocValuesFieldUp { case DocValuesFieldUpdatesType.NUMERIC: NumericDocValuesFieldUpdates numericUpdates; - Debugging.Assert(() => !numericDVUpdates.ContainsKey(field)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !numericDVUpdates.ContainsKey(field)); numericUpdates = new NumericDocValuesFieldUpdates(field, maxDoc); numericDVUpdates[field] = numericUpdates; return numericUpdates; case DocValuesFieldUpdatesType.BINARY: BinaryDocValuesFieldUpdates binaryUpdates; - Debugging.Assert(() => !binaryDVUpdates.ContainsKey(field)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !binaryDVUpdates.ContainsKey(field)); binaryUpdates = new BinaryDocValuesFieldUpdates(field, maxDoc); binaryDVUpdates[field] = binaryUpdates; return binaryUpdates; diff --git a/src/Lucene.Net/Index/DocValuesProcessor.cs 
b/src/Lucene.Net/Index/DocValuesProcessor.cs index 56c6eabbd7..9c9fae887c 100644 --- a/src/Lucene.Net/Index/DocValuesProcessor.cs +++ b/src/Lucene.Net/Index/DocValuesProcessor.cs @@ -82,7 +82,7 @@ public override void AddField(int docID, IIndexableField field, FieldInfo fieldI } else { - Debugging.Assert(() => false, () => "unrecognized DocValues.Type: " + dvType); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "unrecognized DocValues.Type: " + dvType); } } } @@ -219,7 +219,7 @@ private string GetTypeDesc(DocValuesWriter obj) } else { - Debugging.Assert(() => obj is SortedDocValuesWriter); + if (Debugging.AssertsEnabled) Debugging.Assert(() => obj is SortedDocValuesWriter); return "sorted"; } } diff --git a/src/Lucene.Net/Index/DocumentsWriter.cs b/src/Lucene.Net/Index/DocumentsWriter.cs index 4707b40453..5e53b18d5c 100644 --- a/src/Lucene.Net/Index/DocumentsWriter.cs +++ b/src/Lucene.Net/Index/DocumentsWriter.cs @@ -244,7 +244,7 @@ internal void Abort(IndexWriter writer) { lock (this) { - Debugging.Assert(() => !Monitor.IsEntered(writer), () => "IndexWriter lock should never be hold when aborting"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !Monitor.IsEntered(writer), () => "IndexWriter lock should never be held when aborting"); bool success = false; JCG.HashSet newFilesSet = new JCG.HashSet(); try @@ -287,7 +287,7 @@ internal void LockAndAbortAll(IndexWriter indexWriter) { lock (this) { - Debugging.Assert(() => indexWriter.HoldsFullFlushLock); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexWriter.HoldsFullFlushLock); if (infoStream.IsEnabled("DW")) { infoStream.Message("DW", "lockAndAbortAll"); @@ -327,7 +327,7 @@ internal void LockAndAbortAll(IndexWriter indexWriter) private void AbortThreadState(ThreadState perThread, ISet newFiles) { - Debugging.Assert(() => perThread.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsHeldByCurrentThread); if (perThread.IsActive) // we might be closed { if (perThread.IsInitialized) @@ -350,7 +350,7 @@ private void AbortThreadState(ThreadState perThread, ISet newFiles) } else { - Debugging.Assert(() => closed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => closed); } } @@ -358,7 +358,7 @@ internal void UnlockAllAfterAbortAll(IndexWriter indexWriter) { lock (this) { - Debugging.Assert(() => indexWriter.HoldsFullFlushLock); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexWriter.HoldsFullFlushLock); if (infoStream.IsEnabled("DW")) { infoStream.Message("DW", "unlockAll"); @@ -498,10 +498,10 @@ internal bool UpdateDocuments(IEnumerable> docs, An if (!perThread.IsActive) { EnsureOpen(); - Debugging.Assert(() => false, () => "perThread is not active but we are still open"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "perThread is not active but we are still open"); } EnsureInitialized(perThread); - Debugging.Assert(() => perThread.IsInitialized); + if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsInitialized); DocumentsWriterPerThread dwpt = perThread.dwpt; int dwptNumDocs = dwpt.NumDocsInRAM; try @@ -544,10 +544,10 @@ internal bool UpdateDocument(IEnumerable doc, Analyzer analyzer if (!perThread.IsActive) { EnsureOpen(); - Debugging.Assert(() => false, () => "perThread is not active but we are still open"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "perThread is not active but we are still open"); } EnsureInitialized(perThread); - Debugging.Assert(() => perThread.IsInitialized); + if
(Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsInitialized); DocumentsWriterPerThread dwpt = perThread.dwpt; int dwptNumDocs = dwpt.NumDocsInRAM; try @@ -588,7 +588,7 @@ private bool DoFlush(DocumentsWriterPerThread flushingDWPT) SegmentFlushTicket ticket = null; try { - Debugging.Assert(() => currentFullFlushDelQueue == null || flushingDWPT.deleteQueue == currentFullFlushDelQueue, () => "expected: " + currentFullFlushDelQueue + "but was: " + flushingDWPT.deleteQueue + " " + flushControl.IsFullFlush); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFullFlushDelQueue == null || flushingDWPT.deleteQueue == currentFullFlushDelQueue, () => "expected: " + currentFullFlushDelQueue + "but was: " + flushingDWPT.deleteQueue + " " + flushControl.IsFullFlush); /* * Since with DWPT the flush process is concurrent and several DWPT * could flush at the same time we must maintain the order of the @@ -735,10 +735,13 @@ internal bool FlushAllThreads(IndexWriter indexWriter) * otherwise a new DWPT could sneak into the loop with an already flushing * delete queue */ flushControl.MarkForFullFlush(); // swaps the delQueue synced on FlushControl - Debugging.Assert(() => SetFlushingDeleteQueue(flushingDeleteQueue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => SetFlushingDeleteQueue(flushingDeleteQueue)); + } + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => currentFullFlushDelQueue != null); + Debugging.Assert(() => currentFullFlushDelQueue != deleteQueue); } - Debugging.Assert(() => currentFullFlushDelQueue != null); - Debugging.Assert(() => currentFullFlushDelQueue != deleteQueue); bool anythingFlushed = false; try @@ -760,11 +763,11 @@ internal bool FlushAllThreads(IndexWriter indexWriter) ticketQueue.AddDeletes(flushingDeleteQueue); } ticketQueue.ForcePurge(indexWriter); - Debugging.Assert(() => !flushingDeleteQueue.AnyChanges() && !ticketQueue.HasTickets); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !flushingDeleteQueue.AnyChanges() && !ticketQueue.HasTickets); } finally { - Debugging.Assert(() => flushingDeleteQueue == currentFullFlushDelQueue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => flushingDeleteQueue == currentFullFlushDelQueue); } return anythingFlushed; } @@ -777,7 +780,7 @@ internal void FinishFullFlush(bool success) { infoStream.Message("DW", Thread.CurrentThread.Name + " finishFullFlush success=" + success); } - Debugging.Assert(() => SetFlushingDeleteQueue(null)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => SetFlushingDeleteQueue(null)); if (success) { // Release the flush lock @@ -810,7 +813,7 @@ internal sealed class ApplyDeletesEvent : IEvent internal ApplyDeletesEvent() { - Debugging.Assert(() => instCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => instCount == 0); instCount++; } @@ -827,7 +830,7 @@ internal sealed class MergePendingEvent : IEvent internal MergePendingEvent() { - Debugging.Assert(() => instCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => instCount == 0); instCount++; } @@ -844,7 +847,7 @@ internal sealed class ForcedPurgeEvent : IEvent internal ForcedPurgeEvent() { - Debugging.Assert(() => instCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => instCount == 0); instCount++; } diff --git a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs index b79a7be7e2..99afb2039e 100644 --- a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs +++ 
b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs @@ -144,7 +144,7 @@ internal void Add(Term term, DeleteSlice slice) * competing updates wins! */ slice.sliceTail = termNode; - Debugging.Assert(() => slice.sliceHead != slice.sliceTail, () => "slice head and tail must differ after add"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => slice.sliceHead != slice.sliceTail, () => "slice head and tail must differ after add"); TryApplyGlobalSlice(); // TODO doing this each time is not necessary maybe // we can do it just every n times or so? } @@ -293,7 +293,7 @@ internal class DeleteSlice internal DeleteSlice(Node currentTail) { - Debugging.Assert(() => currentTail != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentTail != null); /* * Initially this is a 0 length slice pointing to the 'current' tail of * the queue. Once we update the slice we only need to assign the tail and @@ -319,7 +319,7 @@ internal virtual void Apply(BufferedUpdates del, int docIDUpto) do { current = current.next; - Debugging.Assert(() => current != null, () => "slice property violated between the head on the tail must not be a null node"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => current != null, () => "slice property violated between the head on the tail must not be a null node"); current.Apply(del, docIDUpto); // System.out.println(Thread.currentThread().getName() + ": pull " + current + " docIDUpto=" + docIDUpto); } while (current != sliceTail); diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs index 20582bb0a2..32630b3d51 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs @@ -152,7 +152,7 @@ private bool AssertMemory() * fail. To prevent this we only assert if the the largest document seen * is smaller than the 1/2 of the maxRamBufferMB */ - Debugging.Assert(() => ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected + if (Debugging.AssertsEnabled) Debugging.Assert(() => ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes + ", pending DWPT: " + numPending + ", flushing DWPT: " + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes @@ -179,7 +179,7 @@ private void CommitPerThreadBytes(ThreadState perThread) { activeBytes += delta; } - Debugging.Assert(() => UpdatePeaks(delta)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => UpdatePeaks(delta)); } // only for asserts @@ -239,7 +239,7 @@ internal DocumentsWriterPerThread DoAfterDocument(ThreadState perThread, bool is finally { bool stalled = UpdateStallState(); - Debugging.Assert(() => AssertNumDocsSinceStalled(stalled) && AssertMemory()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertNumDocsSinceStalled(stalled) && AssertMemory()); } } } @@ -268,14 +268,14 @@ internal void DoAfterFlush(DocumentsWriterPerThread dwpt) { lock (this) { - Debugging.Assert(() => flushingWriters.ContainsKey(dwpt)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => flushingWriters.ContainsKey(dwpt)); try { long? 
bytes = flushingWriters[dwpt]; flushingWriters.Remove(dwpt); flushBytes -= (long)bytes; perThreadPool.Recycle(dwpt); - Debugging.Assert(AssertMemory); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory); } finally { @@ -293,7 +293,7 @@ internal void DoAfterFlush(DocumentsWriterPerThread dwpt) private bool UpdateStallState() { - Debugging.Assert(() => Monitor.IsEntered(this)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(this)); long limit = StallLimitBytes; /* * we block indexing threads if net byte grows due to slow flushes @@ -338,7 +338,7 @@ public void SetFlushPending(ThreadState perThread) { lock (this) { - Debugging.Assert(() => !perThread.flushPending); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !perThread.flushPending); if (perThread.dwpt.NumDocsInRAM > 0) { perThread.flushPending = true; // write access synced @@ -346,7 +346,7 @@ public void SetFlushPending(ThreadState perThread) flushBytes += bytes; activeBytes -= bytes; numPending++; // write access synced - Debugging.Assert(AssertMemory); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory); } // don't assert on numDocs since we could hit an abort excp. while selecting that dwpt for flushing } } @@ -365,7 +365,7 @@ internal void DoOnAbort(ThreadState state) { activeBytes -= state.bytesUsed; } - Debugging.Assert(AssertMemory); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory); // Take it out of the loop this DWPT is stale perThreadPool.Reset(state, closed); } @@ -380,7 +380,7 @@ internal DocumentsWriterPerThread TryCheckoutForFlush(ThreadState perThread) { lock (this) { - Debugging.Assert(() => perThread.IsHeldByCurrentThread); // LUCENENET specific: Since .NET Core doesn't use unfair locking, we need to ensure the current thread has a lock before calling InternalTryCheckoutForFlush. + if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsHeldByCurrentThread); // LUCENENET specific: Since .NET Core doesn't use unfair locking, we need to ensure the current thread has a lock before calling InternalTryCheckoutForFlush. return perThread.flushPending ? InternalTryCheckOutForFlush(perThread) : null; } } @@ -390,8 +390,11 @@ private void CheckoutAndBlock(ThreadState perThread) perThread.@Lock(); try { - Debugging.Assert(() => perThread.flushPending, () => "can not block non-pending threadstate"); - Debugging.Assert(() => fullFlush, () => "can not block if fullFlush == false"); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => perThread.flushPending, () => "can not block non-pending threadstate"); + Debugging.Assert(() => fullFlush, () => "can not block if fullFlush == false"); + } DocumentsWriterPerThread dwpt; long bytes = perThread.bytesUsed; dwpt = perThreadPool.Reset(perThread, closed); @@ -406,10 +409,13 @@ private void CheckoutAndBlock(ThreadState perThread) private DocumentsWriterPerThread InternalTryCheckOutForFlush(ThreadState perThread) { - // LUCENENET specific - Since we need to mimic the unfair behavior of ReentrantLock, we need to ensure that all threads that enter here hold the lock. - Debugging.Assert(() => perThread.IsHeldByCurrentThread); - Debugging.Assert(() => Monitor.IsEntered(this)); - Debugging.Assert(() => perThread.flushPending); + if (Debugging.AssertsEnabled) + { + // LUCENENET specific - Since we need to mimic the unfair behavior of ReentrantLock, we need to ensure that all threads that enter here hold the lock. 
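// The ownership probes asserted here have no direct counterpart on C#'s lock statement,
// but Monitor.IsEntered supplies the same information. A tiny sketch of the kind of
// ReentrantLock-style wrapper these IsHeldByCurrentThread asserts assume (our names,
// not Lucene.NET's; requires System.Threading):

internal sealed class ReentrantLockSketch
{
    private readonly object sync = new object();

    public void Lock() => Monitor.Enter(sync);
    public void Unlock() => Monitor.Exit(sync);
    public bool TryLock() => Monitor.TryEnter(sync);
    public bool IsHeldByCurrentThread => Monitor.IsEntered(sync); // true only on the owning thread
}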
+ Debugging.Assert(() => perThread.IsHeldByCurrentThread); + Debugging.Assert(() => Monitor.IsEntered(this)); + Debugging.Assert(() => perThread.flushPending); + } try { // LUCENENET specific - We removed the call to perThread.TryLock() and the try-finally below as they are no longer needed. @@ -417,12 +423,12 @@ private DocumentsWriterPerThread InternalTryCheckOutForFlush(ThreadState perThre // We are pending so all memory is already moved to flushBytes if (perThread.IsInitialized) { - Debugging.Assert(() => perThread.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsHeldByCurrentThread); DocumentsWriterPerThread dwpt; long bytes = perThread.bytesUsed; // do that before // replace! dwpt = perThreadPool.Reset(perThread, closed); - Debugging.Assert(() => !flushingWriters.ContainsKey(dwpt), () => "DWPT is already flushing"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !flushingWriters.ContainsKey(dwpt), () => "DWPT is already flushing"); // Record the flushing DWPT to reduce flushBytes in doAfterFlush flushingWriters[dwpt] = bytes; numPending--; // write access synced @@ -617,8 +623,11 @@ internal void MarkForFullFlush() DocumentsWriterDeleteQueue flushingQueue; lock (this) { - Debugging.Assert(() => !fullFlush, () => "called DWFC#markForFullFlush() while full flush is still running"); - Debugging.Assert(() => fullFlushBuffer.Count == 0, () => "full flush buffer should be empty: " + fullFlushBuffer); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !fullFlush, () => "called DWFC#markForFullFlush() while full flush is still running"); + Debugging.Assert(() => fullFlushBuffer.Count == 0, () => "full flush buffer should be empty: " + fullFlushBuffer); + } fullFlush = true; flushingQueue = documentsWriter.deleteQueue; // Set a new delete queue - all subsequent DWPT will use this queue until @@ -641,7 +650,7 @@ internal void MarkForFullFlush() } continue; } - Debugging.Assert(() => next.dwpt.deleteQueue == flushingQueue || next.dwpt.deleteQueue == documentsWriter.deleteQueue, () => " flushingQueue: " + flushingQueue + " currentqueue: " + documentsWriter.deleteQueue + " perThread queue: " + next.dwpt.deleteQueue + " numDocsInRam: " + next.dwpt.NumDocsInRAM); + if (Debugging.AssertsEnabled) Debugging.Assert(() => next.dwpt.deleteQueue == flushingQueue || next.dwpt.deleteQueue == documentsWriter.deleteQueue, () => " flushingQueue: " + flushingQueue + " currentqueue: " + documentsWriter.deleteQueue + " perThread queue: " + next.dwpt.deleteQueue + " numDocsInRam: " + next.dwpt.NumDocsInRAM); if (next.dwpt.deleteQueue != flushingQueue) { // this one is already a new DWPT @@ -661,7 +670,7 @@ internal void MarkForFullFlush() * a chance that this happens since we marking DWPT for full flush without * blocking indexing.*/ PruneBlockedQueue(flushingQueue); - Debugging.Assert(() => AssertBlockedFlushes(documentsWriter.deleteQueue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertBlockedFlushes(documentsWriter.deleteQueue)); //FlushQueue.AddAll(FullFlushBuffer); foreach (var dwpt in fullFlushBuffer) { @@ -670,7 +679,7 @@ internal void MarkForFullFlush() fullFlushBuffer.Clear(); UpdateStallState(); } - Debugging.Assert(() => AssertActiveDeleteQueue(documentsWriter.deleteQueue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertActiveDeleteQueue(documentsWriter.deleteQueue)); } private bool AssertActiveDeleteQueue(DocumentsWriterDeleteQueue queue) @@ -682,7 +691,7 @@ private bool 
AssertActiveDeleteQueue(DocumentsWriterDeleteQueue queue) next.@Lock(); try { - Debugging.Assert(() => !next.IsInitialized || next.dwpt.deleteQueue == queue, () => "isInitialized: " + next.IsInitialized + " numDocs: " + (next.IsInitialized ? next.dwpt.NumDocsInRAM : 0)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !next.IsInitialized || next.dwpt.deleteQueue == queue, () => "isInitialized: " + next.IsInitialized + " numDocs: " + (next.IsInitialized ? next.dwpt.NumDocsInRAM : 0)); } finally { @@ -701,10 +710,13 @@ internal void AddFlushableState(ThreadState perThread) infoStream.Message("DWFC", "addFlushableState " + perThread.dwpt); } DocumentsWriterPerThread dwpt = perThread.dwpt; - Debugging.Assert(() => perThread.IsHeldByCurrentThread); - Debugging.Assert(() => perThread.IsInitialized); - Debugging.Assert(() => fullFlush); - Debugging.Assert(() => dwpt.deleteQueue != documentsWriter.deleteQueue); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => perThread.IsHeldByCurrentThread); + Debugging.Assert(() => perThread.IsInitialized); + Debugging.Assert(() => fullFlush); + Debugging.Assert(() => dwpt.deleteQueue != documentsWriter.deleteQueue); + } if (dwpt.NumDocsInRAM > 0) { lock (this) @@ -714,8 +726,11 @@ internal void AddFlushableState(ThreadState perThread) SetFlushPending(perThread); } DocumentsWriterPerThread flushingDWPT = InternalTryCheckOutForFlush(perThread); - Debugging.Assert(() => flushingDWPT != null, () => "DWPT must never be null here since we hold the lock and it holds documents"); - Debugging.Assert(() => dwpt == flushingDWPT, () => "flushControl returned different DWPT"); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => flushingDWPT != null, () => "DWPT must never be null here since we hold the lock and it holds documents"); + Debugging.Assert(() => dwpt == flushingDWPT, () => "flushControl returned different DWPT"); + } fullFlushBuffer.Add(flushingDWPT); } } @@ -738,7 +753,7 @@ private void PruneBlockedQueue(DocumentsWriterDeleteQueue flushingQueue) if (blockedFlush.Dwpt.deleteQueue == flushingQueue) { blockedFlushes.Remove(node); - Debugging.Assert(() => !flushingWriters.ContainsKey(blockedFlush.Dwpt), () => "DWPT is already flushing"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !flushingWriters.ContainsKey(blockedFlush.Dwpt), () => "DWPT is already flushing"); // Record the flushing DWPT to reduce flushBytes in doAfterFlush flushingWriters[blockedFlush.Dwpt] = blockedFlush.Bytes; // don't decr pending here - its already done when DWPT is blocked @@ -752,16 +767,19 @@ internal void FinishFullFlush() { lock (this) { - Debugging.Assert(() => fullFlush); - Debugging.Assert(() => flushQueue.Count == 0); - Debugging.Assert(() => flushingWriters.Count == 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => fullFlush); + Debugging.Assert(() => flushQueue.Count == 0); + Debugging.Assert(() => flushingWriters.Count == 0); + } try { if (blockedFlushes.Count > 0) { - Debugging.Assert(() => AssertBlockedFlushes(documentsWriter.deleteQueue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertBlockedFlushes(documentsWriter.deleteQueue)); PruneBlockedQueue(documentsWriter.deleteQueue); - Debugging.Assert(() => blockedFlushes.Count == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => blockedFlushes.Count == 0); } } finally @@ -776,7 +794,7 @@ internal bool AssertBlockedFlushes(DocumentsWriterDeleteQueue flushingQueue) { foreach (BlockedFlush blockedFlush in blockedFlushes) { - Debugging.Assert(() => 
blockedFlush.Dwpt.deleteQueue == flushingQueue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => blockedFlush.Dwpt.deleteQueue == flushingQueue); } return true; } diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs index 49f535bb29..4d18732137 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs @@ -63,13 +63,13 @@ internal virtual void AddDeletes(DocumentsWriterDeleteQueue deleteQueue) private void IncTickets() { int numTickets = ticketCount.IncrementAndGet(); - Debugging.Assert(() => numTickets > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTickets > 0); } private void DecTickets() { int numTickets = ticketCount.DecrementAndGet(); - Debugging.Assert(() => numTickets >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTickets >= 0); } internal virtual SegmentFlushTicket AddFlushTicket(DocumentsWriterPerThread dwpt) @@ -121,14 +121,14 @@ internal virtual bool HasTickets { get { - Debugging.Assert(() => ticketCount >= 0, () => "ticketCount should be >= 0 but was: " + ticketCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ticketCount >= 0, () => "ticketCount should be >= 0 but was: " + ticketCount); return ticketCount != 0; } } private int InnerPurge(IndexWriter writer) { - Debugging.Assert(() => purgeLock.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => purgeLock.IsHeldByCurrentThread); int numPurged = 0; while (true) { @@ -159,7 +159,7 @@ private int InnerPurge(IndexWriter writer) // finally remove the published ticket from the queue FlushTicket poll = queue.Dequeue(); ticketCount.DecrementAndGet(); - Debugging.Assert(() => poll == head); + if (Debugging.AssertsEnabled) Debugging.Assert(() => poll == head); } } } @@ -173,8 +173,11 @@ private int InnerPurge(IndexWriter writer) internal virtual int ForcePurge(IndexWriter writer) { - Debugging.Assert(() => !Monitor.IsEntered(this)); - Debugging.Assert(() => !Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !Monitor.IsEntered(this)); + Debugging.Assert(() => !Monitor.IsEntered(writer)); + } purgeLock.@Lock(); try { @@ -188,8 +191,11 @@ internal virtual int ForcePurge(IndexWriter writer) internal virtual int TryPurge(IndexWriter writer) { - Debugging.Assert(() => !Monitor.IsEntered(this)); - Debugging.Assert(() => !Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !Monitor.IsEntered(this)); + Debugging.Assert(() => !Monitor.IsEntered(writer)); + } if (purgeLock.TryLock()) { try @@ -222,7 +228,7 @@ internal abstract class FlushTicket protected FlushTicket(FrozenBufferedUpdates frozenUpdates) { - Debugging.Assert(() => frozenUpdates != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => frozenUpdates != null); this.m_frozenUpdates = frozenUpdates; } @@ -238,8 +244,11 @@ protected FlushTicket(FrozenBufferedUpdates frozenUpdates) /// protected void PublishFlushedSegment(IndexWriter indexWriter, FlushedSegment newSegment, FrozenBufferedUpdates globalPacket) { - Debugging.Assert(() => newSegment != null); - Debugging.Assert(() => newSegment.segmentInfo != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => newSegment != null); + Debugging.Assert(() => newSegment.segmentInfo != null); + } FrozenBufferedUpdates segmentUpdates = newSegment.segmentUpdates; //System.out.println("FLUSH: " + newSegment.segmentInfo.info.name); if 
(indexWriter.infoStream.IsEnabled("DW")) @@ -260,7 +269,7 @@ protected void FinishFlush(IndexWriter indexWriter, FlushedSegment newSegment, F // Finish the flushed segment and publish it to IndexWriter if (newSegment == null) { - Debugging.Assert(() => bufferedUpdates != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferedUpdates != null); if (bufferedUpdates != null && bufferedUpdates.Any()) { indexWriter.PublishFrozenUpdates(bufferedUpdates); @@ -286,7 +295,7 @@ internal GlobalDeletesTicket(FrozenBufferedUpdates frozenUpdates) // LUCENENET N protected internal override void Publish(IndexWriter writer) { - Debugging.Assert(() => !m_published, () => "ticket was already publised - can not publish twice"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !m_published, () => "ticket was already published - can not publish twice"); m_published = true; // its a global ticket - no segment to publish FinishFlush(writer, null, m_frozenUpdates); @@ -307,20 +316,20 @@ internal SegmentFlushTicket(FrozenBufferedUpdates frozenDeletes) // LUCENENET NO protected internal override void Publish(IndexWriter writer) { - Debugging.Assert(() => !m_published, () => "ticket was already publised - can not publish twice"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !m_published, () => "ticket was already published - can not publish twice"); m_published = true; FinishFlush(writer, segment, m_frozenUpdates); } internal void SetSegment(FlushedSegment segment) // LUCENENET NOTE: Made internal rather than protected because class is sealed { - Debugging.Assert(() => !failed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !failed); this.segment = segment; } internal void SetFailed() // LUCENENET NOTE: Made internal rather than protected because class is sealed { - Debugging.Assert(() => segment == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => segment == null); failed = true; } diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs index 349848eaac..3e66603568 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs @@ -236,12 +236,12 @@ public DocumentsWriterPerThread(string segmentName, Directory directory, LiveInd pendingUpdates = new BufferedUpdates(); intBlockAllocator = new Int32BlockAllocator(bytesUsed); this.deleteQueue = deleteQueue; - Debugging.Assert(() => numDocsInRAM == 0, () => "num docs " + numDocsInRAM); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocsInRAM == 0, () => "num docs " + numDocsInRAM); pendingUpdates.Clear(); deleteSlice = deleteQueue.NewSlice(); segmentInfo = new SegmentInfo(directoryOrig, Constants.LUCENE_MAIN_VERSION, segmentName, -1, false, codec, null); - Debugging.Assert(() => numDocsInRAM == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocsInRAM == 0); if (INFO_VERBOSE && infoStream.IsEnabled("DWPT")) { infoStream.Message("DWPT", Thread.CurrentThread.Name + " init seg=" + segmentName + " delQueue=" + deleteQueue); @@ -274,8 +274,11 @@ internal bool TestPoint(string message) public virtual void UpdateDocument(IEnumerable doc, Analyzer analyzer, Term delTerm) { - Debugging.Assert(() => TestPoint("DocumentsWriterPerThread addDocument start")); - Debugging.Assert(() => deleteQueue != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => TestPoint("DocumentsWriterPerThread addDocument start")); + Debugging.Assert(() => deleteQueue != null); + } docState.doc = doc;
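// One detail worth calling out from the FlushTicket hunks above: the state change stays
// outside the assert. Because Debugging.Assert can be switched off at runtime, anything
// inside the lambda may silently never execute, so only side-effect-free checks belong
// there (the same reasoning behind the IncWaiters/DecrWaiters note later in this patch).
// The safe shape, sketched:

if (Debugging.AssertsEnabled)
    Debugging.Assert(() => !m_published, () => "ticket was already published - can not publish twice");
m_published = true; // the flip must not live inside the assert lambda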
docState.analyzer = analyzer; docState.docID = numDocsInRAM; @@ -330,8 +333,11 @@ public virtual void UpdateDocument(IEnumerable doc, Analyzer an public virtual int UpdateDocuments(IEnumerable> docs, Analyzer analyzer, Term delTerm) { - Debugging.Assert(() => TestPoint("DocumentsWriterPerThread addDocuments start")); - Debugging.Assert(() => deleteQueue != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => TestPoint("DocumentsWriterPerThread addDocuments start")); + Debugging.Assert(() => deleteQueue != null); + } docState.analyzer = analyzer; if (INFO_VERBOSE && infoStream.IsEnabled("DWPT")) { @@ -394,7 +400,7 @@ public virtual int UpdateDocuments(IEnumerable> doc if (delTerm != null) { deleteQueue.Add(delTerm, deleteSlice); - Debugging.Assert(() => deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); deleteSlice.Apply(pendingUpdates, numDocsInRAM - docCount); } } @@ -433,7 +439,7 @@ private void FinishDocument(Term delTerm) if (delTerm != null) { deleteQueue.Add(delTerm, deleteSlice); - Debugging.Assert(() => deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); } else { @@ -484,7 +490,7 @@ internal virtual void DeleteDocID(int docIDUpto) /// internal virtual FrozenBufferedUpdates PrepareFlush() { - Debugging.Assert(() => numDocsInRAM > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocsInRAM > 0); FrozenBufferedUpdates globalUpdates = deleteQueue.FreezeGlobalBuffer(deleteSlice); /* deleteSlice can possibly be null if we have hit non-aborting exceptions during indexing and never succeeded adding a document. */ @@ -492,7 +498,7 @@ adding a document. */ { // apply all deletes before we flush and release the delete slice deleteSlice.Apply(pendingUpdates, numDocsInRAM); - Debugging.Assert(() => deleteSlice.IsEmpty); + if (Debugging.AssertsEnabled) Debugging.Assert(() => deleteSlice.IsEmpty); deleteSlice.Reset(); } return globalUpdates; @@ -503,8 +509,11 @@ adding a document. 
*/ [MethodImpl(MethodImplOptions.NoInlining)] internal virtual FlushedSegment Flush() { - Debugging.Assert(() => numDocsInRAM > 0); - Debugging.Assert(() => deleteSlice.IsEmpty, () => "all deletes must be applied in prepareFlush"); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => numDocsInRAM > 0); + Debugging.Assert(() => deleteSlice.IsEmpty, () => "all deletes must be applied in prepareFlush"); + } segmentInfo.DocCount = numDocsInRAM; SegmentWriteState flushState = new SegmentWriteState(infoStream, directory, segmentInfo, fieldInfos.Finish(), indexWriterConfig.TermIndexInterval, pendingUpdates, new IOContext(new FlushInfo(numDocsInRAM, BytesUsed))); double startMBUsed = BytesUsed / 1024.0 / 1024.0; @@ -572,7 +581,7 @@ internal virtual FlushedSegment Flush() infoStream.Message("DWPT", "flushed: segment=" + segmentInfo.Name + " ramUsed=" + startMBUsed.ToString(nf) + " MB" + " newFlushedSize(includes docstores)=" + newSegmentSize.ToString(nf) + " MB" + " docs/MB=" + (flushState.SegmentInfo.DocCount / newSegmentSize).ToString(nf)); } - Debugging.Assert(() => segmentInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => segmentInfo != null); FlushedSegment fs = new FlushedSegment(segmentInfoPerCommit, flushState.FieldInfos, segmentDeletes, flushState.LiveDocs, flushState.DelCountOnFlush); SealFlushedSegment(fs); @@ -600,7 +609,7 @@ internal virtual FlushedSegment Flush() [MethodImpl(MethodImplOptions.NoInlining)] internal virtual void SealFlushedSegment(FlushedSegment flushedSegment) { - Debugging.Assert(() => flushedSegment != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => flushedSegment != null); SegmentCommitInfo newSegment = flushedSegment.segmentInfo; @@ -632,7 +641,7 @@ internal virtual void SealFlushedSegment(FlushedSegment flushedSegment) if (flushedSegment.liveDocs != null) { int delCount = flushedSegment.delCount; - Debugging.Assert(() => delCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount > 0); if (infoStream.IsEnabled("DWPT")) { infoStream.Message("DWPT", "flush: write " + delCount + " deletes gen=" + flushedSegment.segmentInfo.DelGen); diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs index ef4deb7e0d..2dacc4c2ef 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs @@ -80,14 +80,14 @@ internal ThreadState(DocumentsWriterPerThread dpwt) /// internal void Deactivate() // LUCENENET NOTE: Made internal because it is called outside of this context { - Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); isActive = false; Reset(); } internal void Reset() // LUCENENET NOTE: Made internal because it is called outside of this context { - Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); this.dwpt = null; this.bytesUsed = 0; this.flushPending = false; @@ -102,7 +102,7 @@ internal bool IsActive { get { - Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); return isActive; } @@ -112,7 +112,7 @@ internal bool IsInitialized { get { - Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); return IsActive && dwpt != null; } } @@ -126,7 +126,7 @@ public long 
BytesUsedPerThread { get { - Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); // public for FlushPolicy return bytesUsed; } @@ -139,7 +139,7 @@ public DocumentsWriterPerThread DocumentsWriterPerThread { get { - Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); // public for FlushPolicy return dwpt; } @@ -226,12 +226,12 @@ public virtual ThreadState NewThreadState() { // unreleased thread states are deactivated during DW#close() numThreadStatesActive++; // increment will publish the ThreadState - Debugging.Assert(() => threadState.dwpt == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState.dwpt == null); unlock = false; return threadState; } // unlock since the threadstate is not active anymore - we are closed! - Debugging.Assert(AssertUnreleasedThreadStatesInactive); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertUnreleasedThreadStatesInactive); return null; } finally @@ -253,10 +253,10 @@ private bool AssertUnreleasedThreadStatesInactive() { for (int i = numThreadStatesActive; i < threadStates.Length; i++) { - Debugging.Assert(() => threadStates[i].TryLock(), () => "unreleased threadstate should not be locked"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => threadStates[i].TryLock(), () => "unreleased threadstate should not be locked"); try { - Debugging.Assert(() => !threadStates[i].IsInitialized, () => "expected unreleased thread state to be inactive"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !threadStates[i].IsInitialized, () => "expected unreleased thread state to be inactive"); } finally { @@ -292,7 +292,7 @@ internal virtual void DeactivateUnreleasedStates() internal virtual DocumentsWriterPerThread Reset(ThreadState threadState, bool closed) { - Debugging.Assert(() => threadState.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState.IsHeldByCurrentThread); DocumentsWriterPerThread dwpt = threadState.dwpt; if (!closed) { @@ -382,7 +382,7 @@ internal virtual int NumDeactivatedThreadStates() /// the state to deactivate internal virtual void DeactivateThreadState(ThreadState threadState) { - Debugging.Assert(() => threadState.IsActive); + if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState.IsActive); threadState.Deactivate(); } } diff --git a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs index ab3bb0dd4a..81c16ce064 100644 --- a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs @@ -88,10 +88,10 @@ internal void WaitIfStalled() // LUCENENET: make sure not to run IncWaiters / DecrWaiters in Debugging.Assert as that gets // disabled in production var result = IncWaiters(); - Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(() => result); Monitor.Wait(this); result = DecrWaiters(); - Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(() => result); //#if !NETSTANDARD1_6 // LUCENENET NOTE: Senseless to catch and rethrow the same exception type // } // catch (ThreadInterruptedException e) @@ -112,7 +112,7 @@ internal bool AnyStalledThreads() private bool IncWaiters() { numWaiting++; - Debugging.Assert(() => !waiting.ContainsKey(ThreadJob.CurrentThread)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 
!waiting.ContainsKey(ThreadJob.CurrentThread)); waiting[ThreadJob.CurrentThread] = true; return numWaiting > 0; @@ -122,7 +122,7 @@ private bool DecrWaiters() { numWaiting--; bool removed = waiting.Remove(ThreadJob.CurrentThread); - Debugging.Assert(() => removed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => removed); return numWaiting >= 0; } diff --git a/src/Lucene.Net/Index/FieldInfo.cs b/src/Lucene.Net/Index/FieldInfo.cs index 841f800bc4..9a61c25c07 100644 --- a/src/Lucene.Net/Index/FieldInfo.cs +++ b/src/Lucene.Net/Index/FieldInfo.cs @@ -87,28 +87,31 @@ public FieldInfo(string name, bool indexed, int number, bool storeTermVector, bo this.normType = DocValuesType.NONE; } this.attributes = attributes; - Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); } private bool CheckConsistency() { if (!indexed) { - Debugging.Assert(() => !storeTermVector); - Debugging.Assert(() => !storePayloads); - Debugging.Assert(() => !omitNorms); - Debugging.Assert(() => normType == DocValuesType.NONE); - Debugging.Assert(() => indexOptions == IndexOptions.NONE); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !storeTermVector); + Debugging.Assert(() => !storePayloads); + Debugging.Assert(() => !omitNorms); + Debugging.Assert(() => normType == DocValuesType.NONE); + Debugging.Assert(() => indexOptions == IndexOptions.NONE); + } } else { - Debugging.Assert(() => indexOptions != IndexOptions.NONE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions != IndexOptions.NONE); if (omitNorms) { - Debugging.Assert(() => normType == DocValuesType.NONE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => normType == DocValuesType.NONE); } // Cannot store payloads unless positions are indexed: - Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !this.storePayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !this.storePayloads); } return true; @@ -160,7 +163,7 @@ internal void Update(bool indexed, bool storeTermVector, bool omitNorms, bool st } } } - Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); } public DocValuesType DocValuesType @@ -173,7 +176,7 @@ internal set throw new ArgumentException("cannot change DocValues type from " + docValueType + " to " + value + " for field \"" + Name + "\""); } docValueType = value; - Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); } } @@ -207,14 +210,14 @@ internal set throw new ArgumentException("cannot change Norm type from " + normType + " to " + value + " for field \"" + Name + "\""); } normType = value; - Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); } } internal void SetStoreTermVectors() { storeTermVector = true; - Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); } internal void SetStorePayloads() @@ -223,7 +226,7 @@ internal void SetStorePayloads() { storePayloads = true; } - Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); } /// diff --git a/src/Lucene.Net/Index/FieldInfos.cs b/src/Lucene.Net/Index/FieldInfos.cs index 5c5ba9b87d..64ae7e437c 100644 --- a/src/Lucene.Net/Index/FieldInfos.cs +++ b/src/Lucene.Net/Index/FieldInfos.cs @@ -136,7 +136,7 @@ public virtual int Count 
{ get { - Debugging.Assert(() => byNumber.Count == byName.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => byNumber.Count == byName.Count); return byNumber.Count; } } @@ -314,7 +314,7 @@ internal void SetDocValuesType(int number, string name, DocValuesType dvType) { lock (this) { - Debugging.Assert(() => ContainsConsistent(number, name, dvType)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ContainsConsistent(number, name, dvType)); docValuesType[name] = dvType; } } @@ -335,7 +335,7 @@ internal Builder() /// internal Builder(FieldNumbers globalFieldNumbers) { - Debugging.Assert(() => globalFieldNumbers != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => globalFieldNumbers != null); this.globalFieldNumbers = globalFieldNumbers; } @@ -376,8 +376,11 @@ private FieldInfo AddOrUpdateInternal(string name, int preferredFieldNumber, boo // else we'll allocate a new one: int fieldNumber = globalFieldNumbers.AddOrGet(name, preferredFieldNumber, docValues); fi = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, normType, null); - Debugging.Assert(() => !byName.ContainsKey(fi.Name)); - Debugging.Assert(() => globalFieldNumbers.ContainsConsistent(fi.Number, fi.Name, fi.DocValuesType)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !byName.ContainsKey(fi.Name)); + Debugging.Assert(() => globalFieldNumbers.ContainsConsistent(fi.Number, fi.Name, fi.DocValuesType)); + } byName[fi.Name] = fi; } else diff --git a/src/Lucene.Net/Index/FilteredTermsEnum.cs b/src/Lucene.Net/Index/FilteredTermsEnum.cs index d2deb3b2c8..209605804d 100644 --- a/src/Lucene.Net/Index/FilteredTermsEnum.cs +++ b/src/Lucene.Net/Index/FilteredTermsEnum.cs @@ -97,7 +97,7 @@ public FilteredTermsEnum(TermsEnum tenum) /// start with seek public FilteredTermsEnum(TermsEnum tenum, bool startWithSeek) { - Debugging.Assert(() => tenum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => tenum != null); this.tenum = tenum; doSeek = startWithSeek; } @@ -207,7 +207,7 @@ public override void SeekExact(BytesRef term, TermState state) /// public override TermState GetTermState() { - Debugging.Assert(() => tenum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => tenum != null); return tenum.GetTermState(); } @@ -224,7 +224,7 @@ public override BytesRef Next() BytesRef t = NextSeekTerm(actualTerm); //System.out.println(" seek to t=" + (t == null ? 
"null" : t.utf8ToString()) + " tenum=" + tenum); // Make sure we always seek forward: - Debugging.Assert(() => actualTerm == null || t == null || Comparer.Compare(t, actualTerm) > 0, () => "curTerm=" + actualTerm + " seekTerm=" + t); + if (Debugging.AssertsEnabled) Debugging.Assert(() => actualTerm == null || t == null || Comparer.Compare(t, actualTerm) > 0, () => "curTerm=" + actualTerm + " seekTerm=" + t); if (t == null || tenum.SeekCeil(t) == SeekStatus.END) { // no more terms to seek to or enum exhausted diff --git a/src/Lucene.Net/Index/FlushPolicy.cs b/src/Lucene.Net/Index/FlushPolicy.cs index dfef6ee36f..d525a77e55 100644 --- a/src/Lucene.Net/Index/FlushPolicy.cs +++ b/src/Lucene.Net/Index/FlushPolicy.cs @@ -113,11 +113,11 @@ protected internal virtual void Init(LiveIndexWriterConfig indexWriterConfig) /// protected virtual ThreadState FindLargestNonPendingWriter(DocumentsWriterFlushControl control, ThreadState perThreadState) { - Debugging.Assert(() => perThreadState.dwpt.NumDocsInRAM > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => perThreadState.dwpt.NumDocsInRAM > 0); long maxRamSoFar = perThreadState.bytesUsed; // the dwpt which needs to be flushed eventually ThreadState maxRamUsingThreadState = perThreadState; - Debugging.Assert(() => !perThreadState.flushPending, () => "DWPT should have flushed"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !perThreadState.flushPending, () => "DWPT should have flushed"); IEnumerator activePerThreadsIterator = control.AllActiveThreadStates(); while (activePerThreadsIterator.MoveNext()) { @@ -132,7 +132,7 @@ protected virtual ThreadState FindLargestNonPendingWriter(DocumentsWriterFlushCo } } } - Debugging.Assert(() => AssertMessage("set largest ram consuming thread pending on lower watermark")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertMessage("set largest ram consuming thread pending on lower watermark")); return maxRamUsingThreadState; } diff --git a/src/Lucene.Net/Index/FreqProxTermsWriter.cs b/src/Lucene.Net/Index/FreqProxTermsWriter.cs index 4f1b2c5954..90a2fbf456 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriter.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriter.cs @@ -89,7 +89,7 @@ public override void Flush(IDictionary fields fieldWriter.Flush(fieldInfo.Name, consumer, state); TermsHashPerField perField = fieldWriter.termsHashPerField; - Debugging.Assert(() => termsHash == null || termsHash == perField.termsHash); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsHash == null || termsHash == perField.termsHash); termsHash = perField.termsHash; int numPostings = perField.bytesHash.Count; perField.Reset(); diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs index e5757ea3be..0257751762 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs @@ -153,7 +153,7 @@ internal override void Start(IIndexableField f) internal void WriteProx(int termID, int proxCode) { //System.out.println("writeProx termID=" + termID + " proxCode=" + proxCode); - Debugging.Assert(() => hasProx); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasProx); BytesRef payload; if (payloadAttribute == null) { @@ -182,11 +182,11 @@ internal void WriteProx(int termID, int proxCode) internal void WriteOffsets(int termID, int offsetAccum) { - Debugging.Assert(() => hasOffsets); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasOffsets); int startOffset = offsetAccum + 
offsetAttribute.StartOffset; int endOffset = offsetAccum + offsetAttribute.EndOffset; FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; - Debugging.Assert(() => startOffset - postings.lastOffsets[termID] >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset - postings.lastOffsets[termID] >= 0); termsHashPerField.WriteVInt32(1, startOffset - postings.lastOffsets[termID]); termsHashPerField.WriteVInt32(1, endOffset - startOffset); @@ -197,7 +197,7 @@ internal override void NewTerm(int termID) { // First time we're seeing this term since the last // flush - Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.newTerm start")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.newTerm start")); FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; postings.lastDocIDs[termID] = docState.docID; @@ -219,7 +219,7 @@ internal override void NewTerm(int termID) } else { - Debugging.Assert(() => !hasOffsets); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasOffsets); } } fieldState.MaxTermFrequency = Math.Max(1, fieldState.MaxTermFrequency); @@ -228,18 +228,18 @@ internal override void NewTerm(int termID) internal override void AddTerm(int termID) { - Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.addTerm start")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.addTerm start")); FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; - Debugging.Assert(() => !hasFreq || postings.termFreqs[termID] > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasFreq || postings.termFreqs[termID] > 0); if (!hasFreq) { - Debugging.Assert(() => postings.termFreqs == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => postings.termFreqs == null); if (docState.docID != postings.lastDocIDs[termID]) { - Debugging.Assert(() => docState.docID > postings.lastDocIDs[termID]); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.docID > postings.lastDocIDs[termID]); termsHashPerField.WriteVInt32(0, postings.lastDocCodes[termID]); postings.lastDocCodes[termID] = docState.docID - postings.lastDocIDs[termID]; postings.lastDocIDs[termID] = docState.docID; @@ -248,7 +248,7 @@ internal override void AddTerm(int termID) } else if (docState.docID != postings.lastDocIDs[termID]) { - Debugging.Assert(() => docState.docID > postings.lastDocIDs[termID], () => "id: " + docState.docID + " postings ID: " + postings.lastDocIDs[termID] + " termID: " + termID); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.docID > postings.lastDocIDs[termID], () => "id: " + docState.docID + " postings ID: " + postings.lastDocIDs[termID] + " termID: " + termID); // Term not yet seen in the current doc but previously // seen in other doc(s) since the last flush @@ -278,7 +278,7 @@ internal override void AddTerm(int termID) } else { - Debugging.Assert(() => !hasOffsets); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasOffsets); } fieldState.UniqueTermCount++; } @@ -322,7 +322,7 @@ public FreqProxPostingsArray(int size, bool writeFreqs, bool writeProx, bool wri } else { - Debugging.Assert(() => !writeOffsets); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !writeOffsets); } //System.out.println("PA init freqs=" + writeFreqs + " pos=" + writeProx + " offs=" + writeOffsets); } @@ -340,7 +340,7 @@ internal override 
ParallelPostingsArray NewInstance(int size) internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) { - Debugging.Assert(() => toArray is FreqProxPostingsArray); + if (Debugging.AssertsEnabled) Debugging.Assert(() => toArray is FreqProxPostingsArray); FreqProxPostingsArray to = (FreqProxPostingsArray)toArray; base.CopyTo(toArray, numToCopy); @@ -349,17 +349,17 @@ internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) Array.Copy(lastDocCodes, 0, to.lastDocCodes, 0, numToCopy); if (lastPositions != null) { - Debugging.Assert(() => to.lastPositions != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => to.lastPositions != null); Array.Copy(lastPositions, 0, to.lastPositions, 0, numToCopy); } if (lastOffsets != null) { - Debugging.Assert(() => to.lastOffsets != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => to.lastOffsets != null); Array.Copy(lastOffsets, 0, to.lastOffsets, 0, numToCopy); } if (termFreqs != null) { - Debugging.Assert(() => to.termFreqs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => to.termFreqs != null); Array.Copy(termFreqs, 0, to.termFreqs, 0, numToCopy); } } @@ -416,7 +416,7 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState // new segment to the directory according to // currentFieldIndexOptions: IndexOptions currentFieldIndexOptions = fieldInfo.IndexOptions; - Debugging.Assert(() => currentFieldIndexOptions != IndexOptions.NONE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFieldIndexOptions != IndexOptions.NONE); bool writeTermFreq = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS) >= 0; bool writePositions = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; @@ -429,11 +429,14 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState //System.out.println("flush readTF=" + readTermFreq + " readPos=" + readPositions + " readOffs=" + readOffsets); // Make sure FieldInfo.update is working correctly!: - Debugging.Assert(() => !writeTermFreq || readTermFreq); - Debugging.Assert(() => !writePositions || readPositions); - Debugging.Assert(() => !writeOffsets || readOffsets); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !writeTermFreq || readTermFreq); + Debugging.Assert(() => !writePositions || readPositions); + Debugging.Assert(() => !writeOffsets || readOffsets); - Debugging.Assert(() => !writeOffsets || writePositions); + Debugging.Assert(() => !writeOffsets || writePositions); + } IDictionary segDeletes; if (state.SegUpdates != null && state.SegUpdates.terms.Count > 0) @@ -552,11 +555,11 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState } } - Debugging.Assert(() => docID != postings.lastDocIDs[termID]); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID != postings.lastDocIDs[termID]); } docFreq++; - Debugging.Assert(() => docID < state.SegmentInfo.DocCount, () => "doc=" + docID + " maxDoc=" + state.SegmentInfo.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID < state.SegmentInfo.DocCount, () => "doc=" + docID + " maxDoc=" + state.SegmentInfo.DocCount); // NOTE: we could check here if the docID was // deleted, and skip it. 
However, this is somewhat @@ -641,7 +644,7 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState { if (writeOffsets) { - Debugging.Assert(() => startOffset >= 0 && endOffset >= startOffset, () => "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset); + if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset >= 0 && endOffset >= startOffset, () => "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset); postingsConsumer.AddPosition(position, thisPayload, startOffset, endOffset); } else diff --git a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs index fc085a4b06..0c8d0a5a2f 100644 --- a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs +++ b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs @@ -67,7 +67,7 @@ internal class FrozenBufferedUpdates public FrozenBufferedUpdates(BufferedUpdates deletes, bool isSegmentPrivate) { this.isSegmentPrivate = isSegmentPrivate; - Debugging.Assert(() => !isSegmentPrivate || deletes.terms.Count == 0, () => "segment private package should only have del queries"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !isSegmentPrivate || deletes.terms.Count == 0, () => "segment private package should only have del queries"); Term[] termsArray = deletes.terms.Keys.ToArray(/*new Term[deletes.terms.Count]*/); termCount = termsArray.Length; @@ -140,12 +140,12 @@ public virtual long DelGen { set { - Debugging.Assert(() => this.gen == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.gen == -1); this.gen = value; } get { - Debugging.Assert(() => gen != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => gen != -1); return gen; } } diff --git a/src/Lucene.Net/Index/IndexFileDeleter.cs b/src/Lucene.Net/Index/IndexFileDeleter.cs index df29f181fa..cf239c6ed4 100644 --- a/src/Lucene.Net/Index/IndexFileDeleter.cs +++ b/src/Lucene.Net/Index/IndexFileDeleter.cs @@ -405,7 +405,7 @@ private void DeleteCommits() /// public void Refresh(string segmentName) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); string[] files = directory.ListAll(); string segmentPrefix1; @@ -446,7 +446,7 @@ public void Refresh() // Set to null so that we regenerate the list of pending // files; else we can accumulate same file more than // once - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); deletable = null; Refresh(null); } @@ -454,7 +454,7 @@ public void Refresh() public void Dispose() { // DecRef old files from the last checkpoint, if any: - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); if (lastFiles.Count > 0) { @@ -476,7 +476,7 @@ public void Dispose() /// internal void RevisitPolicy() { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); if (infoStream.IsEnabled("IFD")) { infoStream.Message("IFD", "now revisitPolicy"); @@ -491,7 +491,7 @@ internal void RevisitPolicy() public void DeletePendingFiles() { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); if (deletable != null) { IList oldDeletable = deletable; @@ -530,9 +530,9 @@ public void DeletePendingFiles() /// public void Checkpoint(SegmentInfos segmentInfos, bool isCommit) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); - Debugging.Assert(() => Monitor.IsEntered(writer)); + if 
(Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); long t0 = 0; if (infoStream.IsEnabled("IFD")) { @@ -576,7 +576,7 @@ public void Checkpoint(SegmentInfos segmentInfos, bool isCommit) internal void IncRef(SegmentInfos segmentInfos, bool isCommit) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); // If this is a commit point, also incRef the // segments_N file: foreach (string fileName in segmentInfos.GetFiles(directory, isCommit)) @@ -587,7 +587,7 @@ internal void IncRef(SegmentInfos segmentInfos, bool isCommit) internal void IncRef(ICollection files) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); foreach (string file in files) { IncRef(file); @@ -596,7 +596,7 @@ internal void IncRef(ICollection files) internal void IncRef(string fileName) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); RefCount rc = GetRefCount(fileName); if (infoStream.IsEnabled("IFD")) { @@ -610,7 +610,7 @@ internal void IncRef(string fileName) internal void DecRef(ICollection files) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); foreach (string file in files) { DecRef(file); @@ -619,7 +619,7 @@ internal void DecRef(ICollection files) internal void DecRef(string fileName) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); RefCount rc = GetRefCount(fileName); if (infoStream.IsEnabled("IFD")) { @@ -639,7 +639,7 @@ internal void DecRef(string fileName) internal void DecRef(SegmentInfos segmentInfos) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); foreach (string file in segmentInfos.GetFiles(directory, false)) { DecRef(file); @@ -648,14 +648,14 @@ internal void DecRef(SegmentInfos segmentInfos) public bool Exists(string fileName) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); // LUCENENET: Using TryGetValue to eliminate extra lookup return refCounts.TryGetValue(fileName, out RefCount value) && value.count > 0; } private RefCount GetRefCount(string fileName) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); // LUCENENET: Using TryGetValue to eliminate extra lookup if (!refCounts.TryGetValue(fileName, out RefCount rc)) { @@ -667,7 +667,7 @@ private RefCount GetRefCount(string fileName) internal void DeleteFiles(IList files) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); foreach (string file in files) { DeleteFile(file); @@ -680,7 +680,7 @@ internal void DeleteFiles(IList files) /// internal void DeleteNewFiles(ICollection files) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); foreach (string fileName in files) { // NOTE: it's very unusual yet possible for the @@ -705,7 +705,7 @@ internal void DeleteNewFiles(ICollection files) internal void DeleteFile(string fileName) { - Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); EnsureOpen(); try { @@ -724,7 +724,7 @@ internal void DeleteFile(string fileName) // the file is open in another process, and queue // the file for subsequent deletion. 
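
// Why each of these call sites now reads the flag first: Debugging.Assert
// takes its condition as a Func<bool>, so a bare call must build a delegate
// (and a closure over any captured locals, such as fileName here) on every
// invocation, release build included; guarding on AssertsEnabled reduces a
// disabled assert to a single branch. A minimal sketch of the two shapes,
// using the fileName parameter already in scope in DeleteFile:

// unguarded: the delegate and its closure are allocated on every call
Debugging.Assert(() => fileName != null);
// guarded: nothing is allocated unless assertions are turned on
if (Debugging.AssertsEnabled) Debugging.Assert(() => fileName != null);
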
- //Debugging.Assert(() => e.Message.Contains("cannot delete")); + //if (Debugging.AssertsEnabled) Debugging.Assert(() => e.Message.Contains("cannot delete")); if (infoStream.IsEnabled("IFD")) { @@ -764,14 +764,14 @@ public int IncRef() } else { - Debugging.Assert(() => count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-increment for file \"" + fileName + "\""); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-increment for file \"" + fileName + "\""); } return ++count; } public int DecRef() { - Debugging.Assert(() => count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-decrement for file \"" + fileName + "\""); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-decrement for file \"" + fileName + "\""); return --count; } } diff --git a/src/Lucene.Net/Index/IndexFileNames.cs b/src/Lucene.Net/Index/IndexFileNames.cs index 1891373915..0aabe9a461 100644 --- a/src/Lucene.Net/Index/IndexFileNames.cs +++ b/src/Lucene.Net/Index/IndexFileNames.cs @@ -106,7 +106,7 @@ public static string FileNameFromGeneration(string @base, string ext, long gen) } else { - Debugging.Assert(() => gen > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => gen > 0); // The '6' part in the length is: 1 for '.', 1 for '_' and 4 as estimate // to the gen length as string (hopefully an upper limit so SB won't // expand in the middle. @@ -139,7 +139,7 @@ public static string SegmentFileName(string segmentName, string segmentSuffix, s { if (ext.Length > 0 || segmentSuffix.Length > 0) { - Debugging.Assert(() => !ext.StartsWith(".", StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !ext.StartsWith(".", StringComparison.Ordinal)); StringBuilder sb = new StringBuilder(segmentName.Length + 2 + segmentSuffix.Length + ext.Length); sb.Append(segmentName); if (segmentSuffix.Length > 0) diff --git a/src/Lucene.Net/Index/IndexFormatTooNewException.cs b/src/Lucene.Net/Index/IndexFormatTooNewException.cs index 4b6779407c..ebd4f6f16c 100644 --- a/src/Lucene.Net/Index/IndexFormatTooNewException.cs +++ b/src/Lucene.Net/Index/IndexFormatTooNewException.cs @@ -48,7 +48,7 @@ public class IndexFormatTooNewException : CorruptIndexException public IndexFormatTooNewException(string resourceDesc, int version, int minVersion, int maxVersion) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + " (needs to be between " + minVersion + " and " + maxVersion + ")") { - Debugging.Assert(() => resourceDesc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => resourceDesc != null); } /// diff --git a/src/Lucene.Net/Index/IndexFormatTooOldException.cs b/src/Lucene.Net/Index/IndexFormatTooOldException.cs index 7cec690d0d..6153bfcb4b 100644 --- a/src/Lucene.Net/Index/IndexFormatTooOldException.cs +++ b/src/Lucene.Net/Index/IndexFormatTooOldException.cs @@ -46,7 +46,7 @@ public class IndexFormatTooOldException : CorruptIndexException public IndexFormatTooOldException(string resourceDesc, string version) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + ". 
this version of Lucene only supports indexes created with release 3.0 and later.") { - Debugging.Assert(() => resourceDesc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => resourceDesc != null); } /// @@ -73,7 +73,7 @@ public IndexFormatTooOldException(DataInput input, string version) public IndexFormatTooOldException(string resourceDesc, int version, int minVersion, int maxVersion) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + " (needs to be between " + minVersion + " and " + maxVersion + "). this version of Lucene only supports indexes created with release 3.0 and later.") { - Debugging.Assert(() => resourceDesc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => resourceDesc != null); } /// diff --git a/src/Lucene.Net/Index/IndexWriter.cs b/src/Lucene.Net/Index/IndexWriter.cs index 8d3d47fd07..8c9cc5dc94 100644 --- a/src/Lucene.Net/Index/IndexWriter.cs +++ b/src/Lucene.Net/Index/IndexWriter.cs @@ -476,7 +476,7 @@ public virtual void Drop(SegmentCommitInfo info) readerMap.TryGetValue(info, out rld); if (rld != null) { - Debugging.Assert(() => info == rld.Info); + if (Debugging.AssertsEnabled) Debugging.Assert(() => info == rld.Info); // System.out.println("[" + Thread.currentThread().getName() + "] ReaderPool.drop: " + info); readerMap.Remove(info); rld.DropReaders(); @@ -516,7 +516,7 @@ public virtual void Release(ReadersAndUpdates rld, bool assertInfoLive) rld.DecRef(); // Pool still holds a ref: - Debugging.Assert(() => rld.RefCount() >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rld.RefCount() >= 1); if (!outerInstance.poolReaders && rld.RefCount() == 1) { @@ -526,7 +526,7 @@ public virtual void Release(ReadersAndUpdates rld, bool assertInfoLive) if (rld.WriteLiveDocs(outerInstance.directory)) { // Make sure we only write del docs for a live segment: - Debugging.Assert(() => assertInfoLive == false || InfoIsLive(rld.Info)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => assertInfoLive == false || InfoIsLive(rld.Info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -575,7 +575,7 @@ internal virtual void DropAll(bool doSave) if (doSave && rld.WriteLiveDocs(outerInstance.directory)) // Throws IOException { // Make sure we only write del docs and field updates for a live segment: - Debugging.Assert(() => InfoIsLive(rld.Info)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => InfoIsLive(rld.Info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -644,7 +644,7 @@ internal virtual void DropAll(bool doSave) // before possibly throwing an exception. 
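
// Where assertions cluster, the patch hoists a single AssertsEnabled check
// over the whole group (see TryDeleteDocument and CopySegmentAsIs below) so
// the flag is read once and the related checks stay together. Condensed
// shape of that transformation, with docID and reader as in TryDeleteDocument:
if (Debugging.AssertsEnabled)
{
    Debugging.Assert(() => docID >= 0);
    Debugging.Assert(() => docID < reader.MaxDoc);
}
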
readerMap.RemoveAll(toDelete); - Debugging.Assert(() => readerMap.Count == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => readerMap.Count == 0); IOUtils.ReThrow(priorE); } } @@ -663,11 +663,11 @@ public virtual void Commit(SegmentInfos infos) ReadersAndUpdates rld; if (readerMap.TryGetValue(info, out rld)) { - Debugging.Assert(() => rld.Info == info); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rld.Info == info); if (rld.WriteLiveDocs(outerInstance.directory)) { // Make sure we only write del docs for a live segment: - Debugging.Assert(() => InfoIsLive(info)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => InfoIsLive(info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -691,7 +691,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) { lock (this) { - Debugging.Assert(() => info.Info.Dir == outerInstance.directory, () => "info.dir=" + info.Info.Dir + " vs " + outerInstance.directory); + if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Dir == outerInstance.directory, () => "info.dir=" + info.Info.Dir + " vs " + outerInstance.directory); ReadersAndUpdates rld; readerMap.TryGetValue(info, out rld); @@ -707,7 +707,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) } else { - Debugging.Assert(() => rld.Info == info, () => "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rld.Info == info, () => "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info)); } if (create) @@ -716,7 +716,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) rld.IncRef(); } - Debugging.Assert(NoDups); + if (Debugging.AssertsEnabled) Debugging.Assert(NoDups); return rld; } @@ -731,7 +731,7 @@ private bool NoDups() JCG.HashSet seen = new JCG.HashSet(); foreach (SegmentCommitInfo info in readerMap.Keys) { - Debugging.Assert(() => !seen.Contains(info.Info.Name)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !seen.Contains(info.Info.Name)); seen.Add(info.Info.Name); } return true; @@ -1095,7 +1095,7 @@ public virtual void Dispose(bool waitForMerges) // LUCENENET TODO: API - mark pr else { CloseInternal(waitForMerges, true); - Debugging.Assert(AssertEventQueueAfterClose); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertEventQueueAfterClose); } } } @@ -1109,7 +1109,7 @@ private bool AssertEventQueueAfterClose() } foreach (IEvent e in eventQueue) { - Debugging.Assert(() => e is DocumentsWriter.MergePendingEvent, () => e.ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => e is DocumentsWriter.MergePendingEvent, () => e.ToString()); } return true; } @@ -1276,7 +1276,7 @@ private void CloseInternal(bool waitForMerges, bool doFlush) { closed = true; } - Debugging.Assert(() => docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); } catch (OutOfMemoryException oom) { @@ -1642,8 +1642,11 @@ public virtual bool 
TryDeleteDocument(IndexReader readerIn, int docID) int subIndex = ReaderUtil.SubIndex(docID, leaves); reader = leaves[subIndex].AtomicReader; docID -= leaves[subIndex].DocBase; - Debugging.Assert(() => docID >= 0); - Debugging.Assert(() => docID < reader.MaxDoc); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => docID >= 0); + Debugging.Assert(() => docID < reader.MaxDoc); + } } if (!(reader is SegmentReader)) @@ -2383,8 +2386,8 @@ private bool UpdatePendingMerges(MergeTrigger trigger, int maxNumSegments) { lock (this) { - Debugging.Assert(() => maxNumSegments == -1 || maxNumSegments > 0); - //Debugging.Assert(trigger != null); // LUCENENET NOTE: Enum cannot be null in .NET + if (Debugging.AssertsEnabled) Debugging.Assert(() => maxNumSegments == -1 || maxNumSegments > 0); + //if (Debugging.AssertsEnabled) Debugging.Assert(trigger != null); // LUCENENET NOTE: Enum cannot be null in .NET if (stopMerges) { return false; @@ -2399,7 +2402,7 @@ private bool UpdatePendingMerges(MergeTrigger trigger, int maxNumSegments) MergePolicy.MergeSpecification spec; if (maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS) { - Debugging.Assert(() => trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED, () => "Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED, () => "Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.ToString()); spec = mergePolicy.FindForcedMerges(segmentInfos, maxNumSegments, segmentsToMerge); newMergesFound = spec != null; if (newMergesFound) @@ -2569,7 +2572,7 @@ private void RollbackInternal() infoStream.Message("IW", "rollback: infos=" + SegString(segmentInfos.Segments)); } - Debugging.Assert(() => TestPoint("rollback before checkpoint")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("rollback before checkpoint")); // Ask deleter to locate unreferenced files & remove // them: @@ -2584,7 +2587,7 @@ private void RollbackInternal() IOUtils.Dispose(writeLock); // release write lock writeLock = null; - Debugging.Assert(() => docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); } success = true; @@ -2769,7 +2772,7 @@ private void FinishMerges(bool waitForMerges) stopMerges = false; Monitor.PulseAll(this); - Debugging.Assert(() => 0 == mergingSegments.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 0 == mergingSegments.Count); if (infoStream.IsEnabled("IW")) { @@ -2809,7 +2812,7 @@ public virtual void WaitForMerges() } // sanity check - Debugging.Assert(() => 0 == mergingSegments.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 0 == mergingSegments.Count); if (infoStream.IsEnabled("IW")) { @@ -2862,7 +2865,7 @@ internal virtual void PublishFrozenUpdates(FrozenBufferedUpdates packet) { lock (this) { - Debugging.Assert(() => packet != null && packet.Any()); + if (Debugging.AssertsEnabled) Debugging.Assert(() => packet != null && packet.Any()); lock (bufferedUpdatesStream) { 
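
// The second delegate defers message construction: Debugging.Assert invokes
// the Func<string> only when the condition is false, so concatenations like
// the ones in these hunks are never built on the passing path. Illustrative
// statements (the values here are made up for the sketch):
int docID = 5, maxDoc = 3;
if (Debugging.AssertsEnabled) Debugging.Assert(() => docID < maxDoc, () => "doc=" + docID + " maxDoc=" + maxDoc);
// Only a failure pays for the string; this one would throw with "doc=5 maxDoc=3".
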
bufferedUpdatesStream.Push(packet); @@ -3058,7 +3061,7 @@ public virtual void AddIndexes(params Directory[] dirs) JCG.HashSet copiedFiles = new JCG.HashSet(); foreach (SegmentCommitInfo info in sis.Segments) { - Debugging.Assert(() => !infos.Contains(info), () => "dup info dir=" + info.Info.Dir + " name=" + info.Info.Name); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !infos.Contains(info), () => "dup info dir=" + info.Info.Dir + " name=" + info.Info.Name); string newSegName = NewSegmentName(); @@ -3333,7 +3336,7 @@ private SegmentCommitInfo CopySegmentAsIs(SegmentCommitInfo info, string segName // because the DS might have been copied already, in which case we // just want to update the DS name of this SegmentInfo. string dsName = Lucene3xSegmentInfoFormat.GetDocStoreSegment(info.Info); - Debugging.Assert(() => dsName != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dsName != null); // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey if (!dsNames.TryGetValue(dsName, out string newDsName)) { @@ -3445,8 +3448,11 @@ private SegmentCommitInfo CopySegmentAsIs(SegmentCommitInfo info, string segName continue; } - Debugging.Assert(() => !SlowFileExists(directory, newFileName), () => "file \"" + newFileName + "\" already exists; siFiles=" + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", siFiles)); - Debugging.Assert(() => !copiedFiles.Contains(file), () => "file \"" + file + "\" is being copied more than once"); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !SlowFileExists(directory, newFileName), () => "file \"" + newFileName + "\" already exists; siFiles=" + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", siFiles)); + Debugging.Assert(() => !copiedFiles.Contains(file), () => "file \"" + file + "\" is being copied more than once"); + } copiedFiles.Add(file); info.Info.Dir.Copy(directory, file, newFileName, context); } @@ -4012,7 +4018,7 @@ private void SkipDeletedDoc(DocValuesFieldUpdates.Iterator[] updatesIters, int d // when entering the method, all iterators must already be beyond the // deleted document, or right on it, in which case we advance them over // and they must be beyond it now. 
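
// Validators such as CheckConsistency, NoDups, and FilesExist in this patch
// return true on success so a method group can be handed straight to Assert
// and the whole scan is skipped when assertions are off. Condensed sketch of
// the NoDups shape from the ReaderPool hunk above (renamed NoDupNames here,
// with a plain HashSet<string> standing in for the JCG collection):
bool NoDupNames()
{
    var seen = new HashSet<string>();
    foreach (SegmentCommitInfo info in readerMap.Keys)
    {
        if (Debugging.AssertsEnabled) Debugging.Assert(() => !seen.Contains(info.Info.Name));
        seen.Add(info.Info.Name);
    }
    return true; // only reached when no duplicate tripped the assert above
}
if (Debugging.AssertsEnabled) Debugging.Assert(NoDupNames);
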
- Debugging.Assert(() => iter.Doc > deletedDoc, () => "updateDoc=" + iter.Doc + " deletedDoc=" + deletedDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => iter.Doc > deletedDoc, () => "updateDoc=" + iter.Doc + " deletedDoc=" + deletedDoc); } } @@ -4032,7 +4038,7 @@ internal void Init(ReaderPool readerPool, MergePolicy.OneMerge merge, MergeState { mergedDeletesAndUpdates = readerPool.Get(merge.info, true); docMap = merge.GetDocMap(mergeState); - Debugging.Assert(() => docMap.IsConsistent(merge.info.Info.DocCount)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docMap.IsConsistent(merge.info.Info.DocCount)); } if (initWritableLiveDocs && !initializedWritableLiveDocs) { @@ -4064,7 +4070,7 @@ private void MaybeApplyMergedDVUpdates(MergePolicy.OneMerge merge, MergeState me } else { - Debugging.Assert(() => updatesIter.Doc > curDoc, () => "field=" + mergingFields[idx] + " updateDoc=" + updatesIter.Doc + " curDoc=" + curDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => updatesIter.Doc > curDoc, () => "field=" + mergingFields[idx] + " updateDoc=" + updatesIter.Doc + " curDoc=" + curDoc); } } } @@ -4083,7 +4089,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer { lock (this) { - Debugging.Assert(() => TestPoint("startCommitMergeDeletes")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("startCommitMergeDeletes")); IList sourceSegments = merge.Segments; @@ -4109,7 +4115,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer IBits prevLiveDocs = merge.readers[i].LiveDocs; ReadersAndUpdates rld = readerPool.Get(info, false); // We hold a ref so it should still be in the pool: - Debugging.Assert(() => rld != null, () => "seg=" + info.Info.Name); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rld != null, () => "seg=" + info.Info.Name); IBits currentLiveDocs = rld.LiveDocs; IDictionary mergingFieldUpdates = rld.MergingFieldUpdates; string[] mergingFields; @@ -4148,9 +4154,12 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer { // If we had deletions on starting the merge we must // still have deletions now: - Debugging.Assert(() => currentLiveDocs != null); - Debugging.Assert(() => prevLiveDocs.Length == docCount); - Debugging.Assert(() => currentLiveDocs.Length == docCount); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => currentLiveDocs != null); + Debugging.Assert(() => prevLiveDocs.Length == docCount); + Debugging.Assert(() => currentLiveDocs.Length == docCount); + } // There were deletes on this segment when the merge // started. 
The merge has collapsed away those @@ -4173,7 +4182,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer { if (!prevLiveDocs.Get(j)) { - Debugging.Assert(() => !currentLiveDocs.Get(j)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !currentLiveDocs.Get(j)); } else { @@ -4223,7 +4232,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer } else if (currentLiveDocs != null) { - Debugging.Assert(() => currentLiveDocs.Length == docCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentLiveDocs.Length == docCount); // this segment had no deletes before but now it // does: for (int j = 0; j < docCount; j++) @@ -4264,7 +4273,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer } } - Debugging.Assert(() => docUpto == merge.info.Info.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docUpto == merge.info.Info.DocCount); if (mergedDVUpdates.Any()) { @@ -4319,7 +4328,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) { lock (this) { - Debugging.Assert(() => TestPoint("startCommitMerge")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("startCommitMerge")); if (hitOOM) { @@ -4331,7 +4340,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) infoStream.Message("IW", "commitMerge: " + SegString(merge.Segments) + " index=" + SegString()); } - Debugging.Assert(() => merge.registerDone); + if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.registerDone); // If merge was explicitly aborted, or, if rollback() or // rollbackTransaction() had been called since our merge @@ -4367,7 +4376,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) // started), then we will switch to the compound // format as well: - Debugging.Assert(() => !segmentInfos.Contains(merge.info)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !segmentInfos.Contains(merge.info)); bool allDeleted = merge.Segments.Count == 0 || merge.info.Info.DocCount == 0 || (mergedUpdates != null && mergedUpdates.PendingDeleteCount == merge.info.Info.DocCount); @@ -4383,9 +4392,9 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) // If we merged no segments then we better be dropping // the new segment: - Debugging.Assert(() => merge.Segments.Count > 0 || dropSegment); + if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.Segments.Count > 0 || dropSegment); - Debugging.Assert(() => merge.info.Info.DocCount != 0 || keepFullyDeletedSegments || dropSegment); + if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.info.Info.DocCount != 0 || keepFullyDeletedSegments || dropSegment); if (mergedUpdates != null) { @@ -4420,7 +4429,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) if (dropSegment) { - Debugging.Assert(() => !segmentInfos.Contains(merge.info)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !segmentInfos.Contains(merge.info)); readerPool.Drop(merge.info); deleter.DeleteNewFiles(merge.info.GetFiles()); } @@ -4609,7 +4618,7 @@ internal bool RegisterMerge(MergePolicy.OneMerge merge) { return true; } - Debugging.Assert(() => merge.Segments.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.Segments.Count > 0); if (stopMerges) { @@ -4686,14 +4695,17 @@ internal bool RegisterMerge(MergePolicy.OneMerge merge) mergingSegments.Add(info); } - Debugging.Assert(() => merge.EstimatedMergeBytes == 0); - 
Debugging.Assert(() => merge.totalMergeBytes == 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => merge.EstimatedMergeBytes == 0); + Debugging.Assert(() => merge.totalMergeBytes == 0); + } foreach (SegmentCommitInfo info in merge.Segments) { if (info.Info.DocCount > 0) { int delCount = NumDeletedDocs(info); - Debugging.Assert(() => delCount <= info.Info.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount <= info.Info.DocCount); double delRatio = ((double)delCount) / info.Info.DocCount; merge.EstimatedMergeBytes += (long)(info.GetSizeInBytes() * (1.0 - delRatio)); merge.totalMergeBytes += info.GetSizeInBytes(); @@ -4739,10 +4751,13 @@ private void MergeInitImpl(MergePolicy.OneMerge merge) // LUCENENET specific: re { lock (this) { - Debugging.Assert(() => TestPoint("startMergeInit")); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => TestPoint("startMergeInit")); - Debugging.Assert(() => merge.registerDone); - Debugging.Assert(() => merge.MaxNumSegments == -1 || merge.MaxNumSegments > 0); + Debugging.Assert(() => merge.registerDone); + Debugging.Assert(() => merge.MaxNumSegments == -1 || merge.MaxNumSegments > 0); + } if (hitOOM) { @@ -4884,7 +4899,7 @@ private void CloseMergeReaders(MergePolicy.OneMerge merge, bool suppressExceptio { ReadersAndUpdates rld = readerPool.Get(sr.SegmentInfo, false); // We still hold a ref so it should not have been removed: - Debugging.Assert(() => rld != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rld != null); if (drop) { rld.DropChanges(); @@ -4971,8 +4986,11 @@ private int MergeMiddle(MergePolicy.OneMerge merge) liveDocs = rld.GetReadOnlyLiveDocs(); delCount = rld.PendingDeleteCount + info.DelCount; - Debugging.Assert(() => reader != null); - Debugging.Assert(rld.VerifyDocCounts); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => reader != null); + Debugging.Assert(rld.VerifyDocCounts); + } if (infoStream.IsEnabled("IW")) { @@ -4998,7 +5016,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) if (reader.NumDeletedDocs != delCount) { // fix the reader's live docs and del count - Debugging.Assert(() => delCount > reader.NumDeletedDocs); // beware of zombies + if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount > reader.NumDeletedDocs); // beware of zombies SegmentReader newReader = new SegmentReader(info, reader, liveDocs, info.Info.DocCount - delCount); bool released = false; @@ -5019,7 +5037,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) } merge.readers.Add(reader); - Debugging.Assert(() => delCount <= info.Info.DocCount, () => "delCount=" + delCount + " info.docCount=" + info.Info.DocCount + " rld.pendingDeleteCount=" + rld.PendingDeleteCount + " info.getDelCount()=" + info.DelCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount <= info.Info.DocCount, () => "delCount=" + delCount + " info.docCount=" + info.Info.DocCount + " rld.pendingDeleteCount=" + rld.PendingDeleteCount + " info.getDelCount()=" + info.DelCount); segUpto++; } @@ -5057,7 +5075,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) } } } - Debugging.Assert(() => mergeState.SegmentInfo == merge.info.Info); + if (Debugging.AssertsEnabled) Debugging.Assert(() => mergeState.SegmentInfo == merge.info.Info); merge.info.Info.SetFiles(new JCG.HashSet(dirWrapper.CreatedFiles)); // Record which codec was used to write the segment @@ -5243,7 +5261,7 @@ internal virtual void AddMergeException(MergePolicy.OneMerge merge) { lock (this) { - Debugging.Assert(() => merge.Exception != 
null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.Exception != null); if (!mergeExceptions.Contains(merge) && mergeGen == merge.mergeGen) { mergeExceptions.Add(merge); @@ -5361,13 +5379,13 @@ private bool FilesExist(SegmentInfos toSync) ICollection files = toSync.GetFiles(directory, false); foreach (string fileName in files) { - Debugging.Assert(() => SlowFileExists(directory, fileName), () => "file " + fileName + " does not exist; files=" + Arrays.ToString(directory.ListAll())); + if (Debugging.AssertsEnabled) Debugging.Assert(() => SlowFileExists(directory, fileName), () => "file " + fileName + " does not exist; files=" + Arrays.ToString(directory.ListAll())); // If this trips it means we are missing a call to // .checkpoint somewhere, because by the time we // are called, deleter should know about every // file referenced by the current head // segmentInfos: - Debugging.Assert(() => deleter.Exists(fileName), () => "IndexFileDeleter doesn't know about file " + fileName); + if (Debugging.AssertsEnabled) Debugging.Assert(() => deleter.Exists(fileName), () => "IndexFileDeleter doesn't know about file " + fileName); } return true; } @@ -5407,8 +5425,11 @@ internal virtual SegmentInfos ToLiveInfos(SegmentInfos sis) /// private void StartCommit(SegmentInfos toSync) { - Debugging.Assert(() => TestPoint("startStartCommit")); - Debugging.Assert(() => pendingCommit == null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => TestPoint("startStartCommit")); + Debugging.Assert(() => pendingCommit == null); + } if (hitOOM) { @@ -5424,7 +5445,7 @@ private void StartCommit(SegmentInfos toSync) lock (this) { - Debugging.Assert(() => lastCommitChangeCount <= changeCount, () => "lastCommitChangeCount=" + lastCommitChangeCount + " changeCount=" + changeCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastCommitChangeCount <= changeCount, () => "lastCommitChangeCount=" + lastCommitChangeCount + " changeCount=" + changeCount); if (pendingCommitChangeCount == lastCommitChangeCount) { @@ -5442,22 +5463,22 @@ private void StartCommit(SegmentInfos toSync) infoStream.Message("IW", "startCommit index=" + SegString(ToLiveInfos(toSync).Segments) + " changeCount=" + changeCount); } - Debugging.Assert(() => FilesExist(toSync)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => FilesExist(toSync)); } - Debugging.Assert(() => TestPoint("midStartCommit")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("midStartCommit")); bool pendingCommitSet = false; try { - Debugging.Assert(() => TestPoint("midStartCommit2")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("midStartCommit2")); lock (this) { - Debugging.Assert(() => pendingCommit == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingCommit == null); - Debugging.Assert(() => segmentInfos.Generation == toSync.Generation); + if (Debugging.AssertsEnabled) Debugging.Assert(() => segmentInfos.Generation == toSync.Generation); // Exception here means nothing is prepared // (this method unwinds everything it did on @@ -5494,7 +5515,7 @@ private void StartCommit(SegmentInfos toSync) infoStream.Message("IW", "done all syncs: " + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", filesToSync)); } - Debugging.Assert(() => TestPoint("midStartCommitSuccess")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("midStartCommitSuccess")); } finally { @@ -5524,7 +5545,7 @@ private void StartCommit(SegmentInfos toSync) { HandleOOM(oom, "startCommit"); } - 
Debugging.Assert(() => TestPoint("finishStartCommit")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("finishStartCommit")); } /// @@ -5694,7 +5715,7 @@ internal static ICollection CreateCompoundFile(InfoStream infoStream, Di { infoStream.Message("IW", "create compound file " + fileName); } - Debugging.Assert(() => Lucene3xSegmentInfoFormat.GetDocStoreOffset(info) == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Lucene3xSegmentInfoFormat.GetDocStoreOffset(info) == -1); // Now merge all added files ICollection files = info.GetFiles(); CompoundFileDirectory cfsDir = new CompoundFileDirectory(directory, fileName, context, true); diff --git a/src/Lucene.Net/Index/LogMergePolicy.cs b/src/Lucene.Net/Index/LogMergePolicy.cs index 74e4f30d98..251980f288 100644 --- a/src/Lucene.Net/Index/LogMergePolicy.cs +++ b/src/Lucene.Net/Index/LogMergePolicy.cs @@ -192,7 +192,7 @@ protected virtual long SizeDocs(SegmentCommitInfo info) if (m_calibrateSizeByDeletes) { int delCount = m_writer.Get().NumDeletedDocs(info); - Debugging.Assert(() => delCount <= info.Info.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount <= info.Info.DocCount); return (info.Info.DocCount - (long)delCount); } else @@ -378,7 +378,7 @@ private MergeSpecification FindForcedMergesMaxNumSegments(SegmentInfos infos, in /// public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxNumSegments, IDictionary segmentsToMerge) { - Debugging.Assert(() => maxNumSegments > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => maxNumSegments > 0); if (IsVerbose) { Message("findForcedMerges: maxNumSegs=" + maxNumSegments + " segsToMerge=" + @@ -469,7 +469,7 @@ public override MergeSpecification FindForcedDeletesMerges(SegmentInfos segmentI var spec = new MergeSpecification(); int firstSegmentWithDeletions = -1; IndexWriter w = m_writer.Get(); - Debugging.Assert(() => w != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => w != null); for (int i = 0; i < numSegments; i++) { SegmentCommitInfo info = segmentInfos.Info(i); @@ -692,7 +692,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment for (int i = start; i < end; i++) { mergeInfos.Add(levels[i].info); - Debugging.Assert(() => infos.Contains(levels[i].info)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => infos.Contains(levels[i].info)); } if (IsVerbose) { diff --git a/src/Lucene.Net/Index/MergePolicy.cs b/src/Lucene.Net/Index/MergePolicy.cs index 877553ac66..c31f8551ff 100644 --- a/src/Lucene.Net/Index/MergePolicy.cs +++ b/src/Lucene.Net/Index/MergePolicy.cs @@ -93,12 +93,12 @@ internal virtual bool IsConsistent(int maxDoc) int target = Map(i); if (target < 0 || target >= maxDoc) { - Debugging.Assert(() => false, () => "out of range: " + target + " not in [0-" + maxDoc + "["); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "out of range: " + target + " not in [0-" + maxDoc + "["); return false; } else if (targets.Get(target)) { - Debugging.Assert(() => false, () => target + " is already taken (" + i + ")"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => target + " is already taken (" + i + ")"); return false; } } @@ -721,7 +721,7 @@ protected virtual long Size(SegmentCommitInfo info) long byteSize = info.GetSizeInBytes(); int delCount = m_writer.Get().NumDeletedDocs(info); double delRatio = (info.Info.DocCount <= 0 ? 
0.0f : ((float)delCount / (float)info.Info.DocCount)); - Debugging.Assert(() => delRatio <= 1.0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => delRatio <= 1.0); return (info.Info.DocCount <= 0 ? byteSize : (long)(byteSize * (1.0 - delRatio))); } @@ -733,7 +733,7 @@ protected virtual long Size(SegmentCommitInfo info) protected bool IsMerged(SegmentInfos infos, SegmentCommitInfo info) { IndexWriter w = m_writer.Get(); - Debugging.Assert(() => w != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => w != null); bool hasDeletions = w.NumDeletedDocs(info) > 0; return !hasDeletions #pragma warning disable 612, 618 diff --git a/src/Lucene.Net/Index/MergeState.cs b/src/Lucene.Net/Index/MergeState.cs index 8b7da8937d..5a7af1bce7 100644 --- a/src/Lucene.Net/Index/MergeState.cs +++ b/src/Lucene.Net/Index/MergeState.cs @@ -82,7 +82,7 @@ public static DocMap Build(AtomicReader reader) internal static DocMap Build(int maxDoc, IBits liveDocs) { - Debugging.Assert(() => liveDocs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => liveDocs != null); MonotonicAppendingInt64Buffer docMap = new MonotonicAppendingInt64Buffer(); int del = 0; for (int i = 0; i < maxDoc; ++i) @@ -95,7 +95,7 @@ internal static DocMap Build(int maxDoc, IBits liveDocs) } docMap.Freeze(); int numDeletedDocs = del; - Debugging.Assert(() => docMap.Count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docMap.Count == maxDoc); return new DocMapAnonymousInnerClassHelper(maxDoc, liveDocs, docMap, numDeletedDocs); } diff --git a/src/Lucene.Net/Index/MultiBits.cs b/src/Lucene.Net/Index/MultiBits.cs index 6873dfed6a..27d11d315e 100644 --- a/src/Lucene.Net/Index/MultiBits.cs +++ b/src/Lucene.Net/Index/MultiBits.cs @@ -41,7 +41,7 @@ internal sealed class MultiBits : IBits public MultiBits(IBits[] subs, int[] starts, bool defaultValue) { - Debugging.Assert(() => starts.Length == 1 + subs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => starts.Length == 1 + subs.Length); this.subs = subs; this.starts = starts; this.sefaultValue = defaultValue; @@ -50,14 +50,14 @@ public MultiBits(IBits[] subs, int[] starts, bool defaultValue) private bool CheckLength(int reader, int doc) { int length = starts[1 + reader] - starts[reader]; - Debugging.Assert(() => doc - starts[reader] < length, () => "doc=" + doc + " reader=" + reader + " starts[reader]=" + starts[reader] + " length=" + length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => doc - starts[reader] < length, () => "doc=" + doc + " reader=" + reader + " starts[reader]=" + starts[reader] + " length=" + length); return true; } public bool Get(int doc) { int reader = ReaderUtil.SubIndex(doc, starts); - Debugging.Assert(() => reader != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => reader != -1); IBits bits = subs[reader]; if (bits == null) { @@ -65,7 +65,7 @@ public bool Get(int doc) } else { - Debugging.Assert(() => CheckLength(reader, doc)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => CheckLength(reader, doc)); return bits.Get(doc - starts[reader]); } } @@ -114,8 +114,11 @@ public sealed class SubResult public SubResult GetMatchingSub(ReaderSlice slice) { int reader = ReaderUtil.SubIndex(slice.Start, starts); - Debugging.Assert(() => reader != -1); - Debugging.Assert(() => reader < subs.Length, () => "slice=" + slice + " starts[-1]=" + starts[starts.Length - 1]); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => reader != -1); + Debugging.Assert(() => reader < subs.Length, () => "slice=" + 
slice + " starts[-1]=" + starts[starts.Length - 1]); + } SubResult subResult = new SubResult(); if (starts[reader] == slice.Start && starts[1 + reader] == slice.Start + slice.Length) { diff --git a/src/Lucene.Net/Index/MultiDocValues.cs b/src/Lucene.Net/Index/MultiDocValues.cs index 881b003a4c..adb0f70aea 100644 --- a/src/Lucene.Net/Index/MultiDocValues.cs +++ b/src/Lucene.Net/Index/MultiDocValues.cs @@ -99,7 +99,7 @@ public static NumericDocValues GetNormValues(IndexReader r, string field) } starts[size] = r.MaxDoc; - Debugging.Assert(() => anyReal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => anyReal); return new NumericDocValuesAnonymousInnerClassHelper(values, starts); } @@ -591,8 +591,11 @@ public class MultiSortedDocValues : SortedDocValues /// Creates a new over internal MultiSortedDocValues(SortedDocValues[] values, int[] docStarts, OrdinalMap mapping) { - Debugging.Assert(() => values.Length == mapping.ordDeltas.Length); - Debugging.Assert(() => docStarts.Length == values.Length + 1); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => values.Length == mapping.ordDeltas.Length); + Debugging.Assert(() => docStarts.Length == values.Length + 1); + } this.values = values; this.docStarts = docStarts; this.mapping = mapping; @@ -650,8 +653,11 @@ public class MultiSortedSetDocValues : SortedSetDocValues /// Creates a new over internal MultiSortedSetDocValues(SortedSetDocValues[] values, int[] docStarts, OrdinalMap mapping) { - Debugging.Assert(() => values.Length == mapping.ordDeltas.Length); - Debugging.Assert(() => docStarts.Length == values.Length + 1); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => values.Length == mapping.ordDeltas.Length); + Debugging.Assert(() => docStarts.Length == values.Length + 1); + } this.values = values; this.docStarts = docStarts; this.mapping = mapping; diff --git a/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs b/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs index 73653436d9..f6c0546dbd 100644 --- a/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs +++ b/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs @@ -90,7 +90,7 @@ public override int Freq { get { - Debugging.Assert(() => current != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => current != null); return current.Freq; } } @@ -99,7 +99,7 @@ public override int Freq public override int Advance(int target) { - Debugging.Assert(() => target > doc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => target > doc); while (true) { if (current != null) diff --git a/src/Lucene.Net/Index/MultiDocsEnum.cs b/src/Lucene.Net/Index/MultiDocsEnum.cs index 500ae66928..a2e947de55 100644 --- a/src/Lucene.Net/Index/MultiDocsEnum.cs +++ b/src/Lucene.Net/Index/MultiDocsEnum.cs @@ -92,7 +92,7 @@ public bool CanReuse(MultiTermsEnum parent) public override int Advance(int target) { - Debugging.Assert(() => target > doc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => target > doc); while (true) { if (current != null) diff --git a/src/Lucene.Net/Index/MultiFields.cs b/src/Lucene.Net/Index/MultiFields.cs index 8e1471ff1e..af358b3c72 100644 --- a/src/Lucene.Net/Index/MultiFields.cs +++ b/src/Lucene.Net/Index/MultiFields.cs @@ -117,7 +117,7 @@ public static IBits GetLiveDocs(IndexReader reader) { IList leaves = reader.Leaves; int size = leaves.Count; - Debugging.Assert(() => size > 0, () => "A reader with deletions must have at least one leave"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => size > 0, () => "A reader with deletions must have at least 
one leave"); if (size == 1) { return leaves[0].AtomicReader.LiveDocs; @@ -175,8 +175,11 @@ public static DocsEnum GetTermDocsEnum(IndexReader r, IBits liveDocs, string fie /// public static DocsEnum GetTermDocsEnum(IndexReader r, IBits liveDocs, string field, BytesRef term, DocsFlags flags) { - Debugging.Assert(() => field != null); - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => field != null); + Debugging.Assert(() => term != null); + } Terms terms = GetTerms(r, field); if (terms != null) { @@ -210,8 +213,11 @@ public static DocsAndPositionsEnum GetTermPositionsEnum(IndexReader r, IBits liv /// public static DocsAndPositionsEnum GetTermPositionsEnum(IndexReader r, IBits liveDocs, string field, BytesRef term, DocsAndPositionsFlags flags) { - Debugging.Assert(() => field != null); - Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => field != null); + Debugging.Assert(() => term != null); + } Terms terms = GetTerms(r, field); if (terms != null) { diff --git a/src/Lucene.Net/Index/MultiTerms.cs b/src/Lucene.Net/Index/MultiTerms.cs index 1c43e27f53..4cbf243e98 100644 --- a/src/Lucene.Net/Index/MultiTerms.cs +++ b/src/Lucene.Net/Index/MultiTerms.cs @@ -54,7 +54,7 @@ public MultiTerms(Terms[] subs, ReaderSlice[] subSlices) this.subSlices = subSlices; IComparer _termComp = null; - Debugging.Assert(() => subs.Length > 0, () => "inefficient: don't use MultiTerms over one sub"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => subs.Length > 0, () => "inefficient: don't use MultiTerms over one sub"); bool _hasFreqs = true; bool _hasOffsets = true; bool _hasPositions = true; diff --git a/src/Lucene.Net/Index/MultiTermsEnum.cs b/src/Lucene.Net/Index/MultiTermsEnum.cs index 7a8e6ff9a2..4ee98d0871 100644 --- a/src/Lucene.Net/Index/MultiTermsEnum.cs +++ b/src/Lucene.Net/Index/MultiTermsEnum.cs @@ -107,7 +107,7 @@ public MultiTermsEnum(ReaderSlice[] slices) /// public TermsEnum Reset(TermsEnumIndex[] termsEnumsIndex) { - Debugging.Assert(() => termsEnumsIndex.Length <= top.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnumsIndex.Length <= top.Length); numSubs = 0; numTop = 0; termComp = null; @@ -115,7 +115,7 @@ public TermsEnum Reset(TermsEnumIndex[] termsEnumsIndex) for (int i = 0; i < termsEnumsIndex.Length; i++) { TermsEnumIndex termsEnumIndex = termsEnumsIndex[i]; - Debugging.Assert(() => termsEnumIndex != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnumIndex != null); // init our term comp if (termComp == null) @@ -213,7 +213,7 @@ public override bool SeekExact(BytesRef term) { top[numTop++] = currentSubs[i]; current = currentSubs[i].Current = currentSubs[i].Terms.Term; - Debugging.Assert(() => term.Equals(currentSubs[i].Current)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Equals(currentSubs[i].Current)); } } @@ -285,7 +285,7 @@ public override SeekStatus SeekCeil(BytesRef term) if (status == SeekStatus.NOT_FOUND) { currentSubs[i].Current = currentSubs[i].Terms.Term; - Debugging.Assert(() => currentSubs[i].Current != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => currentSubs[i].Current != null); queue.Add(currentSubs[i]); } else @@ -326,7 +326,7 @@ private void PullTop() { // extract all subs from the queue that have the same // top term - Debugging.Assert(() => numTop == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTop == 0); while (true) { top[numTop++] = queue.Pop(); @@ -367,7 +367,7 @@ public 
override BytesRef Next() // most impls short-circuit if you SeekCeil to term // they are already on. SeekStatus status = SeekCeil(current); - Debugging.Assert(() => status == SeekStatus.FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(() => status == SeekStatus.FOUND); lastSeekExact = false; } lastSeek = null; @@ -484,7 +484,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) b = null; } - Debugging.Assert(() => entry.Index < docsEnum.subDocsEnum.Length, () => entry.Index + " vs " + docsEnum.subDocsEnum.Length + "; " + subs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => entry.Index < docsEnum.subDocsEnum.Length, () => entry.Index + " vs " + docsEnum.subDocsEnum.Length + "; " + subs.Length); DocsEnum subDocsEnum = entry.Terms.Docs(b, docsEnum.subDocsEnum[entry.Index], flags); if (subDocsEnum != null) { @@ -496,7 +496,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) else { // should this be an error? - Debugging.Assert(() => false, () => "One of our subs cannot provide a docsenum"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "One of our subs cannot provide a docsenum"); } } @@ -576,7 +576,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos b = null; } - Debugging.Assert(() => entry.Index < docsAndPositionsEnum.subDocsAndPositionsEnum.Length, () => entry.Index + " vs " + docsAndPositionsEnum.subDocsAndPositionsEnum.Length + "; " + subs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => entry.Index < docsAndPositionsEnum.subDocsAndPositionsEnum.Length, () => entry.Index + " vs " + docsAndPositionsEnum.subDocsAndPositionsEnum.Length + "; " + subs.Length); DocsAndPositionsEnum subPostings = entry.Terms.DocsAndPositions(b, docsAndPositionsEnum.subDocsAndPositionsEnum[entry.Index], flags); if (subPostings != null) @@ -619,7 +619,7 @@ public TermsEnumWithSlice(int index, ReaderSlice subSlice) { this.SubSlice = subSlice; this.Index = index; - Debugging.Assert(() => subSlice.Length >= 0, () => "length=" + subSlice.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => subSlice.Length >= 0, () => "length=" + subSlice.Length); } public void Reset(TermsEnum terms, BytesRef term) diff --git a/src/Lucene.Net/Index/NormsConsumer.cs b/src/Lucene.Net/Index/NormsConsumer.cs index b1797993d5..5b376409a6 100644 --- a/src/Lucene.Net/Index/NormsConsumer.cs +++ b/src/Lucene.Net/Index/NormsConsumer.cs @@ -50,7 +50,7 @@ internal override void Flush(IDictionary if (state.FieldInfos.HasNorms) { NormsFormat normsFormat = state.SegmentInfo.Codec.NormsFormat; - Debugging.Assert(() => normsFormat != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => normsFormat != null); normsConsumer = normsFormat.NormsConsumer(state); foreach (FieldInfo fi in state.FieldInfos) @@ -63,11 +63,11 @@ internal override void Flush(IDictionary if (toWrite != null && !toWrite.IsEmpty) { toWrite.Flush(state, normsConsumer); - Debugging.Assert(() => fi.NormType == DocValuesType.NUMERIC); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.NormType == DocValuesType.NUMERIC); } else if (fi.IsIndexed) { - Debugging.Assert(() => fi.NormType == DocValuesType.NONE, () => "got " + fi.NormType + "; field=" + fi.Name); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.NormType == DocValuesType.NONE, () => "got " + fi.NormType + "; field=" + fi.Name); } } } diff --git a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs 
b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs index 4a4df0e6fa..26e717ab84 100644 --- a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs +++ b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs @@ -206,7 +206,7 @@ protected override int Compare(int i, int j) [MethodImpl(MethodImplOptions.NoInlining)] public override void Merge(DocValuesFieldUpdates other) { - Debugging.Assert(() => other is NumericDocValuesFieldUpdates); + if (Debugging.AssertsEnabled) Debugging.Assert(() => other is NumericDocValuesFieldUpdates); NumericDocValuesFieldUpdates otherUpdates = (NumericDocValuesFieldUpdates)other; if (size + otherUpdates.size > int.MaxValue) { diff --git a/src/Lucene.Net/Index/OrdTermState.cs b/src/Lucene.Net/Index/OrdTermState.cs index 5c0b735aa0..2966943dc5 100644 --- a/src/Lucene.Net/Index/OrdTermState.cs +++ b/src/Lucene.Net/Index/OrdTermState.cs @@ -40,7 +40,7 @@ public OrdTermState() public override void CopyFrom(TermState other) { - Debugging.Assert(() => other is OrdTermState, () => "can not copy from " + other.GetType().Name); + if (Debugging.AssertsEnabled) Debugging.Assert(() => other is OrdTermState, () => "can not copy from " + other.GetType().Name); this.Ord = ((OrdTermState)other).Ord; } diff --git a/src/Lucene.Net/Index/ParallelCompositeReader.cs b/src/Lucene.Net/Index/ParallelCompositeReader.cs index 93eb17aa77..74740374f2 100644 --- a/src/Lucene.Net/Index/ParallelCompositeReader.cs +++ b/src/Lucene.Net/Index/ParallelCompositeReader.cs @@ -145,7 +145,7 @@ private static IndexReader[] PrepareSubReaders(CompositeReader[] readers, Compos } else { - Debugging.Assert(() => firstSubReaders[i] is CompositeReader); + if (Debugging.AssertsEnabled) Debugging.Assert(() => firstSubReaders[i] is CompositeReader); CompositeReader[] compositeSubs = new CompositeReader[readers.Length]; for (int j = 0; j < readers.Length; j++) { diff --git a/src/Lucene.Net/Index/PrefixCodedTerms.cs b/src/Lucene.Net/Index/PrefixCodedTerms.cs index a0f03b9f79..2b01ab0994 100644 --- a/src/Lucene.Net/Index/PrefixCodedTerms.cs +++ b/src/Lucene.Net/Index/PrefixCodedTerms.cs @@ -94,7 +94,7 @@ public virtual bool MoveNext() { // LUCENENET specific - Since there is no way to check for a next element // without calling this method in .NET, the assert is redundant and ineffective. 
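// Illustrative sketch only, not an additional change in this hunk: Java callers can
// assert hasNext() before calling next(), but .NET's IEnumerator contract folds that
// probe into MoveNext() itself, so the assert here stays commented out and the live
// check is simply the if-branch that follows. The shape, with ReadNextTerm() as a
// hypothetical stand-in for the inline read logic:
//
//     public virtual bool MoveNext()
//     {
//         if (input.GetFilePointer() >= input.Length)
//             return false; // exhausted; the return value replaces the Java assert
//         ReadNextTerm();   // hypothetical helper; the real code reads the term inline
//         return true;
//     }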
- //Debugging.Assert(() => input.GetFilePointer() < input.Length); // Has next + //if (Debugging.AssertsEnabled) Debugging.Assert(() => input.GetFilePointer() < input.Length); // Has next if (input.GetFilePointer() < input.Length) { try @@ -149,7 +149,7 @@ internal virtual void InitializeInstanceFields() /// add a term public virtual void Add(Term term) { - Debugging.Assert(() => lastTerm.Equals(new Term("")) || term.CompareTo(lastTerm) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastTerm.Equals(new Term("")) || term.CompareTo(lastTerm) > 0); try { diff --git a/src/Lucene.Net/Index/ReadersAndUpdates.cs b/src/Lucene.Net/Index/ReadersAndUpdates.cs index 7f8b360bed..a56e2241ac 100644 --- a/src/Lucene.Net/Index/ReadersAndUpdates.cs +++ b/src/Lucene.Net/Index/ReadersAndUpdates.cs @@ -105,19 +105,19 @@ public ReadersAndUpdates(IndexWriter writer, SegmentCommitInfo info) public virtual void IncRef() { int rc = refCount.IncrementAndGet(); - Debugging.Assert(() => rc > 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rc > 1); } public virtual void DecRef() { int rc = refCount.DecrementAndGet(); - Debugging.Assert(() => rc >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rc >= 0); } public virtual int RefCount() { int rc = refCount; - Debugging.Assert(() => rc >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => rc >= 0); return rc; } @@ -154,7 +154,7 @@ public virtual bool VerifyDocCounts() count = Info.Info.DocCount; } - Debugging.Assert(() => Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count, () => "info.docCount=" + Info.Info.DocCount + " info.DelCount=" + Info.DelCount + " pendingDeleteCount=" + pendingDeleteCount + " count=" + count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count, () => "info.docCount=" + Info.Info.DocCount + " info.DelCount=" + Info.DelCount + " pendingDeleteCount=" + pendingDeleteCount + " count=" + count); return true; } } @@ -221,7 +221,7 @@ public virtual void Release(SegmentReader sr) { lock (this) { - Debugging.Assert(() => Info == sr.SegmentInfo); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Info == sr.SegmentInfo); sr.DecRef(); } } @@ -230,10 +230,13 @@ public virtual bool Delete(int docID) { lock (this) { - Debugging.Assert(() => liveDocs != null); - Debugging.Assert(() => Monitor.IsEntered(writer)); - Debugging.Assert(() => docID >= 0 && docID < liveDocs.Length, () => "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount); - Debugging.Assert(() => !liveDocsShared); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => liveDocs != null); + Debugging.Assert(() => Monitor.IsEntered(writer)); + Debugging.Assert(() => docID >= 0 && docID < liveDocs.Length, () => "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount); + Debugging.Assert(() => !liveDocsShared); + } bool didDelete = liveDocs.Get(docID); if (didDelete) { @@ -299,7 +302,7 @@ public virtual SegmentReader GetReadOnlyClone(IOContext context) if (reader == null) { GetReader(context).DecRef(); - Debugging.Assert(() => reader != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => reader != null); } liveDocsShared = true; if (liveDocs != null) @@ -308,7 +311,7 @@ public virtual SegmentReader GetReadOnlyClone(IOContext context) } else { - Debugging.Assert(() => reader.LiveDocs == liveDocs); + if 
(Debugging.AssertsEnabled) Debugging.Assert(() => reader.LiveDocs == liveDocs); reader.IncRef(); return reader; } @@ -319,8 +322,11 @@ public virtual void InitWritableLiveDocs() { lock (this) { - Debugging.Assert(() => Monitor.IsEntered(writer)); - Debugging.Assert(() => Info.Info.DocCount > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Monitor.IsEntered(writer)); + Debugging.Assert(() => Info.Info.DocCount > 0); + } //System.out.println("initWritableLivedocs seg=" + info + " liveDocs=" + liveDocs + " shared=" + shared); if (liveDocsShared) { @@ -349,7 +355,7 @@ public virtual IBits LiveDocs { lock (this) { - Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); return liveDocs; } } @@ -360,7 +366,7 @@ public virtual IBits GetReadOnlyLiveDocs() lock (this) { //System.out.println("getROLiveDocs seg=" + info); - Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); liveDocsShared = true; //if (liveDocs != null) { //System.out.println(" liveCount=" + liveDocs.count()); @@ -394,7 +400,7 @@ public virtual bool WriteLiveDocs(Directory dir) { lock (this) { - Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); //System.out.println("rld.writeLiveDocs seg=" + info + " pendingDelCount=" + pendingDeleteCount + " numericUpdates=" + numericUpdates); if (pendingDeleteCount == 0) { @@ -402,7 +408,7 @@ public virtual bool WriteLiveDocs(Directory dir) } // We have new deletes - Debugging.Assert(() => liveDocs.Length == Info.Info.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => liveDocs.Length == Info.Info.DocCount); // Do this so we can delete any created files on // exception; this saves all codecs from having to do @@ -459,10 +465,10 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta { lock (this) { - Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); //System.out.println("rld.writeFieldUpdates: seg=" + info + " numericFieldUpdates=" + numericFieldUpdates); - Debugging.Assert(dvUpdates.Any); + if (Debugging.AssertsEnabled) Debugging.Assert(dvUpdates.Any); // Do this so we can delete any created files on // exception; this saves all codecs from having to do @@ -524,7 +530,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta string field = e.Key; NumericDocValuesFieldUpdates fieldUpdates = e.Value; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - Debugging.Assert(() => fieldInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo != null); fieldInfo.DocValuesGen = nextFieldInfosGen; // write the numeric updates to a new gen'd docvalues file @@ -537,7 +543,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta string field = e.Key; BinaryDocValuesFieldUpdates dvFieldUpdates = e.Value; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - Debugging.Assert(() => fieldInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo != null); // System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " f=" + dvFieldUpdates + ", updates=" + dvFieldUpdates); @@ -693,7 +699,7 @@ public virtual void WriteFieldUpdates(Directory dir, 
DocValuesFieldUpdates.Conta } else { // no update for this document - Debugging.Assert(() => curDoc < updateDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => curDoc < updateDoc); if (currentValues != null && DocsWithField.Get(curDoc)) { // only read the current value if the document had a value before @@ -726,7 +732,7 @@ private IEnumerable GetBytesRefEnumerable(SegmentReader reader, string } else { // no update for this document - Debugging.Assert(() => curDoc < updateDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => curDoc < updateDoc); if (currentValues != null && DocsWithField.Get(curDoc)) { // only read the current value if the document had a value before @@ -749,7 +755,7 @@ internal virtual SegmentReader GetReaderForMerge(IOContext context) { lock (this) { - Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); // must execute these two statements as atomic operation, otherwise we // could lose updates if e.g. another thread calls writeFieldUpdates in // between, or the updates are applied to the obtained reader, but then diff --git a/src/Lucene.Net/Index/SegmentCoreReaders.cs b/src/Lucene.Net/Index/SegmentCoreReaders.cs index 35fe24c5b3..32aa1cd4ee 100644 --- a/src/Lucene.Net/Index/SegmentCoreReaders.cs +++ b/src/Lucene.Net/Index/SegmentCoreReaders.cs @@ -106,7 +106,7 @@ internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInf SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.Info, fieldInfos, context, termsIndexDivisor); // Ask codec for its Fields fields = format.FieldsProducer(segmentReadState); - Debugging.Assert(() => fields != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fields != null); // ask codec for its Norms: // TODO: since we don't write any norms file if there are no norms, // kinda jaky to assume the codec handles the case of no norms file at all gracefully?! @@ -114,7 +114,7 @@ internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInf if (fieldInfos.HasNorms) { normsProducer = codec.NormsFormat.NormsProducer(segmentReadState); - Debugging.Assert(() => normsProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => normsProducer != null); } else { @@ -160,7 +160,7 @@ internal void IncRef() internal NumericDocValues GetNormValues(FieldInfo fi) { - Debugging.Assert(() => normsProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => normsProducer != null); IDictionary normFields = normsLocal.Value; diff --git a/src/Lucene.Net/Index/SegmentDocValues.cs b/src/Lucene.Net/Index/SegmentDocValues.cs index 305949b096..9d6e2743ed 100644 --- a/src/Lucene.Net/Index/SegmentDocValues.cs +++ b/src/Lucene.Net/Index/SegmentDocValues.cs @@ -85,7 +85,7 @@ internal DocValuesProducer GetDocValuesProducer(long? gen, SegmentCommitInfo si, if (!(genDVProducers.TryGetValue(gen, out dvp))) { dvp = NewDocValuesProducer(si, context, dir, dvFormat, gen, infos, termsIndexDivisor); - Debugging.Assert(() => dvp != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dvp != null); genDVProducers[gen] = dvp; } else @@ -108,7 +108,7 @@ internal void DecRef(IList dvProducersGens) foreach (long? 
gen in dvProducersGens) { RefCount dvp = genDVProducers[gen]; - Debugging.Assert(() => dvp != null, () => "gen=" + gen); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dvp != null, () => "gen=" + gen); try { dvp.DecRef(); diff --git a/src/Lucene.Net/Index/SegmentInfo.cs b/src/Lucene.Net/Index/SegmentInfo.cs index 6740156866..9321fdb2dd 100644 --- a/src/Lucene.Net/Index/SegmentInfo.cs +++ b/src/Lucene.Net/Index/SegmentInfo.cs @@ -102,7 +102,7 @@ public SegmentInfo(Directory dir, string version, string name, int docCount, boo /// public SegmentInfo(Directory dir, string version, string name, int docCount, bool isCompoundFile, Codec codec, IDictionary diagnostics, IDictionary attributes) { - Debugging.Assert(() => !(dir is TrackingDirectoryWrapper)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !(dir is TrackingDirectoryWrapper)); this.Dir = dir; this.version = version; this.Name = name; @@ -137,7 +137,7 @@ public Codec Codec get => codec; set { - Debugging.Assert(() => this.codec == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.codec == null); if (value == null) { throw new ArgumentException("codec must be non-null"); diff --git a/src/Lucene.Net/Index/SegmentInfos.cs b/src/Lucene.Net/Index/SegmentInfos.cs index 14d0633f03..155e09beae 100644 --- a/src/Lucene.Net/Index/SegmentInfos.cs +++ b/src/Lucene.Net/Index/SegmentInfos.cs @@ -543,7 +543,7 @@ private void Write(Directory directory) segnOutput.WriteInt64(e.Key); segnOutput.WriteStringSet(e.Value); } - Debugging.Assert(() => si.Dir == directory); + if (Debugging.AssertsEnabled) Debugging.Assert(() => si.Dir == directory); // If this segment is pre-4.x, perform a one-time // "ugprade" to write the .si file for it: @@ -710,7 +710,7 @@ public object Clone() sis.segments = new List(Count); foreach (SegmentCommitInfo info in segments) { - Debugging.Assert(() => info.Info.Codec != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Codec != null); // dont directly access segments, use add method!!! 
sis.Add((SegmentCommitInfo)(info.Clone())); } @@ -1153,7 +1153,7 @@ public ICollection GetFiles(Directory dir, bool includeSegmentsFile) for (int i = 0; i < size; i++) { var info = Info(i); - Debugging.Assert(() => info.Info.Dir == dir); + if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Dir == dir); if (info.Info.Dir == dir) { files.UnionWith(info.GetFiles()); @@ -1341,7 +1341,7 @@ internal void ApplyMergeChanges(MergePolicy.OneMerge merge, bool dropSegment) int newSegIdx = 0; for (int segIdx = 0, cnt = segments.Count; segIdx < cnt; segIdx++) { - Debugging.Assert(() => segIdx >= newSegIdx); + if (Debugging.AssertsEnabled) Debugging.Assert(() => segIdx >= newSegIdx); SegmentCommitInfo info = segments[segIdx]; if (mergedAway.Contains(info)) { @@ -1378,7 +1378,7 @@ internal IList CreateBackupSegmentInfos() var list = new List(Count); foreach (var info in segments) { - Debugging.Assert(() => info.Info.Codec != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Codec != null); list.Add((SegmentCommitInfo)(info.Clone())); } return list; diff --git a/src/Lucene.Net/Index/SegmentMerger.cs b/src/Lucene.Net/Index/SegmentMerger.cs index d1828651a1..d6518fa7d9 100644 --- a/src/Lucene.Net/Index/SegmentMerger.cs +++ b/src/Lucene.Net/Index/SegmentMerger.cs @@ -110,7 +110,7 @@ internal MergeState Merge() long t1 = Time.NanoTime(); mergeState.InfoStream.Message("SM", ((t1 - t0) / 1000000) + " msec to merge stored fields [" + numMerged + " docs]"); } - Debugging.Assert(() => numMerged == mergeState.SegmentInfo.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numMerged == mergeState.SegmentInfo.DocCount); SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.InfoStream, directory, mergeState.SegmentInfo, mergeState.FieldInfos, termIndexInterval, null, context); if (mergeState.InfoStream.IsEnabled("SM")) @@ -164,7 +164,7 @@ internal MergeState Merge() long t1 = Time.NanoTime(); mergeState.InfoStream.Message("SM", ((t1 - t0) / 1000000) + " msec to merge vectors [" + numMerged + " docs]"); } - Debugging.Assert(() => numMerged == mergeState.SegmentInfo.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numMerged == mergeState.SegmentInfo.DocCount); } // write the merged infos diff --git a/src/Lucene.Net/Index/SegmentReader.cs b/src/Lucene.Net/Index/SegmentReader.cs index 9f393dfc78..32f7392e5a 100644 --- a/src/Lucene.Net/Index/SegmentReader.cs +++ b/src/Lucene.Net/Index/SegmentReader.cs @@ -100,7 +100,7 @@ public SegmentReader(SegmentCommitInfo si, int termInfosIndexDivisor, IOContext } else { - Debugging.Assert(() => si.DelCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => si.DelCount == 0); liveDocs = null; } numDocs = si.Info.DocCount - si.DelCount; @@ -464,7 +464,7 @@ public override NumericDocValues GetNumericDocValues(string field) { DocValuesProducer dvProducer; dvProducersByField.TryGetValue(field, out dvProducer); - Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetNumeric(fi); dvFields[field] = dvs; } @@ -494,7 +494,7 @@ public override IBits GetDocsWithField(string field) { DocValuesProducer dvProducer; dvProducersByField.TryGetValue(field, out dvProducer); - Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetDocsWithField(fi); dvFields[field] = dvs; } @@ -520,7 +520,7 @@ public override BinaryDocValues 
GetBinaryDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetBinary(fi); dvFields[field] = dvs; } @@ -546,7 +546,7 @@ public override SortedDocValues GetSortedDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetSorted(fi); dvFields[field] = dvs; } @@ -572,7 +572,7 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); dvs = dvProducer.GetSortedSet(fi); dvFields[field] = dvs; } diff --git a/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs b/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs index aff4f93ea8..0534ac5142 100644 --- a/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs +++ b/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs @@ -80,7 +80,7 @@ public override void Warm(AtomicReader reader) break; default: - Debugging.Assert(() => false); // unknown dv type + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); // unknown dv type break; } docValuesCount++; diff --git a/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs b/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs index de57702b43..44c2710c1a 100644 --- a/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs +++ b/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs @@ -39,7 +39,7 @@ internal sealed class SingletonSortedSetDocValues : SortedSetDocValues public SingletonSortedSetDocValues(SortedDocValues @in) { this.@in = @in; - Debugging.Assert(() => NO_MORE_ORDS == -1); // this allows our nextOrd() to work for missing values without a check + if (Debugging.AssertsEnabled) Debugging.Assert(() => NO_MORE_ORDS == -1); // this allows our nextOrd() to work for missing values without a check } /// diff --git a/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs b/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs index 428ef0b718..9fd3a524de 100644 --- a/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs +++ b/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs @@ -60,7 +60,7 @@ public static AtomicReader Wrap(IndexReader reader) } else { - Debugging.Assert(() => reader is AtomicReader); + if (Debugging.AssertsEnabled) Debugging.Assert(() => reader is AtomicReader); return (AtomicReader)reader; } } @@ -174,7 +174,7 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) { return null; } - Debugging.Assert(() => map != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => map != null); int size = @in.Leaves.Count; var values = new SortedSetDocValues[size]; int[] starts = new int[size + 1]; diff --git a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs index c1378a3d63..b017f0bba4 100644 --- a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs +++ b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs @@ -130,7 +130,7 @@ protected internal virtual void ReleaseGen(long gen) throw new ArgumentException("commit gen=" + gen + " is not currently snapshotted"); } int refCountInt = (int)refCount; - Debugging.Assert(() => refCountInt > 0); + 
if (Debugging.AssertsEnabled) Debugging.Assert(() => refCountInt > 0); refCountInt--; if (refCountInt == 0) { diff --git a/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs b/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs index 6619ca6dd2..fa08b86b74 100644 --- a/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs +++ b/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs @@ -94,7 +94,7 @@ public override bool SeekExact(BytesRef text) public override void SeekExact(long ord) { - Debugging.Assert(() => ord >= 0 && ord < values.ValueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < values.ValueCount); currentOrd = (int)ord; values.LookupOrd(currentOrd, term); } @@ -132,7 +132,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - Debugging.Assert(() => state != null && state is OrdTermState); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null && state is OrdTermState); this.SeekExact(((OrdTermState)state).Ord); } diff --git a/src/Lucene.Net/Index/SortedDocValuesWriter.cs b/src/Lucene.Net/Index/SortedDocValuesWriter.cs index 08c7985932..4ee8226353 100644 --- a/src/Lucene.Net/Index/SortedDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedDocValuesWriter.cs @@ -114,7 +114,7 @@ public override void Flush(SegmentWriteState state, DocValuesConsumer dvConsumer { int maxDoc = state.SegmentInfo.DocCount; - Debugging.Assert(() => pending.Count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pending.Count == maxDoc); int valueCount = hash.Count; int[] sortedValues = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer); @@ -148,7 +148,7 @@ private IEnumerable GetBytesRefEnumberable(int valueCount, int[] sorte private IEnumerable GetOrdsEnumberable(int maxDoc, int[] ordMap) { AppendingDeltaPackedInt64Buffer.Iterator iter = pending.GetIterator(); - Debugging.Assert(() => pending.Count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pending.Count == maxDoc); for (int i = 0; i < maxDoc; ++i) { diff --git a/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs b/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs index fe2fbfbba3..1bf62a1d67 100644 --- a/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs +++ b/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs @@ -94,7 +94,7 @@ public override bool SeekExact(BytesRef text) public override void SeekExact(long ord) { - Debugging.Assert(() => ord >= 0 && ord < values.ValueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < values.ValueCount); currentOrd = (int)ord; values.LookupOrd(currentOrd, term); } @@ -132,7 +132,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - Debugging.Assert(() => state != null && state is OrdTermState); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null && state is OrdTermState); this.SeekExact(((OrdTermState)state).Ord); } diff --git a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs index 3a4460aa7f..c27c230e0d 100644 --- a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs @@ -164,7 +164,7 @@ public override void Flush(SegmentWriteState state, DocValuesConsumer dvConsumer { int maxDoc = state.SegmentInfo.DocCount; int maxCountPerDoc = maxCount; - Debugging.Assert(() => pendingCounts.Count == maxDoc); + if 
(Debugging.AssertsEnabled) Debugging.Assert(() => pendingCounts.Count == maxDoc); int valueCount = hash.Count; int[] sortedValues = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer); @@ -203,7 +203,7 @@ private IEnumerable GetBytesRefEnumberable(int valueCount, int[] sorte { AppendingDeltaPackedInt64Buffer.Iterator iter = pendingCounts.GetIterator(); - Debugging.Assert(() => pendingCounts.Count == maxDoc, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingCounts.Count == maxDoc, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count); for (int i = 0; i < maxDoc; ++i) { diff --git a/src/Lucene.Net/Index/StandardDirectoryReader.cs b/src/Lucene.Net/Index/StandardDirectoryReader.cs index 86776c7a8f..a86a6fb301 100644 --- a/src/Lucene.Net/Index/StandardDirectoryReader.cs +++ b/src/Lucene.Net/Index/StandardDirectoryReader.cs @@ -117,7 +117,7 @@ internal static DirectoryReader Open(IndexWriter writer, SegmentInfos infos, boo // actual instance of SegmentInfoPerCommit in // IndexWriter's segmentInfos: SegmentCommitInfo info = infos.Info(i); - Debugging.Assert(() => info.Info.Dir == dir); + if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Dir == dir); ReadersAndUpdates rld = writer.readerPool.Get(info, true); try { @@ -235,8 +235,11 @@ private static DirectoryReader Open(Directory directory, SegmentInfos infos, ILi // there are changes to the reader, either liveDocs or DV updates readerShared[i] = false; // Steal the ref returned by SegmentReader ctor: - Debugging.Assert(() => infos.Info(i).Info.Dir == newReaders[i].SegmentInfo.Info.Dir); - Debugging.Assert(() => infos.Info(i).HasDeletions || infos.Info(i).HasFieldUpdates); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => infos.Info(i).Info.Dir == newReaders[i].SegmentInfo.Info.Dir); + Debugging.Assert(() => infos.Info(i).HasDeletions || infos.Info(i).HasFieldUpdates); + } if (newReaders[i].SegmentInfo.DelGen == infos.Info(i).DelGen) { // only DV updates diff --git a/src/Lucene.Net/Index/StoredFieldsProcessor.cs b/src/Lucene.Net/Index/StoredFieldsProcessor.cs index fb75321d3b..52eb040cbf 100644 --- a/src/Lucene.Net/Index/StoredFieldsProcessor.cs +++ b/src/Lucene.Net/Index/StoredFieldsProcessor.cs @@ -139,7 +139,7 @@ internal void Fill(int docID) [MethodImpl(MethodImplOptions.NoInlining)] internal override void FinishDocument() { - Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument start")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument start")); InitFieldsWriter(IOContext.DEFAULT); Fill(docState.docID); @@ -156,7 +156,7 @@ internal override void FinishDocument() } Reset(); - Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument end")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument end")); } public override void AddField(int docID, IIndexableField field, FieldInfo fieldInfo) @@ -179,7 +179,7 @@ public override void AddField(int docID, IIndexableField field, FieldInfo fieldI fieldInfos[numStoredFields] = fieldInfo; numStoredFields++; - Debugging.Assert(() => docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField")); } } } diff --git a/src/Lucene.Net/Index/TermContext.cs b/src/Lucene.Net/Index/TermContext.cs index 
922b9ae6de..a275042ef1 100644 --- a/src/Lucene.Net/Index/TermContext.cs +++ b/src/Lucene.Net/Index/TermContext.cs @@ -57,7 +57,7 @@ public sealed class TermContext /// public TermContext(IndexReaderContext context) { - Debugging.Assert(() => context != null && context.IsTopLevel); + if (Debugging.AssertsEnabled) Debugging.Assert(() => context != null && context.IsTopLevel); TopReaderContext = context; docFreq = 0; int len; @@ -92,7 +92,7 @@ public TermContext(IndexReaderContext context, TermState state, int ord, int doc /// public static TermContext Build(IndexReaderContext context, Term term) { - Debugging.Assert(() => context != null && context.IsTopLevel); + if (Debugging.AssertsEnabled) Debugging.Assert(() => context != null && context.IsTopLevel); string field = term.Field; BytesRef bytes = term.Bytes; TermContext perReaderTermState = new TermContext(context); @@ -135,9 +135,12 @@ public void Clear() /// public void Register(TermState state, int ord, int docFreq, long totalTermFreq) { - Debugging.Assert(() => state != null, () => "state must not be null"); - Debugging.Assert(() => ord >= 0 && ord < states.Length); - Debugging.Assert(() => states[ord] == null, () => "state for ord: " + ord + " already registered"); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => state != null, () => "state must not be null"); + Debugging.Assert(() => ord >= 0 && ord < states.Length); + Debugging.Assert(() => states[ord] == null, () => "state for ord: " + ord + " already registered"); + } this.docFreq += docFreq; if (this.totalTermFreq >= 0 && totalTermFreq >= 0) { @@ -160,7 +163,7 @@ public void Register(TermState state, int ord, int docFreq, long totalTermFreq) /// for the reader was registered public TermState Get(int ord) { - Debugging.Assert(() => ord >= 0 && ord < states.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < states.Length); return states[ord]; } diff --git a/src/Lucene.Net/Index/TermVectorsConsumer.cs b/src/Lucene.Net/Index/TermVectorsConsumer.cs index 30fb1dab10..f46dbca0b1 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumer.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumer.cs @@ -61,12 +61,12 @@ public override void Flush(IDictionary fields if (writer != null) { int numDocs = state.SegmentInfo.DocCount; - Debugging.Assert(() => numDocs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 0); // At least one doc in this run had term vectors enabled try { Fill(numDocs); - Debugging.Assert(() => state.SegmentInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state.SegmentInfo != null); writer.Finish(state.FieldInfos, numDocs); } finally @@ -114,7 +114,7 @@ private void InitTermVectorsWriter() [MethodImpl(MethodImplOptions.NoInlining)] internal override void FinishDocument(TermsHash termsHash) { - Debugging.Assert(() => docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start")); if (!hasVectors) { @@ -133,13 +133,13 @@ internal override void FinishDocument(TermsHash termsHash) } writer.FinishDocument(); - Debugging.Assert(() => lastDocID == docState.docID, () => "lastDocID=" + lastDocID + " docState.docID=" + docState.docID); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastDocID == docState.docID, () => "lastDocID=" + lastDocID + " docState.docID=" + docState.docID); lastDocID++; termsHash.Reset(); Reset(); - Debugging.Assert(() => 
docWriter.TestPoint("TermVectorsTermsWriter.finishDocument end")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.TestPoint("TermVectorsTermsWriter.finishDocument end")); } [MethodImpl(MethodImplOptions.NoInlining)] @@ -184,7 +184,7 @@ internal void AddFieldToFlush(TermVectorsConsumerPerField fieldToFlush) internal override void StartDocument() { - Debugging.Assert(ClearLastVectorFieldName); + if (Debugging.AssertsEnabled) Debugging.Assert(ClearLastVectorFieldName); Reset(); } diff --git a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs index 032bb04b4f..71ebdb0512 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs @@ -164,13 +164,13 @@ internal override void Finish() [MethodImpl(MethodImplOptions.NoInlining)] internal void FinishDocument() { - Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.finish start")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.finish start")); int numPostings = termsHashPerField.bytesHash.Count; BytesRef flushTerm = termsWriter.flushTerm; - Debugging.Assert(() => numPostings >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numPostings >= 0); if (numPostings > maxNumPostings) { @@ -181,7 +181,7 @@ internal void FinishDocument() // of a given field in the doc. At this point we flush // our hash into the DocWriter. - Debugging.Assert(() => termsWriter.VectorFieldsInOrder(fieldInfo)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsWriter.VectorFieldsInOrder(fieldInfo)); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; TermVectorsWriter tv = termsWriter.writer; @@ -293,7 +293,7 @@ internal void WriteProx(TermVectorsPostingsArray postings, int termID) internal override void NewTerm(int termID) { - Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start")); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; postings.freqs[termID] = 1; @@ -305,7 +305,7 @@ internal override void NewTerm(int termID) internal override void AddTerm(int termID) { - Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start")); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; postings.freqs[termID]++; @@ -344,7 +344,7 @@ internal override ParallelPostingsArray NewInstance(int size) internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) { - Debugging.Assert(() => toArray is TermVectorsPostingsArray); + if (Debugging.AssertsEnabled) Debugging.Assert(() => toArray is TermVectorsPostingsArray); TermVectorsPostingsArray to = (TermVectorsPostingsArray)toArray; base.CopyTo(toArray, numToCopy); diff --git a/src/Lucene.Net/Index/TermsHashPerField.cs b/src/Lucene.Net/Index/TermsHashPerField.cs index 1e08470823..0f135ac74d 100644 --- a/src/Lucene.Net/Index/TermsHashPerField.cs +++ b/src/Lucene.Net/Index/TermsHashPerField.cs @@ -112,7 +112,7 @@ public override void Abort() public void InitReader(ByteSliceReader reader, int termID, int stream) { - Debugging.Assert(() => stream < streamCount); + 
if (Debugging.AssertsEnabled) Debugging.Assert(() => stream < streamCount); int intStart = postingsArray.intStarts[termID]; int[] ints = intPool.Buffers[intStart >> Int32BlockPool.INT32_BLOCK_SHIFT]; int upto = intStart & Int32BlockPool.INT32_BLOCK_MASK; @@ -291,7 +291,7 @@ internal void WriteByte(int stream, byte b) { int upto = intUptos[intUptoStart + stream]; var bytes = bytePool.Buffers[upto >> ByteBlockPool.BYTE_BLOCK_SHIFT]; - Debugging.Assert(() => bytes != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes != null); int offset = upto & ByteBlockPool.BYTE_BLOCK_MASK; if (bytes[offset] != 0) { @@ -319,7 +319,7 @@ public void WriteBytes(int stream, byte[] b, int offset, int len) /// internal void WriteVInt32(int stream, int i) { - Debugging.Assert(() => stream < streamCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stream < streamCount); while ((i & ~0x7F) != 0) { WriteByte(stream, (sbyte)((i & 0x7f) | 0x80)); diff --git a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs index 5b46c72597..4f07784fd0 100644 --- a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs +++ b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs @@ -45,7 +45,7 @@ internal class ThreadAffinityDocumentsWriterThreadPool : DocumentsWriterPerThrea public ThreadAffinityDocumentsWriterThreadPool(int maxNumPerThreads) : base(maxNumPerThreads) { - Debugging.Assert(() => MaxThreadStates >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxThreadStates >= 1); } public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter documentsWriter) @@ -65,7 +65,7 @@ we should somehow prevent this. */ ThreadState newState = NewThreadState(); // state is already locked if non-null if (newState != null) { - Debugging.Assert(() => newState.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newState.IsHeldByCurrentThread); threadBindings[requestingThread] = newState; return newState; } @@ -79,7 +79,7 @@ we should somehow prevent this. 
*/ minThreadState = MinContendedThreadState(); } } - Debugging.Assert(() => minThreadState != null, () => "ThreadState is null"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minThreadState != null, () => "ThreadState is null"); minThreadState.@Lock(); return minThreadState; diff --git a/src/Lucene.Net/Search/CachingWrapperFilter.cs b/src/Lucene.Net/Search/CachingWrapperFilter.cs index 683c9f17e3..b48a8bf592 100644 --- a/src/Lucene.Net/Search/CachingWrapperFilter.cs +++ b/src/Lucene.Net/Search/CachingWrapperFilter.cs @@ -120,7 +120,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo { missCount++; docIdSet = DocIdSetToCache(_filter.GetDocIdSet(context, null), reader); - Debugging.Assert(() => docIdSet.IsCacheable); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docIdSet.IsCacheable); #if FEATURE_CONDITIONALWEAKTABLE_ADDORUPDATE _cache.AddOrUpdate(key, docIdSet); #else diff --git a/src/Lucene.Net/Search/CollectionStatistics.cs b/src/Lucene.Net/Search/CollectionStatistics.cs index 107aefedda..c2940d2abf 100644 --- a/src/Lucene.Net/Search/CollectionStatistics.cs +++ b/src/Lucene.Net/Search/CollectionStatistics.cs @@ -37,10 +37,13 @@ public class CollectionStatistics /// public CollectionStatistics(string field, long maxDoc, long docCount, long sumTotalTermFreq, long sumDocFreq) { - Debugging.Assert(() => maxDoc >= 0); - Debugging.Assert(() => docCount >= -1 && docCount <= maxDoc); // #docs with field must be <= #docs - Debugging.Assert(() => sumDocFreq == -1 || sumDocFreq >= docCount); // #postings must be >= #docs with field - Debugging.Assert(() => sumTotalTermFreq == -1 || sumTotalTermFreq >= sumDocFreq); // #positions must be >= #postings + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => maxDoc >= 0); + Debugging.Assert(() => docCount >= -1 && docCount <= maxDoc); // #docs with field must be <= #docs + Debugging.Assert(() => sumDocFreq == -1 || sumDocFreq >= docCount); // #postings must be >= #docs with field + Debugging.Assert(() => sumTotalTermFreq == -1 || sumTotalTermFreq >= sumDocFreq); // #positions must be >= #postings + } this.field = field; this.maxDoc = maxDoc; this.docCount = docCount; diff --git a/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs b/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs index 11b4ac6cbf..dfb0ab11d9 100644 --- a/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs +++ b/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs @@ -163,7 +163,7 @@ public override bool Collect(BytesRef bytes) } TermState termState = termsEnum.GetTermState(); - Debugging.Assert(() => termState != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState != null); if (pos < 0) { pos = (-pos) - 1; @@ -235,7 +235,7 @@ public override int[] Init() { int[] ord = base.Init(); termState = new TermContext[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)]; - Debugging.Assert(() => termState.Length >= ord.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.Length >= ord.Length); return ord; } @@ -248,7 +248,7 @@ public override int[] Grow() Array.Copy(termState, 0, tmpTermState, 0, termState.Length); termState = tmpTermState; } - Debugging.Assert(() => termState.Length >= ord.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.Length >= ord.Length); return ord; } diff --git a/src/Lucene.Net/Search/ConstantScoreQuery.cs b/src/Lucene.Net/Search/ConstantScoreQuery.cs index 1234c6d447..20dda8a0ec 100644 --- a/src/Lucene.Net/Search/ConstantScoreQuery.cs 
+++ b/src/Lucene.Net/Search/ConstantScoreQuery.cs @@ -84,7 +84,7 @@ public override Query Rewrite(IndexReader reader) } else { - Debugging.Assert(() => m_filter != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_filter != null); // Fix outdated usage pattern from Lucene 2.x/early-3.x: // because ConstantScoreQuery only accepted filters, // QueryWrapperFilter was used to wrap queries. @@ -154,12 +154,12 @@ public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool score //DocIdSetIterator disi; if (outerInstance.m_filter != null) { - Debugging.Assert(() => outerInstance.m_query == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_query == null); return base.GetBulkScorer(context, scoreDocsInOrder, acceptDocs); } else { - Debugging.Assert(() => outerInstance.m_query != null && innerWeight != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_query != null && innerWeight != null); BulkScorer bulkScorer = innerWeight.GetBulkScorer(context, scoreDocsInOrder, acceptDocs); if (bulkScorer == null) { @@ -174,7 +174,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) DocIdSetIterator disi; if (outerInstance.m_filter != null) { - Debugging.Assert(() => outerInstance.m_query == null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_query == null); DocIdSet dis = outerInstance.m_filter.GetDocIdSet(context, acceptDocs); if (dis == null) { @@ -184,7 +184,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) } else { - Debugging.Assert(() => outerInstance.m_query != null && innerWeight != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_query != null && innerWeight != null); disi = innerWeight.GetScorer(context, acceptDocs); } @@ -310,7 +310,7 @@ public override int NextDoc() public override float GetScore() { - Debugging.Assert(() => docIdSetIterator.DocID != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docIdSetIterator.DocID != NO_MORE_DOCS); return theScore; } diff --git a/src/Lucene.Net/Search/DisjunctionScorer.cs b/src/Lucene.Net/Search/DisjunctionScorer.cs index 2c6f3ab02d..cafb16207e 100644 --- a/src/Lucene.Net/Search/DisjunctionScorer.cs +++ b/src/Lucene.Net/Search/DisjunctionScorer.cs @@ -145,7 +145,7 @@ public override long GetCost() public override int NextDoc() { - Debugging.Assert(() => m_doc != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_doc != NO_MORE_DOCS); while (true) { if (m_subScorers[0].NextDoc() != NO_MORE_DOCS) @@ -170,7 +170,7 @@ public override int NextDoc() public override int Advance(int target) { - Debugging.Assert(() => m_doc != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_doc != NO_MORE_DOCS); while (true) { if (m_subScorers[0].Advance(target) != NO_MORE_DOCS) diff --git a/src/Lucene.Net/Search/DocIdSetIterator.cs b/src/Lucene.Net/Search/DocIdSetIterator.cs index 7675b5e0b5..c7e26b5462 100644 --- a/src/Lucene.Net/Search/DocIdSetIterator.cs +++ b/src/Lucene.Net/Search/DocIdSetIterator.cs @@ -45,8 +45,11 @@ public DocIdSetIteratorAnonymousInnerClassHelper() public override int Advance(int target) { - Debugging.Assert(() => !exhausted); - Debugging.Assert(() => target >= 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !exhausted); + Debugging.Assert(() => target >= 0); + } exhausted = true; return NO_MORE_DOCS; } @@ -55,7 +58,7 @@ public override int Advance(int target) public 
override int NextDoc() { - Debugging.Assert(() => !exhausted); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !exhausted); exhausted = true; return NO_MORE_DOCS; } @@ -139,7 +142,7 @@ public override long GetCost() /// protected internal int SlowAdvance(int target) { - Debugging.Assert(() => DocID == NO_MORE_DOCS || DocID < target); // can happen when the enum is not positioned yet + if (Debugging.AssertsEnabled) Debugging.Assert(() => DocID == NO_MORE_DOCS || DocID < target); // can happen when the enum is not positioned yet int doc; do { diff --git a/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs b/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs index 6fe02f16cc..1964fe46c5 100644 --- a/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs +++ b/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs @@ -123,7 +123,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; } - Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); return new FieldCacheDocIdSetAnonymousInnerClassHelper(this, context.AtomicReader.MaxDoc, acceptDocs, docTermOrds, inclusiveLowerPoint, inclusiveUpperPoint); } diff --git a/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs b/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs index b90bac9a5c..fbd9450c35 100644 --- a/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs +++ b/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs @@ -100,7 +100,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo Int64BitSet termSet = new Int64BitSet(docTermOrds.ValueCount); TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousInnerClassHelper(this, docTermOrds)); - Debugging.Assert(() => termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); if (termsEnum.Next() != null) { // fill into a bitset diff --git a/src/Lucene.Net/Search/ExactPhraseScorer.cs b/src/Lucene.Net/Search/ExactPhraseScorer.cs index 74f86d0773..dbcf7c7af8 100644 --- a/src/Lucene.Net/Search/ExactPhraseScorer.cs +++ b/src/Lucene.Net/Search/ExactPhraseScorer.cs @@ -263,7 +263,7 @@ private int PhraseFreq() cs.LastPos = cs.Pos; int posIndex = cs.Pos - chunkStart; counts[posIndex] = 1; - Debugging.Assert(() => gens[posIndex] != gen); + if (Debugging.AssertsEnabled) Debugging.Assert(() => gens[posIndex] != gen); gens[posIndex] = gen; } diff --git a/src/Lucene.Net/Search/FieldCacheImpl.cs b/src/Lucene.Net/Search/FieldCacheImpl.cs index c4bdc5bc6e..d973e377a1 100644 --- a/src/Lucene.Net/Search/FieldCacheImpl.cs +++ b/src/Lucene.Net/Search/FieldCacheImpl.cs @@ -173,7 +173,7 @@ public ReaderClosedListenerAnonymousInnerClassHelper(FieldCacheImpl outerInstanc public void OnClose(IndexReader owner) { - Debugging.Assert(() => owner is AtomicReader); + if (Debugging.AssertsEnabled) Debugging.Assert(() => owner is AtomicReader); outerInstance.PurgeByCacheKey(((AtomicReader)owner).CoreCacheKey); } } @@ -409,7 +409,7 @@ public virtual void DoUninvert(AtomicReader reader, string field, bool setDocsWi if (setDocsWithField) { int termsDocCount = terms.DocCount; - Debugging.Assert(() => termsDocCount <= maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsDocCount <= maxDoc); if (termsDocCount == maxDoc) { // Fast case: all docs have this field: @@ -475,7 +475,7 @@ internal virtual void SetDocsWithField(AtomicReader reader, string field, IBits if (numSet >= maxDoc) { // The 
cardinality of the BitSet is maxDoc if all documents have a value. - Debugging.Assert(() => numSet == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numSet == maxDoc); bits = new Lucene.Net.Util.Bits.MatchAllBits(maxDoc); } else @@ -907,7 +907,7 @@ internal class Int32sFromArray : FieldCache.Int32s public Int32sFromArray(PackedInt32s.Reader values, int minValue) { - Debugging.Assert(() => values.BitsPerValue <= 32); + if (Debugging.AssertsEnabled) Debugging.Assert(() => values.BitsPerValue <= 32); this.values = values; this.minValue = minValue; } @@ -1095,7 +1095,7 @@ protected override object CreateValue(AtomicReader reader, CacheKey key, bool se if (terms != null) { int termsDocCount = terms.DocCount; - Debugging.Assert(() => termsDocCount <= maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsDocCount <= maxDoc); if (termsDocCount == maxDoc) { // Fast case: all docs have this field: @@ -1137,7 +1137,7 @@ protected override object CreateValue(AtomicReader reader, CacheKey key, bool se if (numSet >= maxDoc) { // The cardinality of the BitSet is maxDoc if all documents have a value. - Debugging.Assert(() => numSet == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numSet == maxDoc); return new Lucene.Net.Util.Bits.MatchAllBits(maxDoc); } return res; diff --git a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs index de70b6f85b..d7a309af99 100644 --- a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs +++ b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs @@ -142,7 +142,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; } - Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); return new AnonymousClassFieldCacheDocIdSet(fcsi, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs); } @@ -230,7 +230,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; ; } - Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); return new AnonymousClassFieldCacheDocIdSet(fcsi, inclusiveLowerPoint, inclusiveUpperPoint, context.AtomicReader.MaxDoc, acceptDocs); } diff --git a/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs b/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs index ac0bd8c60d..b12412060d 100644 --- a/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs +++ b/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs @@ -100,7 +100,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo Int64BitSet termSet = new Int64BitSet(fcsi.ValueCount); TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousInnerClassHelper(this, fcsi)); - Debugging.Assert(() => termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); if (termsEnum.Next() != null) { // fill into a bitset diff --git a/src/Lucene.Net/Search/FieldComparator.cs b/src/Lucene.Net/Search/FieldComparator.cs index 9555bee4e0..92ab322fda 100644 --- a/src/Lucene.Net/Search/FieldComparator.cs +++ b/src/Lucene.Net/Search/FieldComparator.cs @@ -927,7 +927,7 @@ public override int Compare(int slot1, int slot2) public override int CompareBottom(int doc) { float score = scorer.GetScore(); - Debugging.Assert(() => !float.IsNaN(score)); 
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); // LUCENENET specific special case: // In case of zero, we may have a "positive 0" or "negative 0" @@ -938,7 +938,7 @@ public override int CompareBottom(int doc) public override void Copy(int slot, int doc) { scores[slot] = scorer.GetScore(); - Debugging.Assert(() => !float.IsNaN(scores[slot])); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(scores[slot])); } public override FieldComparer SetNextReader(AtomicReaderContext context) @@ -988,7 +988,7 @@ public override int CompareValues(float first, float second) public override int CompareTop(int doc) { float docValue = scorer.GetScore(); - Debugging.Assert(() => !float.IsNaN(docValue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(docValue)); // LUCENENET specific special case: // In case of zero, we may have a "positive 0" or "negative 0" @@ -1216,7 +1216,7 @@ public override int Compare(int slot1, int slot2) public override int CompareBottom(int doc) { - Debugging.Assert(() => bottomSlot != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bottomSlot != -1); int docOrd = termsIndex.GetOrd(doc); if (docOrd == -1) { @@ -1250,7 +1250,7 @@ public override void Copy(int slot, int doc) } else { - Debugging.Assert(() => ord >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0); if (values[slot] == null) { values[slot] = new BytesRef(); @@ -1319,7 +1319,7 @@ public override void SetBottom(int slot) if (bottomValue == null) { // missingOrd is null for all segments - Debugging.Assert(() => ords[bottomSlot] == missingOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ords[bottomSlot] == missingOrd); bottomOrd = missingOrd; bottomSameReader = true; readerGen[bottomSlot] = currentReaderGen; diff --git a/src/Lucene.Net/Search/FieldValueHitQueue.cs b/src/Lucene.Net/Search/FieldValueHitQueue.cs index 10983a51cc..52d34028f1 100644 --- a/src/Lucene.Net/Search/FieldValueHitQueue.cs +++ b/src/Lucene.Net/Search/FieldValueHitQueue.cs @@ -72,8 +72,11 @@ public OneComparerFieldValueHitQueue(SortField[] fields, int size) /// true if document a should be sorted after document b. 
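// For orientation, an illustrative recap of the two guard shapes these conversions
// use (condition and message names below are placeholders, not code from this class):
// a lone assert keeps the guard inline, while adjacent asserts share a single guarded
// block, so Debugging.AssertsEnabled is read once and the condition/message delegates
// are never created when asserts are off:
//
//     if (Debugging.AssertsEnabled) Debugging.Assert(() => condition);
//
//     if (Debugging.AssertsEnabled)
//     {
//         Debugging.Assert(() => firstCondition);
//         Debugging.Assert(() => secondCondition, () => "message built only on failure");
//     }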
protected internal override bool LessThan(T hitA, T hitB) { - Debugging.Assert(() => hitA != hitB); - Debugging.Assert(() => hitA.Slot != hitB.Slot); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => hitA != hitB); + Debugging.Assert(() => hitA.Slot != hitB.Slot); + } int c = oneReverseMul * m_firstComparer.Compare(hitA.Slot, hitB.Slot); if (c != 0) @@ -107,8 +110,11 @@ public MultiComparersFieldValueHitQueue(SortField[] fields, int size) protected internal override bool LessThan(T hitA, T hitB) { - Debugging.Assert(() => hitA != hitB); - Debugging.Assert(() => hitA.Slot != hitB.Slot); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => hitA != hitB); + Debugging.Assert(() => hitA.Slot != hitB.Slot); + } int numComparers = m_comparers.Length; for (int i = 0; i < numComparers; ++i) diff --git a/src/Lucene.Net/Search/FilteredQuery.cs b/src/Lucene.Net/Search/FilteredQuery.cs index 786b81e096..281be75e88 100644 --- a/src/Lucene.Net/Search/FilteredQuery.cs +++ b/src/Lucene.Net/Search/FilteredQuery.cs @@ -139,7 +139,7 @@ public override Explanation Explain(AtomicReaderContext ir, int i) // return a filtering scorer public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - Debugging.Assert(() => outerInstance.filter != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.filter != null); DocIdSet filterDocIdSet = outerInstance.filter.GetDocIdSet(context, acceptDocs); if (filterDocIdSet == null) @@ -154,7 +154,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) // return a filtering top scorer public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, IBits acceptDocs) { - Debugging.Assert(() => outerInstance.filter != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.filter != null); DocIdSet filterDocIdSet = outerInstance.filter.GetDocIdSet(context, acceptDocs); if (filterDocIdSet == null) @@ -449,7 +449,7 @@ public override bool Equals(object o) { return false; } - Debugging.Assert(() => o is FilteredQuery); + if (Debugging.AssertsEnabled) Debugging.Assert(() => o is FilteredQuery); FilteredQuery fq = (FilteredQuery)o; return fq.query.Equals(this.query) && fq.filter.Equals(this.filter) && fq.strategy.Equals(this.strategy); } @@ -594,7 +594,7 @@ public override Scorer FilteredScorer(AtomicReaderContext context, Weight weight } else { - Debugging.Assert(() => firstFilterDoc > -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => firstFilterDoc > -1); // we are gonna advance() this scorer, so we set inorder=true/toplevel=false // we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice Scorer scorer = weight.GetScorer(context, null); diff --git a/src/Lucene.Net/Search/FuzzyTermsEnum.cs b/src/Lucene.Net/Search/FuzzyTermsEnum.cs index c095147826..151b28a277 100644 --- a/src/Lucene.Net/Search/FuzzyTermsEnum.cs +++ b/src/Lucene.Net/Search/FuzzyTermsEnum.cs @@ -251,7 +251,7 @@ protected virtual void MaxEditDistanceChanged(BytesRef lastTerm, int maxEdits, b // assert newEnum != null; if (newEnum == null) { - Debugging.Assert(() => maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE); throw new ArgumentException("maxEdits cannot be > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE"); } SetEnum(newEnum); diff --git a/src/Lucene.Net/Search/IndexSearcher.cs 
b/src/Lucene.Net/Search/IndexSearcher.cs index d79b6460f9..9875ceca3a 100644 --- a/src/Lucene.Net/Search/IndexSearcher.cs +++ b/src/Lucene.Net/Search/IndexSearcher.cs @@ -135,7 +135,7 @@ public IndexSearcher(IndexReader r, TaskScheduler executor) /// public IndexSearcher(IndexReaderContext context, TaskScheduler executor) { - Debugging.Assert(() => context.IsTopLevel, () => "IndexSearcher's ReaderContext must be topLevel for reader" + context.Reader); + if (Debugging.AssertsEnabled) Debugging.Assert(() => context.IsTopLevel, () => "IndexSearcher's ReaderContext must be topLevel for reader" + context.Reader); reader = context.Reader; this.executor = executor; this.m_readerContext = context; @@ -801,7 +801,7 @@ public SearcherCallableWithSort(ReentrantLock @lock, IndexSearcher searcher, Lea public TopFieldDocs Call() { - Debugging.Assert(() => slice.Leaves.Length == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => slice.Leaves.Length == 1); TopFieldDocs docs = searcher.Search(slice.Leaves, weight, after, nDocs, sort, true, doDocScores || sort.NeedsScores, doMaxScore); @lock.Lock(); try @@ -966,7 +966,7 @@ public virtual CollectionStatistics CollectionStatistics(string field) long sumTotalTermFreq; long sumDocFreq; - Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); Terms terms = MultiFields.GetTerms(reader, field); if (terms == null) diff --git a/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs b/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs index b8a1567247..cc19c8023e 100644 --- a/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs +++ b/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs @@ -116,7 +116,7 @@ public MinShouldMatchSumScorer(Weight weight, IList subScorers, int mini this.subScorers[i] = this.sortedSubScorers[mm - 1 + i]; } MinheapHeapify(); - Debugging.Assert(MinheapCheck); + if (Debugging.AssertsEnabled) Debugging.Assert(MinheapCheck); } /// @@ -140,7 +140,7 @@ public override sealed ICollection GetChildren() public override int NextDoc() { - Debugging.Assert(() => doc != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => doc != NO_MORE_DOCS); while (true) { // to remove current doc, call next() on all subScorers on current doc within heap diff --git a/src/Lucene.Net/Search/MultiPhraseQuery.cs b/src/Lucene.Net/Search/MultiPhraseQuery.cs index a4872d5107..80a2c6872c 100644 --- a/src/Lucene.Net/Search/MultiPhraseQuery.cs +++ b/src/Lucene.Net/Search/MultiPhraseQuery.cs @@ -223,7 +223,7 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - Debugging.Assert(() => outerInstance.termArrays.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.termArrays.Count > 0); AtomicReader reader = (context.AtomicReader); IBits liveDocs = acceptDocs; @@ -286,7 +286,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) if (postingsEnum == null) { // term does exist, but has no positions - Debugging.Assert(() => termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, () => "termstate found but no term exists in reader"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, () => "termstate found but no term exists in reader"); throw new InvalidOperationException("field \"" + term.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.Text() + ")"); } diff --git 
a/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs b/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs index 9d7143502e..ac9b307719 100644 --- a/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs +++ b/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs @@ -109,7 +109,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo } TermsEnum termsEnum = m_query.GetTermsEnum(terms); - Debugging.Assert(() => termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); if (termsEnum.Next() != null) { // fill into a FixedBitSet diff --git a/src/Lucene.Net/Search/NumericRangeQuery.cs b/src/Lucene.Net/Search/NumericRangeQuery.cs index 0bbe3f9da8..5ecd6a9c37 100644 --- a/src/Lucene.Net/Search/NumericRangeQuery.cs +++ b/src/Lucene.Net/Search/NumericRangeQuery.cs @@ -320,7 +320,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - Debugging.Assert(() => this.outerInstance.dataType == NumericType.DOUBLE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.outerInstance.dataType == NumericType.DOUBLE); minBound = (this.outerInstance.min == null) ? INT64_NEGATIVE_INFINITY : NumericUtils.DoubleToSortableInt64(Convert.ToDouble(this.outerInstance.min.Value, CultureInfo.InvariantCulture)); } if (!this.outerInstance.minInclusive && this.outerInstance.min != null) @@ -340,7 +340,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - Debugging.Assert(() => this.outerInstance.dataType == NumericType.DOUBLE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.outerInstance.dataType == NumericType.DOUBLE); maxBound = (this.outerInstance.max == null) ? INT64_POSITIVE_INFINITY : NumericUtils.DoubleToSortableInt64(Convert.ToDouble(this.outerInstance.max, CultureInfo.InvariantCulture)); } if (!this.outerInstance.maxInclusive && this.outerInstance.max != null) @@ -367,7 +367,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - Debugging.Assert(() => this.outerInstance.dataType == NumericType.SINGLE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.outerInstance.dataType == NumericType.SINGLE); minBound = (this.outerInstance.min == null) ? INT32_NEGATIVE_INFINITY : NumericUtils.SingleToSortableInt32(Convert.ToSingle(this.outerInstance.min, CultureInfo.InvariantCulture)); } if (!this.outerInstance.minInclusive && this.outerInstance.min != null) @@ -387,7 +387,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - Debugging.Assert(() => this.outerInstance.dataType == NumericType.SINGLE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.outerInstance.dataType == NumericType.SINGLE); maxBound = (this.outerInstance.max == null) ? 
 INT32_POSITIVE_INFINITY : NumericUtils.SingleToSortableInt32(Convert.ToSingle(this.outerInstance.max, CultureInfo.InvariantCulture));
 }
 if (!this.outerInstance.maxInclusive && this.outerInstance.max != null)
@@ -445,10 +445,10 @@ public override sealed void AddRange(BytesRef minPrefixCoded, BytesRef maxPrefix
 private void NextRange()
 {
- Debugging.Assert(() => rangeBounds.Count % 2 == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => rangeBounds.Count % 2 == 0);
 currentLowerBound = rangeBounds.Dequeue();
- Debugging.Assert(() => currentUpperBound == null || termComp.Compare(currentUpperBound, currentLowerBound) <= 0, () => "The current upper bound must be <= the new lower bound");
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => currentUpperBound == null || termComp.Compare(currentUpperBound, currentLowerBound) <= 0, () => "The current upper bound must be <= the new lower bound");
 currentUpperBound = rangeBounds.Dequeue();
 }
@@ -469,7 +469,7 @@ protected override sealed BytesRef NextSeekTerm(BytesRef term)
 }
 // no more sub-range enums available
- Debugging.Assert(() => rangeBounds.Count == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => rangeBounds.Count == 0);
 currentLowerBound = currentUpperBound = null;
 return null;
 }
diff --git a/src/Lucene.Net/Search/PhraseQuery.cs b/src/Lucene.Net/Search/PhraseQuery.cs
index 17c10da0f5..3bd6d2e7b6 100644
--- a/src/Lucene.Net/Search/PhraseQuery.cs
+++ b/src/Lucene.Net/Search/PhraseQuery.cs
@@ -337,7 +337,7 @@ public override void Normalize(float queryNorm, float topLevelBoost)
 public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs)
 {
- Debugging.Assert(() => outerInstance.terms.Count > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.terms.Count > 0);
 AtomicReader reader = context.AtomicReader;
 IBits liveDocs = acceptDocs;
 PostingsAndFreq[] postingsFreqs = new PostingsAndFreq[outerInstance.terms.Count];
@@ -357,7 +357,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs)
 TermState state = states[i].Get(context.Ord);
 if (state == null) // term doesn't exist in this segment
 {
- Debugging.Assert(() => TermNotInReader(reader, t), () => "no termstate found but term exists in reader");
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => TermNotInReader(reader, t), () => "no termstate found but term exists in reader");
 return null;
 }
 te.SeekExact(t.Bytes, state);
@@ -367,7 +367,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs)
 // positions.
 if (postingsEnum == null)
 {
- Debugging.Assert(() => te.SeekExact(t.Bytes), () => "termstate found but no term exists in reader");
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => te.SeekExact(t.Bytes), () => "termstate found but no term exists in reader");
 // term does exist, but has no positions
 throw new InvalidOperationException("field \"" + t.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + t.Text() + ")");
 }
diff --git a/src/Lucene.Net/Search/QueryRescorer.cs b/src/Lucene.Net/Search/QueryRescorer.cs
index 3bca239e8d..3fe94d06f5 100644
--- a/src/Lucene.Net/Search/QueryRescorer.cs
+++ b/src/Lucene.Net/Search/QueryRescorer.cs
@@ -101,7 +101,7 @@ public override TopDocs Rescore(IndexSearcher searcher, TopDocs firstPassTopDocs
 else
 {
 // Query did not match this doc:
- Debugging.Assert(() => actualDoc > targetDoc);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => actualDoc > targetDoc);
 hit.Score = Combine(hit.Score, false, 0.0f);
 }
diff --git a/src/Lucene.Net/Search/ReferenceManager.cs b/src/Lucene.Net/Search/ReferenceManager.cs
index d9ab9c87ec..1376cb77e1 100644
--- a/src/Lucene.Net/Search/ReferenceManager.cs
+++ b/src/Lucene.Net/Search/ReferenceManager.cs
@@ -117,7 +117,7 @@ public G Acquire()
 }
 if (GetRefCount(@ref) == 0 && (object)current == (object)@ref)
 {
- Debugging.Assert(() => @ref != null);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => @ref != null);
 /* if we can't increment the reader but we are still the current
 reference the RM is in an illegal state since we can't make any progress
@@ -200,7 +200,7 @@ private void DoMaybeRefresh()
 G newReference = RefreshIfNeeded(reference);
 if (newReference != null)
 {
- Debugging.Assert(() => !ReferenceEquals(newReference, reference), () => "refreshIfNeeded should return null if refresh wasn't needed");
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => !ReferenceEquals(newReference, reference), () => "refreshIfNeeded should return null if refresh wasn't needed");
 try
 {
 SwapReference(newReference);
@@ -311,7 +311,7 @@ protected virtual void AfterMaybeRefresh()
 /// If the release operation on the given resource throws an IOException
 public void Release(G reference)
 {
- Debugging.Assert(() => !(reference is null));
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => !(reference is null));
 DecRef(reference);
 }
diff --git a/src/Lucene.Net/Search/ReqOptSumScorer.cs b/src/Lucene.Net/Search/ReqOptSumScorer.cs
index 63b9236d5e..fd0830513e 100644
--- a/src/Lucene.Net/Search/ReqOptSumScorer.cs
+++ b/src/Lucene.Net/Search/ReqOptSumScorer.cs
@@ -43,8 +43,11 @@ internal class ReqOptSumScorer : Scorer
 public ReqOptSumScorer(Scorer reqScorer, Scorer optScorer)
 : base(reqScorer.m_weight)
 {
- Debugging.Assert(() => reqScorer != null);
- Debugging.Assert(() => optScorer != null);
+ if (Debugging.AssertsEnabled)
+ {
+ Debugging.Assert(() => reqScorer != null);
+ Debugging.Assert(() => optScorer != null);
+ }
 this.reqScorer = reqScorer;
 this.optScorer = optScorer;
 }
diff --git a/src/Lucene.Net/Search/ScoringRewrite.cs b/src/Lucene.Net/Search/ScoringRewrite.cs
index 3d4ba71300..b1b5bd7d15 100644
--- a/src/Lucene.Net/Search/ScoringRewrite.cs
+++ b/src/Lucene.Net/Search/ScoringRewrite.cs
@@ -134,7 +134,7 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query)
 {
 int pos = sort[i];
 Term term = new Term(query.Field, col.terms.Get(pos, new BytesRef()));
- Debugging.Assert(() => reader.DocFreq(term)
== termStates[pos].DocFreq); AddClause(result, term, termStates[pos].DocFreq, query.Boost * boost[pos], termStates[pos]); } } @@ -173,13 +173,13 @@ public override bool Collect(BytesRef bytes) { int e = terms.Add(bytes); TermState state = termsEnum.GetTermState(); - Debugging.Assert(() => state != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null); if (e < 0) { // duplicate term: update docFreq int pos = (-e) - 1; array.termState[pos].Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq); - Debugging.Assert(() => array.boost[pos] == boostAtt.Boost, () => "boost should be equal in all segment TermsEnums"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => array.boost[pos] == boostAtt.Boost, () => "boost should be equal in all segment TermsEnums"); } else { @@ -209,7 +209,7 @@ public override int[] Init() int[] ord = base.Init(); boost = new float[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_SINGLE)]; termState = new TermContext[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)]; - Debugging.Assert(() => termState.Length >= ord.Length && boost.Length >= ord.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.Length >= ord.Length && boost.Length >= ord.Length); return ord; } @@ -223,7 +223,7 @@ public override int[] Grow() Array.Copy(termState, 0, tmpTermState, 0, termState.Length); termState = tmpTermState; } - Debugging.Assert(() => termState.Length >= ord.Length && boost.Length >= ord.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.Length >= ord.Length && boost.Length >= ord.Length); return ord; } diff --git a/src/Lucene.Net/Search/SearcherManager.cs b/src/Lucene.Net/Search/SearcherManager.cs index 59829a44fd..7f3a9c6fe1 100644 --- a/src/Lucene.Net/Search/SearcherManager.cs +++ b/src/Lucene.Net/Search/SearcherManager.cs @@ -120,7 +120,7 @@ protected override void DecRef(IndexSearcher reference) protected override IndexSearcher RefreshIfNeeded(IndexSearcher referenceToRefresh) { IndexReader r = referenceToRefresh.IndexReader; - Debugging.Assert(() => r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); + if (Debugging.AssertsEnabled) Debugging.Assert(() => r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); IndexReader newReader = DirectoryReader.OpenIfChanged((DirectoryReader)r); if (newReader == null) { @@ -152,7 +152,7 @@ public bool IsSearcherCurrent() try { IndexReader r = searcher.IndexReader; - Debugging.Assert(() => r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); + if (Debugging.AssertsEnabled) Debugging.Assert(() => r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); return ((DirectoryReader)r).IsCurrent(); } finally diff --git a/src/Lucene.Net/Search/Similarities/SimilarityBase.cs b/src/Lucene.Net/Search/Similarities/SimilarityBase.cs index 4640dc6937..d8f190e13b 100644 --- a/src/Lucene.Net/Search/Similarities/SimilarityBase.cs +++ b/src/Lucene.Net/Search/Similarities/SimilarityBase.cs @@ -101,7 +101,7 @@ protected internal virtual BasicStats NewStats(string field, float queryBoost) protected internal virtual void FillBasicStats(BasicStats stats, CollectionStatistics collectionStats, TermStatistics termStats) { // #positions(field) must be >= #positions(term) - Debugging.Assert(() => collectionStats.SumTotalTermFreq == -1 || collectionStats.SumTotalTermFreq >= 
termStats.TotalTermFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(() => collectionStats.SumTotalTermFreq == -1 || collectionStats.SumTotalTermFreq >= termStats.TotalTermFreq); long numberOfDocuments = collectionStats.MaxDoc; long docFreq = termStats.DocFreq; diff --git a/src/Lucene.Net/Search/SloppyPhraseScorer.cs b/src/Lucene.Net/Search/SloppyPhraseScorer.cs index 6e2d33019b..2968233207 100644 --- a/src/Lucene.Net/Search/SloppyPhraseScorer.cs +++ b/src/Lucene.Net/Search/SloppyPhraseScorer.cs @@ -506,7 +506,7 @@ private IList> GatherRptGroups(JCG.LinkedDictionary pp.rptGroup == -1 || pp.rptGroup == g); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pp.rptGroup == -1 || pp.rptGroup == g); pp.rptGroup = g; } } @@ -682,7 +682,7 @@ public override float GetScore() public override int Advance(int target) { - Debugging.Assert(() => target > DocID); + if (Debugging.AssertsEnabled) Debugging.Assert(() => target > DocID); do { if (!AdvanceMin(target)) diff --git a/src/Lucene.Net/Search/SortField.cs b/src/Lucene.Net/Search/SortField.cs index 6d2ffce6d9..277304e795 100644 --- a/src/Lucene.Net/Search/SortField.cs +++ b/src/Lucene.Net/Search/SortField.cs @@ -456,7 +456,7 @@ public virtual FieldComparer GetComparer(int numHits, int sortPos) #pragma warning restore 612, 618 case SortFieldType.CUSTOM: - Debugging.Assert(() => comparerSource != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => comparerSource != null); return comparerSource.NewComparer(field, numHits, sortPos, reverse); case SortFieldType.STRING: diff --git a/src/Lucene.Net/Search/SortRescorer.cs b/src/Lucene.Net/Search/SortRescorer.cs index d373e982bd..b18ac5905c 100644 --- a/src/Lucene.Net/Search/SortRescorer.cs +++ b/src/Lucene.Net/Search/SortRescorer.cs @@ -92,7 +92,7 @@ public override Explanation Explain(IndexSearcher searcher, Explanation firstPas { TopDocs oneHit = new TopDocs(1, new ScoreDoc[] { new ScoreDoc(docID, firstPassExplanation.Value) }); TopDocs hits = Rescore(searcher, oneHit, 1); - Debugging.Assert(() => hits.TotalHits == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hits.TotalHits == 1); // TODO: if we could ask the Sort to explain itself then // we wouldn't need the separate ExpressionRescorer... diff --git a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs index 9424fc8a3a..4620933571 100644 --- a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs +++ b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs @@ -285,7 +285,7 @@ private bool ToSameDoc() } for (int i = 0; i < subSpansByDoc.Length; i++) { - Debugging.Assert(() => subSpansByDoc[i].Doc == maxDoc, () => " NearSpansOrdered.toSameDoc() spans " + subSpansByDoc[0] + "\n at doc " + subSpansByDoc[i].Doc + ", but should be at " + maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => subSpansByDoc[i].Doc == maxDoc, () => " NearSpansOrdered.toSameDoc() spans " + subSpansByDoc[0] + "\n at doc " + subSpansByDoc[i].Doc + ", but should be at " + maxDoc); } inSameDoc = true; return true; @@ -298,7 +298,7 @@ private bool ToSameDoc() /// and ends before . 
internal static bool DocSpansOrdered(Spans spans1, Spans spans2) { - Debugging.Assert(() => spans1.Doc == spans2.Doc, () => "doc1 " + spans1.Doc + " != doc2 " + spans2.Doc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => spans1.Doc == spans2.Doc, () => "doc1 " + spans1.Doc + " != doc2 " + spans2.Doc); int start1 = spans1.Start; int start2 = spans2.Start; /* Do not call docSpansOrdered(int,int,int,int) to avoid invoking .end() : */ @@ -409,7 +409,7 @@ private bool ShrinkToAfterShortestMatch() possibleMatchPayloads.UnionWith(possiblePayload); } - Debugging.Assert(() => prevStart <= matchStart); + if (Debugging.AssertsEnabled) Debugging.Assert(() => prevStart <= matchStart); if (matchStart > prevEnd) // Only non overlapping spans add to slop. { matchSlop += (matchStart - prevEnd); diff --git a/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs b/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs index abc997b0e5..bd67ec3ee8 100644 --- a/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs @@ -41,7 +41,7 @@ public SpanFirstQuery(SpanQuery match, int end) protected override AcceptStatus AcceptPosition(Spans spans) { - Debugging.Assert(() => spans.Start != spans.End, () => "start equals end: " + spans.Start); + if (Debugging.AssertsEnabled) Debugging.Assert(() => spans.Start != spans.End, () => "start equals end: " + spans.Start); if (spans.Start >= m_end) { return AcceptStatus.NO_AND_ADVANCE; diff --git a/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs b/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs index 2eeb036360..522ed66ba8 100644 --- a/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs @@ -40,7 +40,7 @@ public SpanPositionRangeQuery(SpanQuery match, int start, int end) protected override AcceptStatus AcceptPosition(Spans spans) { - Debugging.Assert(() => spans.Start != spans.End); + if (Debugging.AssertsEnabled) Debugging.Assert(() => spans.Start != spans.End); if (spans.Start >= m_end) { return AcceptStatus.NO_AND_ADVANCE; diff --git a/src/Lucene.Net/Search/Spans/TermSpans.cs b/src/Lucene.Net/Search/Spans/TermSpans.cs index 3f610e098e..ebf2bec8cf 100644 --- a/src/Lucene.Net/Search/Spans/TermSpans.cs +++ b/src/Lucene.Net/Search/Spans/TermSpans.cs @@ -76,7 +76,7 @@ public override bool Next() public override bool SkipTo(int target) { - Debugging.Assert(() => target > m_doc); + if (Debugging.AssertsEnabled) Debugging.Assert(() => target > m_doc); m_doc = m_postings.Advance(target); if (m_doc == DocIdSetIterator.NO_MORE_DOCS) { diff --git a/src/Lucene.Net/Search/TermCollectingRewrite.cs b/src/Lucene.Net/Search/TermCollectingRewrite.cs index 286ab3aa7a..b6280ed85f 100644 --- a/src/Lucene.Net/Search/TermCollectingRewrite.cs +++ b/src/Lucene.Net/Search/TermCollectingRewrite.cs @@ -68,7 +68,7 @@ internal void CollectTerms(IndexReader reader, MultiTermQuery query, TermCollect } TermsEnum termsEnum = GetTermsEnum(query, terms, collector.Attributes); - Debugging.Assert(() => termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); if (termsEnum == TermsEnum.EMPTY) { diff --git a/src/Lucene.Net/Search/TermQuery.cs b/src/Lucene.Net/Search/TermQuery.cs index 8278d5a997..54f6732515 100644 --- a/src/Lucene.Net/Search/TermQuery.cs +++ b/src/Lucene.Net/Search/TermQuery.cs @@ -60,7 +60,7 @@ internal sealed class TermWeight : Weight public TermWeight(TermQuery outerInstance, IndexSearcher searcher, TermContext termStates) { this.outerInstance = 
outerInstance; - Debugging.Assert(() => termStates != null, () => "TermContext must not be null"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termStates != null, () => "TermContext must not be null"); this.termStates = termStates; this.similarity = searcher.Similarity; this.stats = similarity.ComputeWeight(outerInstance.Boost, searcher.CollectionStatistics(outerInstance.term.Field), searcher.TermStatistics(outerInstance.term, termStates)); @@ -85,14 +85,14 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - Debugging.Assert(() => termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context), () => "The top-reader used to create Weight (" + termStates.TopReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.GetTopLevelContext(context)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context), () => "The top-reader used to create Weight (" + termStates.TopReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.GetTopLevelContext(context)); TermsEnum termsEnum = GetTermsEnum(context); if (termsEnum == null) { return null; } DocsEnum docs = termsEnum.Docs(acceptDocs, null); - Debugging.Assert(() => docs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docs != null); return new TermScorer(this, docs, similarity.GetSimScorer(stats, context)); } @@ -105,7 +105,7 @@ private TermsEnum GetTermsEnum(AtomicReaderContext context) TermState state = termStates.Get(context.Ord); if (state == null) // term is not present in that reader { - Debugging.Assert(() => TermNotInReader(context.AtomicReader, outerInstance.term), () => "no termstate found but term exists in reader term=" + outerInstance.term); + if (Debugging.AssertsEnabled) Debugging.Assert(() => TermNotInReader(context.AtomicReader, outerInstance.term), () => "no termstate found but term exists in reader term=" + outerInstance.term); return null; } //System.out.println("LD=" + reader.getLiveDocs() + " set?=" + (reader.getLiveDocs() != null ? 
reader.getLiveDocs().get(0) : "null")); @@ -170,7 +170,7 @@ public TermQuery(Term t, int docFreq) /// public TermQuery(Term t, TermContext states) { - Debugging.Assert(() => states != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => states != null); term = t; docFreq = states.DocFreq; perReaderTermState = states; diff --git a/src/Lucene.Net/Search/TermScorer.cs b/src/Lucene.Net/Search/TermScorer.cs index 14cb6154e4..9af6d46fba 100644 --- a/src/Lucene.Net/Search/TermScorer.cs +++ b/src/Lucene.Net/Search/TermScorer.cs @@ -62,7 +62,7 @@ public override int NextDoc() public override float GetScore() { - Debugging.Assert(() => DocID != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(() => DocID != NO_MORE_DOCS); return docScorer.Score(docsEnum.DocID, docsEnum.Freq); } diff --git a/src/Lucene.Net/Search/TermStatistics.cs b/src/Lucene.Net/Search/TermStatistics.cs index 7b082ef0d2..9ad51ba51b 100644 --- a/src/Lucene.Net/Search/TermStatistics.cs +++ b/src/Lucene.Net/Search/TermStatistics.cs @@ -37,8 +37,11 @@ public class TermStatistics /// public TermStatistics(BytesRef term, long docFreq, long totalTermFreq) { - Debugging.Assert(() => docFreq >= 0); - Debugging.Assert(() => totalTermFreq == -1 || totalTermFreq >= docFreq); // #positions must be >= #postings + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => docFreq >= 0); + Debugging.Assert(() => totalTermFreq == -1 || totalTermFreq >= docFreq); // #positions must be >= #postings + } this.term = term; this.docFreq = docFreq; this.totalTermFreq = totalTermFreq; diff --git a/src/Lucene.Net/Search/TopDocs.cs b/src/Lucene.Net/Search/TopDocs.cs index 64b9131de8..37f7c01ef7 100644 --- a/src/Lucene.Net/Search/TopDocs.cs +++ b/src/Lucene.Net/Search/TopDocs.cs @@ -107,7 +107,7 @@ public ScoreMergeSortQueue(TopDocs[] shardHits) // Returns true if first is < second protected internal override bool LessThan(ShardRef first, ShardRef second) { - Debugging.Assert(() => first != second); + if (Debugging.AssertsEnabled) Debugging.Assert(() => first != second); float firstScore = shardHits[first.ShardIndex][first.HitIndex].Score; float secondScore = shardHits[second.ShardIndex][second.HitIndex].Score; @@ -134,7 +134,7 @@ protected internal override bool LessThan(ShardRef first, ShardRef second) { // Tie break in same shard: resolve however the // shard had resolved it: - Debugging.Assert(() => first.HitIndex != second.HitIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(() => first.HitIndex != second.HitIndex); return first.HitIndex < second.HitIndex; } } @@ -191,7 +191,7 @@ public MergeSortQueue(Sort sort, TopDocs[] shardHits) // Returns true if first is < second protected internal override bool LessThan(ShardRef first, ShardRef second) { - Debugging.Assert(() => first != second); + if (Debugging.AssertsEnabled) Debugging.Assert(() => first != second); FieldDoc firstFD = (FieldDoc)shardHits[first.ShardIndex][first.HitIndex]; FieldDoc secondFD = (FieldDoc)shardHits[second.ShardIndex][second.HitIndex]; //System.out.println(" lessThan:\n first=" + first + " doc=" + firstFD.doc + " score=" + firstFD.score + "\n second=" + second + " doc=" + secondFD.doc + " score=" + secondFD.score); @@ -226,7 +226,7 @@ protected internal override bool LessThan(ShardRef first, ShardRef second) // Tie break in same shard: resolve however the // shard had resolved it: //System.out.println(" return tb " + (first.hitIndex < second.hitIndex)); - Debugging.Assert(() => first.HitIndex != second.HitIndex); + if (Debugging.AssertsEnabled) 
Debugging.Assert(() => first.HitIndex != second.HitIndex); return first.HitIndex < second.HitIndex; } } @@ -305,7 +305,7 @@ public static TopDocs Merge(Sort sort, int start, int size, TopDocs[] shardHits) int hitUpto = 0; while (hitUpto < numIterOnHits) { - Debugging.Assert(() => queue.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => queue.Count > 0); ShardRef @ref = queue.Pop(); ScoreDoc hit = shardHits[@ref.ShardIndex].ScoreDocs[@ref.HitIndex++]; hit.ShardIndex = @ref.ShardIndex; diff --git a/src/Lucene.Net/Search/TopScoreDocCollector.cs b/src/Lucene.Net/Search/TopScoreDocCollector.cs index 3e8c330446..95062d7615 100644 --- a/src/Lucene.Net/Search/TopScoreDocCollector.cs +++ b/src/Lucene.Net/Search/TopScoreDocCollector.cs @@ -51,8 +51,11 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle these scores: - Debugging.Assert(() => !float.IsNegativeInfinity(score)); - Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !float.IsNegativeInfinity(score)); + Debugging.Assert(() => !float.IsNaN(score)); + } m_totalHits++; if (score <= pqTop.Score) @@ -90,9 +93,12 @@ public override void Collect(int doc) { float score = scorer.GetScore(); - // this collector cannot handle these scores: - Debugging.Assert(() => !float.IsNegativeInfinity(score)); - Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) + { + // this collector cannot handle these scores: + Debugging.Assert(() => !float.IsNegativeInfinity(score)); + Debugging.Assert(() => !float.IsNaN(score)); + } m_totalHits++; @@ -145,7 +151,7 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle NaN - Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); m_totalHits++; if (score < pqTop.Score) @@ -188,7 +194,7 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle NaN - Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); m_totalHits++; if (score > after.Score || (score == after.Score && doc <= afterDoc)) diff --git a/src/Lucene.Net/Search/TopTermsRewrite.cs b/src/Lucene.Net/Search/TopTermsRewrite.cs index 55c585f3cf..234a52d9aa 100644 --- a/src/Lucene.Net/Search/TopTermsRewrite.cs +++ b/src/Lucene.Net/Search/TopTermsRewrite.cs @@ -84,7 +84,7 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) foreach (ScoreTerm st in scoreTerms) { Term term = new Term(query.m_field, st.Bytes); - Debugging.Assert(() => reader.DocFreq(term) == st.TermState.DocFreq, () => "reader DF is " + reader.DocFreq(term) + " vs " + st.TermState.DocFreq + " term=" + term); + if (Debugging.AssertsEnabled) Debugging.Assert(() => reader.DocFreq(term) == st.TermState.DocFreq, () => "reader DF is " + reader.DocFreq(term) + " vs " + st.TermState.DocFreq + " term=" + term); AddClause(q, term, st.TermState.DocFreq, query.Boost * st.Boost, st.TermState); // add to query } return q; @@ -120,7 +120,7 @@ public override void SetNextEnum(TermsEnum termsEnum) this.termsEnum = termsEnum; this.termComp = termsEnum.Comparer; - Debugging.Assert(() => CompareToLastTerm(null)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => CompareToLastTerm(null)); // lazy init the initial ScoreTerm because comparer is not known on ctor: if (st == null) @@ -145,7 +145,7 @@ private bool 
 CompareToLastTerm(BytesRef t)
 }
 else
 {
- Debugging.Assert(() => termsEnum.Comparer.Compare(lastTerm, t) < 0, () => "lastTerm=" + lastTerm + " t=" + t);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum.Comparer.Compare(lastTerm, t) < 0, () => "lastTerm=" + lastTerm + " t=" + t);
 lastTerm.CopyBytes(t);
 }
 return true;
@@ -157,7 +157,7 @@ public override bool Collect(BytesRef bytes)
 // make sure within a single seg we always collect
 // terms in order
- Debugging.Assert(() => CompareToLastTerm(bytes));
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => CompareToLastTerm(bytes));
 //System.out.println("TTR.collect term=" + bytes.utf8ToString() + " boost=" + boost + " ord=" + readerContext.ord);
 // ignore uncompetitive hits
@@ -174,11 +174,11 @@ public override bool Collect(BytesRef bytes)
 }
 }
 TermState state = termsEnum.GetTermState();
- Debugging.Assert(() => state != null);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null);
 if (visitedTerms.TryGetValue(bytes, out ScoreTerm t2))
 {
 // if the term is already in the PQ, only update docFreq of term in PQ
- Debugging.Assert(() => t2.Boost == boost, () => "boost should be equal in all segment TermsEnums");
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => t2.Boost == boost, () => "boost should be equal in all segment TermsEnums");
 t2.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
 }
 else
@@ -187,7 +187,7 @@ public override bool Collect(BytesRef bytes)
 st.Bytes.CopyBytes(bytes);
 st.Boost = boost;
 visitedTerms[st.Bytes] = st;
- Debugging.Assert(() => st.TermState.DocFreq == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => st.TermState.DocFreq == 0);
 st.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
 stQueue.Add(st);
 // possibly drop entries from queue
@@ -201,7 +201,7 @@ public override bool Collect(BytesRef bytes)
 {
 st = new ScoreTerm(termComp, new TermContext(m_topReaderContext));
 }
- Debugging.Assert(() => stQueue.Count <= maxSize, () => "the PQ size must be limited to maxSize");
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => stQueue.Count <= maxSize, () => "the PQ size must be limited to maxSize");
 // set maxBoostAtt with values to help FuzzyTermsEnum to optimize
 if (stQueue.Count == maxSize)
 {
@@ -247,7 +247,7 @@ public override bool Equals(object obj)
 private static readonly IComparer<ScoreTerm> scoreTermSortByTermComp = Comparer<ScoreTerm>.Create((st1, st2) =>
 {
- Debugging.Assert(() => st1.TermComp == st2.TermComp, () => "term comparer should not change between segments");
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => st1.TermComp == st2.TermComp, () => "term comparer should not change between segments");
 return st1.TermComp.Compare(st1.Bytes, st2.Bytes);
 });
diff --git a/src/Lucene.Net/Store/BaseDirectory.cs b/src/Lucene.Net/Store/BaseDirectory.cs
index 63e0d6a3f5..37286f2e56 100644
--- a/src/Lucene.Net/Store/BaseDirectory.cs
+++ b/src/Lucene.Net/Store/BaseDirectory.cs
@@ -66,7 +66,7 @@ public override void ClearLock(string name)
 public override void SetLockFactory(LockFactory lockFactory)
 {
- Debugging.Assert(() => lockFactory != null);
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => lockFactory != null);
 this.m_lockFactory = lockFactory;
 lockFactory.LockPrefix = this.GetLockID();
 }
diff --git a/src/Lucene.Net/Store/BufferedIndexInput.cs b/src/Lucene.Net/Store/BufferedIndexInput.cs
index 40e32ed03f..142f6173f0 100644
--- a/src/Lucene.Net/Store/BufferedIndexInput.cs
+++ 
b/src/Lucene.Net/Store/BufferedIndexInput.cs @@ -80,7 +80,7 @@ public BufferedIndexInput(string resourceDesc, int bufferSize) /// Change the buffer size used by this public void SetBufferSize(int newSize) { - Debugging.Assert(() => m_buffer == null || bufferSize == m_buffer.Length, () => "buffer=" + m_buffer + " bufferSize=" + bufferSize + " buffer.length=" + (m_buffer != null ? m_buffer.Length : 0)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_buffer == null || bufferSize == m_buffer.Length, () => "buffer=" + m_buffer + " bufferSize=" + bufferSize + " buffer.length=" + (m_buffer != null ? m_buffer.Length : 0)); if (newSize != bufferSize) { CheckBufferSize(newSize); diff --git a/src/Lucene.Net/Store/ByteArrayDataOutput.cs b/src/Lucene.Net/Store/ByteArrayDataOutput.cs index c23564f40a..c2a7dcfcb4 100644 --- a/src/Lucene.Net/Store/ByteArrayDataOutput.cs +++ b/src/Lucene.Net/Store/ByteArrayDataOutput.cs @@ -66,13 +66,13 @@ public virtual void Reset(byte[] bytes, int offset, int len) public override void WriteByte(byte b) { - Debugging.Assert(() => pos < limit); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < limit); bytes[pos++] = b; } public override void WriteBytes(byte[] b, int offset, int length) { - Debugging.Assert(() => pos + length <= limit); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pos + length <= limit); System.Buffer.BlockCopy(b, offset, bytes, pos, length); pos += length; } diff --git a/src/Lucene.Net/Store/ByteBufferIndexInput.cs b/src/Lucene.Net/Store/ByteBufferIndexInput.cs index 8402ad9ff3..21575c8670 100644 --- a/src/Lucene.Net/Store/ByteBufferIndexInput.cs +++ b/src/Lucene.Net/Store/ByteBufferIndexInput.cs @@ -86,8 +86,11 @@ internal ByteBufferIndexInput(string resourceDescription, ByteBuffer[] buffers, // uses RuntimeHelpers.GetHashCode() to find the item, so technically, it IS an identity collection. this.clones = trackClones ? 
new ConditionalWeakTable() : null; - Debugging.Assert(() => chunkSizePower >= 0 && chunkSizePower <= 30); - Debugging.Assert(() => ((long)((ulong)length >> chunkSizePower)) < int.MaxValue); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => chunkSizePower >= 0 && chunkSizePower <= 30); + Debugging.Assert(() => ((long)((ulong)length >> chunkSizePower)) < int.MaxValue); + } // LUCENENET specific: MMapIndexInput calls SetBuffers() to populate // the buffers, so we need to skip that call if it is null here, and @@ -301,7 +304,7 @@ private ByteBufferIndexInput BuildSlice(long offset, long length) ByteBufferIndexInput clone = (ByteBufferIndexInput)base.Clone(); clone.isClone = true; // we keep clone.clones, so it shares the same map with original and we have no additional cost on clones - Debugging.Assert(() => clone.clones == this.clones); + if (Debugging.AssertsEnabled) Debugging.Assert(() => clone.clones == this.clones); clone.buffers = BuildSlice(buffers, offset, length); clone.offset = (int)(offset & chunkSizeMask); clone.length = length; @@ -391,7 +394,7 @@ protected override void Dispose(bool disposing) #if FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR foreach (var pair in clones) { - Debugging.Assert(() => pair.Key.isClone); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pair.Key.isClone); pair.Key.UnsetBuffers(); } this.clones.Clear(); diff --git a/src/Lucene.Net/Store/CompoundFileDirectory.cs b/src/Lucene.Net/Store/CompoundFileDirectory.cs index 92db1d2f3c..bf7b4bf061 100644 --- a/src/Lucene.Net/Store/CompoundFileDirectory.cs +++ b/src/Lucene.Net/Store/CompoundFileDirectory.cs @@ -120,7 +120,7 @@ public CompoundFileDirectory(Directory directory, string fileName, IOContext con } else { - Debugging.Assert(() => !(directory is CompoundFileDirectory), () => "compound file inside of compound file: " + fileName); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !(directory is CompoundFileDirectory), () => "compound file inside of compound file: " + fileName); this.entries = SENTINEL; this.IsOpen = true; writer = new CompoundFileWriter(directory, fileName); @@ -295,7 +295,7 @@ protected override void Dispose(bool disposing) IsOpen = false; if (writer != null) { - Debugging.Assert(() => openForWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(() => openForWrite); writer.Dispose(); } else @@ -311,7 +311,7 @@ public override IndexInput OpenInput(string name, IOContext context) lock (this) { EnsureOpen(); - Debugging.Assert(() => !openForWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !openForWrite); string id = IndexFileNames.StripSegmentName(name); if (!entries.TryGetValue(id, out FileEntry entry) || entry == null) { @@ -415,7 +415,7 @@ public override Lock MakeLock(string name) public override IndexInputSlicer CreateSlicer(string name, IOContext context) { EnsureOpen(); - Debugging.Assert(() => !openForWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !openForWrite); string id = IndexFileNames.StripSegmentName(name); if (!entries.TryGetValue(id, out FileEntry entry) || entry == null) { diff --git a/src/Lucene.Net/Store/CompoundFileWriter.cs b/src/Lucene.Net/Store/CompoundFileWriter.cs index 3c1953e6ee..5250880e33 100644 --- a/src/Lucene.Net/Store/CompoundFileWriter.cs +++ b/src/Lucene.Net/Store/CompoundFileWriter.cs @@ -160,7 +160,7 @@ public void Dispose() closed = true; // open the compound stream GetOutput(); - Debugging.Assert(() => dataOut != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dataOut != null); 
CodecUtil.WriteFooter(dataOut); } catch (IOException e) @@ -253,7 +253,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) bool outputLocked = false; try { - Debugging.Assert(() => name != null, () => "name must not be null"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => name != null, () => "name must not be null"); if (entries.ContainsKey(name)) { throw new ArgumentException("File " + name + " already exists"); @@ -262,7 +262,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) entry.File = name; entries[name] = entry; string id = IndexFileNames.StripSegmentName(name); - Debugging.Assert(() => !seenIDs.Contains(id), () => "file=\"" + name + "\" maps to id=\"" + id + "\", which was already written"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !seenIDs.Contains(id), () => "file=\"" + name + "\" maps to id=\"" + id + "\", which was already written"); seenIDs.Add(id); DirectCFSIndexOutput @out; @@ -285,7 +285,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) entries.Remove(name); if (outputLocked) // release the output lock if not successful { - Debugging.Assert(() => outputTaken); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outputTaken); ReleaseOutputLock(); } } @@ -315,7 +315,7 @@ private void PrunePendingEntries() finally { bool compareAndSet = outputTaken.CompareAndSet(true, false); - Debugging.Assert(() => compareAndSet); + if (Debugging.AssertsEnabled) Debugging.Assert(() => compareAndSet); } } } @@ -397,7 +397,7 @@ public override long GetFilePointer() [Obsolete("(4.1) this method will be removed in Lucene 5.0")] public override void Seek(long pos) { - Debugging.Assert(() => !closed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !closed); @delegate.Seek(offset + pos); } @@ -405,21 +405,21 @@ public override long Length { get { - Debugging.Assert(() => !closed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !closed); return @delegate.Length - offset; } } public override void WriteByte(byte b) { - Debugging.Assert(() => !closed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !closed); writtenBytes++; @delegate.WriteByte(b); } public override void WriteBytes(byte[] b, int offset, int length) { - Debugging.Assert(() => !closed); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !closed); writtenBytes += length; @delegate.WriteBytes(b, offset, length); } diff --git a/src/Lucene.Net/Store/DataInput.cs b/src/Lucene.Net/Store/DataInput.cs index 0979d2daae..fb21389e42 100644 --- a/src/Lucene.Net/Store/DataInput.cs +++ b/src/Lucene.Net/Store/DataInput.cs @@ -324,7 +324,7 @@ public virtual void SkipBytes(long numBytes) { skipBuffer = new byte[SKIP_BUFFER_SIZE]; } - Debugging.Assert(() => skipBuffer.Length == SKIP_BUFFER_SIZE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => skipBuffer.Length == SKIP_BUFFER_SIZE); for (long skipped = 0; skipped < numBytes; ) { var step = (int)Math.Min(SKIP_BUFFER_SIZE, numBytes - skipped); diff --git a/src/Lucene.Net/Store/DataOutput.cs b/src/Lucene.Net/Store/DataOutput.cs index b7e1937e93..a4a27aa64f 100644 --- a/src/Lucene.Net/Store/DataOutput.cs +++ b/src/Lucene.Net/Store/DataOutput.cs @@ -231,7 +231,7 @@ public virtual void WriteInt64(long i) /// public void WriteVInt64(long i) { - Debugging.Assert(() => i >= 0L); + if (Debugging.AssertsEnabled) Debugging.Assert(() => i >= 0L); while ((i & ~0x7FL) != 0L) { WriteByte((byte)unchecked((sbyte)((i & 0x7FL) | 0x80L))); @@ -262,7 +262,7 @@ public virtual void WriteString(string s) /// 
Copy numBytes bytes from input to ourself. public virtual void CopyBytes(DataInput input, long numBytes) { - Debugging.Assert(() => numBytes >= 0, () => "numBytes=" + numBytes); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numBytes >= 0, () => "numBytes=" + numBytes); long left = numBytes; if (copyBuffer == null) { diff --git a/src/Lucene.Net/Store/IOContext.cs b/src/Lucene.Net/Store/IOContext.cs index bd791448a4..7a5240e39e 100644 --- a/src/Lucene.Net/Store/IOContext.cs +++ b/src/Lucene.Net/Store/IOContext.cs @@ -67,7 +67,7 @@ public IOContext() public IOContext(FlushInfo flushInfo) { - Debugging.Assert(() => flushInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => flushInfo != null); this.Context = UsageContext.FLUSH; this.MergeInfo = null; this.ReadOnce = false; @@ -94,8 +94,11 @@ public IOContext(MergeInfo mergeInfo) private IOContext(UsageContext context, MergeInfo mergeInfo) { - Debugging.Assert(() => context != UsageContext.MERGE || mergeInfo != null, () => "MergeInfo must not be null if context is MERGE"); - Debugging.Assert(() => context != UsageContext.FLUSH, () => "Use IOContext(FlushInfo) to create a FLUSH IOContext"); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => context != UsageContext.MERGE || mergeInfo != null, () => "MergeInfo must not be null if context is MERGE"); + Debugging.Assert(() => context != UsageContext.FLUSH, () => "Use IOContext(FlushInfo) to create a FLUSH IOContext"); + } this.Context = context; this.ReadOnce = false; this.MergeInfo = mergeInfo; diff --git a/src/Lucene.Net/Store/MMapDirectory.cs b/src/Lucene.Net/Store/MMapDirectory.cs index a0ccb0bcdc..a469d7234a 100644 --- a/src/Lucene.Net/Store/MMapDirectory.cs +++ b/src/Lucene.Net/Store/MMapDirectory.cs @@ -111,7 +111,7 @@ public MMapDirectory(DirectoryInfo path, LockFactory lockFactory, int maxChunkSi throw new ArgumentException("Maximum chunk size for mmap must be >0"); } this.chunkSizePower = 31 - maxChunkSize.LeadingZeroCount(); - Debugging.Assert(() => this.chunkSizePower >= 0 && this.chunkSizePower <= 30); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.chunkSizePower >= 0 && this.chunkSizePower <= 30); } /// diff --git a/src/Lucene.Net/Store/NIOFSDirectory.cs b/src/Lucene.Net/Store/NIOFSDirectory.cs index 8eefa140eb..f0658c44b2 100644 --- a/src/Lucene.Net/Store/NIOFSDirectory.cs +++ b/src/Lucene.Net/Store/NIOFSDirectory.cs @@ -234,7 +234,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) if (b == m_buffer && 0 == offset) { // Use our own pre-wrapped byteBuf: - Debugging.Assert(() => byteBuf != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => byteBuf != null); byteBuf.Clear(); byteBuf.Limit = len; bb = byteBuf; @@ -259,7 +259,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) { int toRead = Math.Min(CHUNK_SIZE, readLength); bb.Limit = readOffset + toRead; - Debugging.Assert(() => bb.Remaining == toRead); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bb.Remaining == toRead); int i = m_channel.Read(bb, pos); if (i <= 0) // be defensive here, even though we checked before hand, something could have changed { @@ -269,7 +269,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) readOffset += i; readLength -= i; } - Debugging.Assert(() => readLength == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => readLength == 0); } catch (IOException ioe) { diff --git a/src/Lucene.Net/Store/RAMOutputStream.cs b/src/Lucene.Net/Store/RAMOutputStream.cs index 
d1e431513a..fec7838db0 100644 --- a/src/Lucene.Net/Store/RAMOutputStream.cs +++ b/src/Lucene.Net/Store/RAMOutputStream.cs @@ -162,7 +162,7 @@ public override void WriteByte(byte b) public override void WriteBytes(byte[] b, int offset, int len) { - Debugging.Assert(() => b != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b != null); crc.Update(b, offset, len); while (len > 0) { diff --git a/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs b/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs index 1d352f7ca0..8ad4dce9df 100644 --- a/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs +++ b/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs @@ -67,7 +67,7 @@ public override void Copy(Directory to, string src, string dest, IOContext conte private RateLimiter GetRateLimiter(IOContext.UsageContext context) { - //Debugging.Assert(context != null); // LUCENENET NOTE: In .NET, enum can never be null + //if (Debugging.AssertsEnabled) Debugging.Assert(context != null); // LUCENENET NOTE: In .NET, enum can never be null RateLimiter ret; return _contextRateLimiters.TryGetValue(context, out ret) ? ret : null; } diff --git a/src/Lucene.Net/Store/SimpleFSDirectory.cs b/src/Lucene.Net/Store/SimpleFSDirectory.cs index 35167d9954..5e80121224 100644 --- a/src/Lucene.Net/Store/SimpleFSDirectory.cs +++ b/src/Lucene.Net/Store/SimpleFSDirectory.cs @@ -226,7 +226,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) // { // throw new EndOfStreamException("read past EOF: " + this + " off: " + offset + " len: " + len + " total: " + total + " chunkLen: " + toRead + " end: " + m_end); // } - // Debugging.Assert(i > 0, "RandomAccessFile.read with non zero-length toRead must always read at least one byte"); + // if (Debugging.AssertsEnabled) Debugging.Assert(i > 0, "RandomAccessFile.read with non zero-length toRead must always read at least one byte"); // total += i; //} @@ -235,7 +235,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) // all we need to do is Read(). 
total = m_file.Read(b, offset, len); - Debugging.Assert(() => total == len); + if (Debugging.AssertsEnabled) Debugging.Assert(() => total == len); } catch (IOException ioe) { diff --git a/src/Lucene.Net/Support/BitArrayExtensions.cs b/src/Lucene.Net/Support/BitArrayExtensions.cs index ce15d315ae..c607d5d884 100644 --- a/src/Lucene.Net/Support/BitArrayExtensions.cs +++ b/src/Lucene.Net/Support/BitArrayExtensions.cs @@ -278,7 +278,7 @@ public static void SafeSet(this BitArray a, int loc, bool value) // Clears all bits in this BitArray that correspond to a set bit in the parameter BitArray public static void AndNot(this BitArray bitsA, BitArray bitsB) { - //Debugging.Assert(bitsA.Length == bitsB.Length, "BitArray lengths are not the same"); + //if (Debugging.AssertsEnabled) Debugging.Assert(bitsA.Length == bitsB.Length, "BitArray lengths are not the same"); for (int i = 0; i < bitsA.Length; i++) { //bitsA was longer than bitsB diff --git a/src/Lucene.Net/Support/Collections.cs b/src/Lucene.Net/Support/Collections.cs index ff632a5b47..468bc5a93a 100644 --- a/src/Lucene.Net/Support/Collections.cs +++ b/src/Lucene.Net/Support/Collections.cs @@ -270,7 +270,7 @@ private class ReverseComparer2 : IComparer public ReverseComparer2(IComparer cmp) { - Debugging.Assert(() => cmp != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cmp != null); this.cmp = cmp; } diff --git a/src/Lucene.Net/Util/ArrayUtil.cs b/src/Lucene.Net/Util/ArrayUtil.cs index 8c19f4f38d..238c0c6c5d 100644 --- a/src/Lucene.Net/Util/ArrayUtil.cs +++ b/src/Lucene.Net/Util/ArrayUtil.cs @@ -270,7 +270,7 @@ public static int GetShrinkSize(int currentSize, int targetSize, int bytesPerEle public static short[] Grow(short[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { short[] newArray = new short[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT16)]; @@ -290,7 +290,7 @@ public static short[] Grow(short[] array) public static float[] Grow(float[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { float[] newArray = new float[Oversize(minSize, RamUsageEstimator.NUM_BYTES_SINGLE)]; @@ -310,7 +310,7 @@ public static float[] Grow(float[] array) public static double[] Grow(double[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { double[] newArray = new double[Oversize(minSize, RamUsageEstimator.NUM_BYTES_DOUBLE)]; @@ -330,7 +330,7 @@ public static double[] Grow(double[] array) public static short[] Shrink(short[] array, int targetSize) { - Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = 
GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT16); if (newSize != array.Length) { @@ -346,7 +346,7 @@ public static short[] Shrink(short[] array, int targetSize) public static int[] Grow(int[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { int[] newArray = new int[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT32)]; @@ -366,7 +366,7 @@ public static int[] Grow(int[] array) public static int[] Shrink(int[] array, int targetSize) { - Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT32); if (newSize != array.Length) { @@ -382,7 +382,7 @@ public static int[] Shrink(int[] array, int targetSize) public static long[] Grow(long[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { long[] newArray = new long[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT64)]; @@ -402,7 +402,7 @@ public static long[] Grow(long[] array) public static long[] Shrink(long[] array, int targetSize) { - Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT64); if (newSize != array.Length) { @@ -419,7 +419,7 @@ public static long[] Shrink(long[] array, int targetSize) [CLSCompliant(false)] public static sbyte[] Grow(sbyte[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { var newArray = new sbyte[Oversize(minSize, 1)]; @@ -434,7 +434,7 @@ public static sbyte[] Grow(sbyte[] array, int minSize) public static byte[] Grow(byte[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { byte[] newArray = new byte[Oversize(minSize, 1)]; @@ -454,7 +454,7 @@ public static byte[] Grow(byte[] array) public static byte[] Shrink(byte[] array, int targetSize) { - Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int 
newSize = GetShrinkSize(array.Length, targetSize, 1); if (newSize != array.Length) { @@ -470,7 +470,7 @@ public static byte[] Shrink(byte[] array, int targetSize) public static bool[] Grow(bool[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { bool[] newArray = new bool[Oversize(minSize, 1)]; @@ -490,7 +490,7 @@ public static bool[] Grow(bool[] array) public static bool[] Shrink(bool[] array, int targetSize) { - Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, 1); if (newSize != array.Length) { @@ -506,7 +506,7 @@ public static bool[] Shrink(bool[] array, int targetSize) public static char[] Grow(char[] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { char[] newArray = new char[Oversize(minSize, RamUsageEstimator.NUM_BYTES_CHAR)]; @@ -526,7 +526,7 @@ public static char[] Grow(char[] array) public static char[] Shrink(char[] array, int targetSize) { - Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_CHAR); if (newSize != array.Length) { @@ -543,7 +543,7 @@ public static char[] Shrink(char[] array, int targetSize) [CLSCompliant(false)] public static int[][] Grow(int[][] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { var newArray = new int[Oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][]; @@ -565,7 +565,7 @@ public static int[][] Grow(int[][] array) [CLSCompliant(false)] public static int[][] Shrink(int[][] array, int targetSize) { - Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF); if (newSize != array.Length) { @@ -582,7 +582,7 @@ public static int[][] Shrink(int[][] array, int targetSize) [CLSCompliant(false)] public static float[][] Grow(float[][] array, int minSize) { - Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be 
positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { float[][] newArray = new float[Oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][]; @@ -604,7 +604,7 @@ public static float[][] Grow(float[][] array) [CLSCompliant(false)] public static float[][] Shrink(float[][] array, int targetSize) { - Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF); if (newSize != array.Length) { @@ -779,7 +779,7 @@ public static int[] ToInt32Array(ICollection ints) } // paranoia: - Debugging.Assert(() => upto == result.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == result.Length); return result; } diff --git a/src/Lucene.Net/Util/AttributeSource.cs b/src/Lucene.Net/Util/AttributeSource.cs index 3b7927cff3..3971574589 100644 --- a/src/Lucene.Net/Util/AttributeSource.cs +++ b/src/Lucene.Net/Util/AttributeSource.cs @@ -370,7 +370,7 @@ public void AddAttributeImpl(Attribute att) foreach (var curInterfaceRef in foundInterfaces) { curInterfaceRef.TryGetTarget(out Type curInterface); - Debugging.Assert(() => curInterface != null, () => "We have a strong reference on the class holding the interfaces, so they should never get evicted"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => curInterface != null, () => "We have a strong reference on the class holding the interfaces, so they should never get evicted"); // Attribute is a superclass of this interface if (!attributes.ContainsKey(curInterface)) { diff --git a/src/Lucene.Net/Util/Automaton/Automaton.cs b/src/Lucene.Net/Util/Automaton/Automaton.cs index b7ae9136c3..a6b20c1fb0 100644 --- a/src/Lucene.Net/Util/Automaton/Automaton.cs +++ b/src/Lucene.Net/Util/Automaton/Automaton.cs @@ -299,7 +299,7 @@ public virtual void SetNumberedStates(State[] states) public virtual void SetNumberedStates(State[] states, int count) { - Debugging.Assert(() => count <= states.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count <= states.Length); // TODO: maybe we can eventually allow for oversizing here... 
if (count < states.Length) { @@ -550,7 +550,7 @@ public virtual Transition[][] GetSortedTransitions() s.SortTransitions(Transition.COMPARE_BY_MIN_MAX_THEN_DEST); s.TrimTransitionsArray(); transitions[s.number] = s.TransitionsArray; - Debugging.Assert(() => s.TransitionsArray != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => s.TransitionsArray != null); } return transitions; } diff --git a/src/Lucene.Net/Util/Automaton/BasicOperations.cs b/src/Lucene.Net/Util/Automaton/BasicOperations.cs index 9a0793c215..e5485b96f2 100644 --- a/src/Lucene.Net/Util/Automaton/BasicOperations.cs +++ b/src/Lucene.Net/Util/Automaton/BasicOperations.cs @@ -721,7 +721,7 @@ private PointTransitions Find(int point) if (count == HASHMAP_CUTOVER) { // switch to HashMap on the fly - Debugging.Assert(() => map.Count == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => map.Count == 0); for (int i = 0; i < count; i++) { map[points[i].point] = points[i]; @@ -845,7 +845,7 @@ public static void Determinize(Automaton a) if (statesSet.upto > 0) { - Debugging.Assert(() => lastPoint != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastPoint != -1); statesSet.ComputeHash(); @@ -868,7 +868,7 @@ public static void Determinize(Automaton a) } else { - Debugging.Assert(() => (accCount > 0) == q.accept, () => "accCount=" + accCount + " vs existing accept=" + q.accept + " states=" + statesSet); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (accCount > 0) == q.accept, () => "accCount=" + accCount + " vs existing accept=" + q.accept + " states=" + statesSet); } r.AddTransition(new Transition(lastPoint, point - 1, q)); @@ -902,7 +902,7 @@ public static void Determinize(Automaton a) points.points[i].starts.count = 0; } points.Reset(); - Debugging.Assert(() => statesSet.upto == 0, () => "upto=" + statesSet.upto); + if (Debugging.AssertsEnabled) Debugging.Assert(() => statesSet.upto == 0, () => "upto=" + statesSet.upto); } a.deterministic = true; a.SetNumberedStates(newStatesArray, newStateUpto); diff --git a/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs b/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs index fd8de06584..d20418c773 100644 --- a/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs +++ b/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs @@ -228,7 +228,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) } } - Debugging.Assert(() => maxTransition != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => maxTransition != null); // Append floorLabel int floorLabel; @@ -256,7 +256,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) Transition[] transitions = sortedTransitions[state]; if (transitions.Length == 0) { - Debugging.Assert(() => RunAutomaton.IsAccept(state)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => RunAutomaton.IsAccept(state)); term.Length = idx; //if (DEBUG) System.out.println(" return " + term.utf8ToString()); return term; @@ -265,7 +265,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) { // We are pushing "top" -- so get last label of // last transition: - Debugging.Assert(() => transitions.Length != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => transitions.Length != 0); Transition lastTransition = transitions[transitions.Length - 1]; if (idx >= term.Bytes.Length) { @@ -364,7 +364,7 @@ public virtual BytesRef Floor(BytesRef input, BytesRef output) Transition[] transitions = sortedTransitions[state]; if (transitions.Length == 0) { - 
Debugging.Assert(() => RunAutomaton.IsAccept(state)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => RunAutomaton.IsAccept(state)); output.Length = idx; //if (DEBUG) System.out.println(" return " + output.utf8ToString()); return output; diff --git a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs index f1e6f079f5..5a96cddde9 100644 --- a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs +++ b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs @@ -131,7 +131,7 @@ public override int GetHashCode() /// internal State NewState(int label) { - Debugging.Assert(() => Array.BinarySearch(labels, label) < 0, () => "State already has transition labeled: " + label); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Array.BinarySearch(labels, label) < 0, () => "State already has transition labeled: " + label); labels = Arrays.CopyOf(labels, labels.Length + 1); states = Arrays.CopyOf(states, states.Length + 1); @@ -145,7 +145,7 @@ internal State NewState(int label) /// internal State LastChild() // LUCENENET NOTE: Kept this a method because there is another overload { - Debugging.Assert(() => HasChildren, () => "No outgoing transitions."); + if (Debugging.AssertsEnabled) Debugging.Assert(() => HasChildren, () => "No outgoing transitions."); return states[states.Length - 1]; } @@ -161,7 +161,7 @@ internal State LastChild(int label) { s = states[index]; } - Debugging.Assert(() => s == GetState(label)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => s == GetState(label)); return s; } @@ -171,7 +171,7 @@ internal State LastChild(int label) /// internal void ReplaceLastChild(State state) { - Debugging.Assert(() => HasChildren, () => "No outgoing transitions."); + if (Debugging.AssertsEnabled) Debugging.Assert(() => HasChildren, () => "No outgoing transitions."); states[states.Length - 1] = state; } @@ -227,9 +227,12 @@ private static bool ReferenceEquals(object[] a1, object[] a2) /// public void Add(CharsRef current) { - Debugging.Assert(() => stateRegistry != null, () => "Automaton already built."); - Debugging.Assert(() => previous == null || comparer.Compare(previous, current) <= 0, () => "Input must be in sorted UTF-8 order: " + previous + " >= " + current); - Debugging.Assert(() => SetPrevious(current)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => stateRegistry != null, () => "Automaton already built."); + Debugging.Assert(() => previous == null || comparer.Compare(previous, current) <= 0, () => "Input must be in sorted UTF-8 order: " + previous + " >= " + current); + Debugging.Assert(() => SetPrevious(current)); + } // Descend in the automaton (find matching prefix). 
int pos = 0, max = current.Length; diff --git a/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs index ad11e5488f..d0e79358f4 100644 --- a/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs @@ -32,12 +32,12 @@ internal class Lev1ParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - Debugging.Assert(() => absState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs index f3e6e362a0..68095e6d4a 100644 --- a/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs @@ -34,12 +34,12 @@ internal class Lev1TParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - Debugging.Assert(() => absState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs index f87f7a048a..c5d74ce894 100644 --- a/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs @@ -32,12 +32,12 @@ internal class Lev2ParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - Debugging.Assert(() => absState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs index 1b08abc8fe..08325b7e98 100644 --- a/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs @@ -34,12 +34,12 @@ internal class Lev2TParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - Debugging.Assert(() => absState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs 
b/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs index 8048eab29c..e360036321 100644 --- a/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs +++ b/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs @@ -276,7 +276,7 @@ internal virtual bool IsAccept(int absState) // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); return m_w - offset + minErrors[state] <= m_n; } diff --git a/src/Lucene.Net/Util/Automaton/SortedIntSet.cs b/src/Lucene.Net/Util/Automaton/SortedIntSet.cs index 92192fd46b..bd0f40ea4c 100644 --- a/src/Lucene.Net/Util/Automaton/SortedIntSet.cs +++ b/src/Lucene.Net/Util/Automaton/SortedIntSet.cs @@ -157,7 +157,7 @@ public void Decr(int num) return; } } - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); } public void ComputeHash() diff --git a/src/Lucene.Net/Util/Automaton/State.cs b/src/Lucene.Net/Util/Automaton/State.cs index 2e5912ca46..1e49816d68 100644 --- a/src/Lucene.Net/Util/Automaton/State.cs +++ b/src/Lucene.Net/Util/Automaton/State.cs @@ -184,7 +184,7 @@ public virtual bool Accept /// public virtual State Step(int c) { - Debugging.Assert(() => c >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => c >= 0); for (int i = 0; i < numTransitions; i++) { Transition t = transitionsArray[i]; diff --git a/src/Lucene.Net/Util/Automaton/Transition.cs b/src/Lucene.Net/Util/Automaton/Transition.cs index fb4e54fce0..84ed6d7f83 100644 --- a/src/Lucene.Net/Util/Automaton/Transition.cs +++ b/src/Lucene.Net/Util/Automaton/Transition.cs @@ -63,7 +63,7 @@ public class Transition /// Destination state. public Transition(int c, State to) { - Debugging.Assert(() => c >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => c >= 0); min = max = c; this.to = to; } @@ -76,8 +76,11 @@ public Transition(int c, State to) /// Destination state. 
public Transition(int min, int max, State to) { - Debugging.Assert(() => min >= 0); - Debugging.Assert(() => max >= 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => min >= 0); + Debugging.Assert(() => max >= 0); + } if (max < min) { int t = max; diff --git a/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs b/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs index 313786d441..319a8482f3 100644 --- a/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs +++ b/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs @@ -177,8 +177,11 @@ private void Build(State start, State end, UTF8Sequence startUTF8, UTF8Sequence } else { - Debugging.Assert(() => startUTF8.len > upto + 1); - Debugging.Assert(() => endUTF8.len > upto + 1); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => startUTF8.len > upto + 1); + Debugging.Assert(() => endUTF8.len > upto + 1); + } State n = NewUTF8State(); // Single value leading edge diff --git a/src/Lucene.Net/Util/BroadWord.cs b/src/Lucene.Net/Util/BroadWord.cs index db7241caef..6515c67ca0 100644 --- a/src/Lucene.Net/Util/BroadWord.cs +++ b/src/Lucene.Net/Util/BroadWord.cs @@ -71,7 +71,7 @@ public static int Select(long x, int r) long b = (long)((ulong)(((long)((ulong)SmallerUpTo7_8(s, (r * L8_L)) >> 7)) * L8_L) >> 53); // & (~7L); // Step 3, side ways addition for byte number times 8 long l = r - (((long)((ulong)(s << 8) >> (int)b)) & 0xFFL); // Step 4, byte wise rank, subtract the rank with byte at b-8, or zero for b=0; - Debugging.Assert(() => 0L <= 1, () => l.ToString(CultureInfo.InvariantCulture)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => 0L <= 1, () => l.ToString(CultureInfo.InvariantCulture)); //assert l < 8 : l; //fails when bit r is not available. // Select bit l from byte (x >>> b): @@ -150,7 +150,7 @@ public static long SmallerUpto15_16(long x, long y) /// The index of the r-th 1 bit in x, or if no such bit exists, 72. public static int SelectNaive(long x, int r) { - Debugging.Assert(() => r >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => r >= 1); int s = -1; while ((x != 0L) && (r > 0)) { diff --git a/src/Lucene.Net/Util/ByteBlockPool.cs b/src/Lucene.Net/Util/ByteBlockPool.cs index 88d0731191..94474258bd 100644 --- a/src/Lucene.Net/Util/ByteBlockPool.cs +++ b/src/Lucene.Net/Util/ByteBlockPool.cs @@ -354,7 +354,7 @@ public void SetBytesRef(BytesRef term, int textStart) term.Length = (bytes[pos] & 0x7f) + ((bytes[pos + 1] & 0xff) << 7); term.Offset = pos + 2; } - Debugging.Assert(() => term.Length >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Length >= 0); } /// diff --git a/src/Lucene.Net/Util/BytesRef.cs b/src/Lucene.Net/Util/BytesRef.cs index 7e293b6af1..f12ad79a30 100644 --- a/src/Lucene.Net/Util/BytesRef.cs +++ b/src/Lucene.Net/Util/BytesRef.cs @@ -88,7 +88,7 @@ public BytesRef(byte[] bytes, int offset, int length) this.bytes = bytes; this.Offset = offset; this.Length = length; - Debugging.Assert(IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid); } /// @@ -140,7 +140,7 @@ public BytesRef(string text) /// unpaired surrogates or invalid UTF16 code units. public void CopyChars(ICharSequence text) { - Debugging.Assert(() => Offset == 0); // TODO broken if offset != 0 + if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); // TODO broken if offset != 0 UnicodeUtil.UTF16toUTF8(text, 0, text.Length, this); } @@ -151,7 +151,7 @@ public void CopyChars(ICharSequence text) /// unpaired surrogates or invalid UTF16 code units. 
public void CopyChars(string text) { - Debugging.Assert(() => Offset == 0); // TODO broken if offset != 0 + if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); // TODO broken if offset != 0 UnicodeUtil.UTF16toUTF8(text, 0, text.Length, this); } @@ -164,7 +164,7 @@ public void CopyChars(string text) /// Another , should not be null. public bool BytesEquals(BytesRef other) { - Debugging.Assert(() => other != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => other != null); if (Length == other.Length) { var otherUpto = other.Offset; @@ -298,7 +298,7 @@ public void Append(BytesRef other) /// public void Grow(int newLength) { - Debugging.Assert(() => Offset == 0); // NOTE: senseless if offset != 0 + if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); // NOTE: senseless if offset != 0 bytes = ArrayUtil.Grow(bytes, newLength); } @@ -307,7 +307,7 @@ public void Grow(int newLength) public int CompareTo(object other) // LUCENENET specific: Implemented IComparable for FieldComparer { BytesRef br = other as BytesRef; - Debugging.Assert(() => br != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => br != null); return utf8SortedAsUnicodeSortOrder.Compare(this, br); } diff --git a/src/Lucene.Net/Util/BytesRefArray.cs b/src/Lucene.Net/Util/BytesRefArray.cs index 4257dd3cce..e8c7db7345 100644 --- a/src/Lucene.Net/Util/BytesRefArray.cs +++ b/src/Lucene.Net/Util/BytesRefArray.cs @@ -97,7 +97,7 @@ public BytesRef Get(BytesRef spare, int index) { int offset = offsets[index]; int length = index == lastElement - 1 ? currentOffset - offset : offsets[index + 1] - offset; - Debugging.Assert(() => spare.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => spare.Offset == 0); spare.Grow(length); spare.Length = length; pool.ReadBytes(offset, spare.Bytes, spare.Offset, spare.Length); diff --git a/src/Lucene.Net/Util/BytesRefHash.cs b/src/Lucene.Net/Util/BytesRefHash.cs index 450483ff31..1e45f4ebae 100644 --- a/src/Lucene.Net/Util/BytesRefHash.cs +++ b/src/Lucene.Net/Util/BytesRefHash.cs @@ -121,8 +121,11 @@ public BytesRefHash(ByteBlockPool pool, int capacity, BytesStartArray bytesStart /// bytesID public BytesRef Get(int bytesID, BytesRef @ref) { - Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); - Debugging.Assert(() => bytesID < bytesStart.Length, () => "bytesID exceeds byteStart len: " + bytesStart.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); + Debugging.Assert(() => bytesID < bytesStart.Length, () => "bytesID exceeds byteStart len: " + bytesStart.Length); + } pool.SetBytesRef(@ref, bytesStart[bytesID]); return @ref; } @@ -137,7 +140,7 @@ public BytesRef Get(int bytesID, BytesRef @ref) /// public int[] Compact() { - Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); int upto = 0; for (int i = 0; i < hashSize; i++) { @@ -152,7 +155,7 @@ public int[] Compact() } } - Debugging.Assert(() => upto == count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == count); lastCount = count; return ids; } @@ -198,7 +201,7 @@ protected override void Swap(int i, int j) protected override int Compare(int i, int j) { int id1 = compact[i], id2 = compact[j]; - Debugging.Assert(() => outerInstance.bytesStart.Length > id1 && outerInstance.bytesStart.Length > id2); + 
if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.bytesStart.Length > id1 && outerInstance.bytesStart.Length > id2); outerInstance.pool.SetBytesRef(outerInstance.scratch1, outerInstance.bytesStart[id1]); outerInstance.pool.SetBytesRef(scratch2, outerInstance.bytesStart[id2]); return comp.Compare(outerInstance.scratch1, scratch2); @@ -207,14 +210,14 @@ protected override int Compare(int i, int j) protected override void SetPivot(int i) { int id = compact[i]; - Debugging.Assert(() => outerInstance.bytesStart.Length > id); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.bytesStart.Length > id); outerInstance.pool.SetBytesRef(pivot, outerInstance.bytesStart[id]); } protected override int ComparePivot(int j) { int id = compact[j]; - Debugging.Assert(() => outerInstance.bytesStart.Length > id); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.bytesStart.Length > id); outerInstance.pool.SetBytesRef(scratch2, outerInstance.bytesStart[id]); return comp.Compare(pivot, scratch2); } @@ -301,7 +304,7 @@ public void Dispose() /// public int Add(BytesRef bytes) { - Debugging.Assert(() => bytesStart != null, () => "Bytesstart is null - not initialized"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null, () => "Bytesstart is null - not initialized"); int length = bytes.Length; // final position int hashPos = FindHash(bytes); @@ -324,7 +327,7 @@ public int Add(BytesRef bytes) if (count >= bytesStart.Length) { bytesStart = bytesStartArray.Grow(); - Debugging.Assert(() => count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); } e = count++; @@ -339,7 +342,7 @@ public int Add(BytesRef bytes) // 1 byte to store length buffer[bufferUpto] = (byte)length; pool.ByteUpto += length + 1; - Debugging.Assert(() => length >= 0, () => "Length must be positive: " + length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "Length must be positive: " + length); System.Buffer.BlockCopy(bytes.Bytes, bytes.Offset, buffer, bufferUpto + 1, length); } else @@ -350,7 +353,7 @@ public int Add(BytesRef bytes) pool.ByteUpto += length + 2; System.Buffer.BlockCopy(bytes.Bytes, bytes.Offset, buffer, bufferUpto + 2, length); } - Debugging.Assert(() => ids[hashPos] == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ids[hashPos] == -1); ids[hashPos] = e; if (count == hashHalfSize) @@ -377,7 +380,7 @@ public int Find(BytesRef bytes) private int FindHash(BytesRef bytes) { - Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); int code = DoHash(bytes.Bytes, bytes.Offset, bytes.Length); @@ -409,7 +412,7 @@ private int FindHash(BytesRef bytes) /// public int AddByPoolOffset(int offset) { - Debugging.Assert(() => bytesStart != null, () => "Bytesstart is null - not initialized"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null, () => "Bytesstart is null - not initialized"); // final position int code = offset; int hashPos = offset & hashMask; @@ -431,11 +434,11 @@ public int AddByPoolOffset(int offset) if (count >= bytesStart.Length) { bytesStart = bytesStartArray.Grow(); - Debugging.Assert(() => count < bytesStart.Length + 1, () => "count: " + count + " len: " + 
bytesStart.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); } e = count++; bytesStart[e] = offset; - Debugging.Assert(() => ids[hashPos] == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ids[hashPos] == -1); ids[hashPos] = e; if (count == hashHalfSize) @@ -489,7 +492,7 @@ private void Rehash(int newSize, bool hashOnData) } int hashPos = code & newMask; - Debugging.Assert(() => hashPos >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hashPos >= 0); if (newHash[hashPos] != -1) { // Conflict; use linear probe to find an open slot @@ -546,8 +549,11 @@ public void Reinit() /// for the given id public int ByteStart(int bytesID) { - Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); - Debugging.Assert(() => bytesID >= 0 && bytesID < count, () => bytesID.ToString()); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); + Debugging.Assert(() => bytesID >= 0 && bytesID < count, () => bytesID.ToString()); + } return bytesStart[bytesID]; } @@ -645,7 +651,7 @@ public override int[] Clear() public override int[] Grow() { - Debugging.Assert(() => bytesStart != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null); return bytesStart = ArrayUtil.Grow(bytesStart, bytesStart.Length + 1); } diff --git a/src/Lucene.Net/Util/CharsRef.cs b/src/Lucene.Net/Util/CharsRef.cs index 2116fe2aef..a604720498 100644 --- a/src/Lucene.Net/Util/CharsRef.cs +++ b/src/Lucene.Net/Util/CharsRef.cs @@ -99,7 +99,7 @@ public CharsRef(char[] chars, int offset, int length) this.chars = chars; this.Offset = offset; this.Length = length; - Debugging.Assert(IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid); } /// @@ -228,7 +228,7 @@ public void CopyChars(CharsRef other) /// public void Grow(int newLength) { - Debugging.Assert(() => Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); if (chars.Length < newLength) { chars = ArrayUtil.Grow(chars, newLength); diff --git a/src/Lucene.Net/Util/FilterIterator.cs b/src/Lucene.Net/Util/FilterIterator.cs index 9cd8fce613..2b16464d9e 100644 --- a/src/Lucene.Net/Util/FilterIterator.cs +++ b/src/Lucene.Net/Util/FilterIterator.cs @@ -47,7 +47,7 @@ public bool MoveNext() return false; } - Debugging.Assert(() => nextIsSet); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextIsSet); try { current = next; diff --git a/src/Lucene.Net/Util/FixedBitSet.cs b/src/Lucene.Net/Util/FixedBitSet.cs index 9d12d67f1a..f0ba370b61 100644 --- a/src/Lucene.Net/Util/FixedBitSet.cs +++ b/src/Lucene.Net/Util/FixedBitSet.cs @@ -259,7 +259,7 @@ public int Cardinality() public bool Get(int index) { - Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int i = index >> 6; // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. 
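The hunks above and below all apply one of two shapes: a lone assert gains an inline `if (Debugging.AssertsEnabled)` guard, and a run of adjacent asserts is grouped under a single braced guard. The guard is what makes the Release-mode opt-in cheap: the lambda overloads take `Func<bool>` and `Func<string>` delegates that capture locals, so an unguarded call allocates those delegates on every invocation even while asserts are switched off. A minimal sketch of both forms, with illustrative class and method names that are not part of the patch:

    using Lucene.Net.Diagnostics;

    internal static class GuardedAssertSketch
    {
        // Lone assert: inline guard, as in the FixedBitSet Get/Set hunks here.
        public static void CheckIndex(int index, int numBits)
        {
            if (Debugging.AssertsEnabled) Debugging.Assert(
                () => index >= 0 && index < numBits,
                () => "index=" + index + ", numBits=" + numBits);
        }

        // Adjacent asserts: one braced guard covering the whole run,
        // as in the FixedBitSet Flip/Set/Clear range overloads below.
        public static void CheckRange(int startIndex, int endIndex, int numBits)
        {
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(() => startIndex >= 0 && startIndex < numBits);
                Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits);
            }
        }
    }
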
@@ -270,7 +270,7 @@ public bool Get(int index) public void Set(int index) { - Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -279,7 +279,7 @@ public void Set(int index) public bool GetAndSet(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -290,7 +290,7 @@ public bool GetAndSet(int index) public void Clear(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; int bit = index & 0x03f; long bitmask = 1L << bit; @@ -299,7 +299,7 @@ public void Clear(int index) public bool GetAndClear(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -314,7 +314,7 @@ public bool GetAndClear(int index) /// public int NextSetBit(int index) { - Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int i = index >> 6; int subIndex = index & 0x3f; // index within the word long word = bits[i] >> subIndex; // skip all the bits to the right of index @@ -342,7 +342,7 @@ public int NextSetBit(int index) /// public int PrevSetBit(int index) { - Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int i = index >> 6; int subIndex = index & 0x3f; // index within the word long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index @@ -405,7 +405,7 @@ public void Or(FixedBitSet other) private void Or(long[] otherArr, int otherNumWords) { - Debugging.Assert(() => otherNumWords <= numWords, () => "numWords=" + numWords + ", otherNumWords=" + otherNumWords); + if (Debugging.AssertsEnabled) Debugging.Assert(() => otherNumWords <= numWords, () => "numWords=" + numWords + ", otherNumWords=" + otherNumWords); long[] thisArr = this.bits; int pos = Math.Min(numWords, otherNumWords); while (--pos >= 0) @@ -418,7 +418,7 @@ private void Or(long[] otherArr, int otherNumWords) /// this = this XOR other public void Xor(FixedBitSet other) { - Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); + if (Debugging.AssertsEnabled) Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); long[] thisBits = this.bits; long[] otherBits = other.bits; int pos = Math.Min(numWords, other.numWords); @@ -577,8 +577,11 @@ private void AndNot(long[] otherArr, int otherNumWords) /// One-past the last bit to flip public void Flip(int startIndex, int endIndex) { - Debugging.Assert(() => startIndex >= 0 && startIndex < 
numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + } if (endIndex <= startIndex) { return; @@ -623,8 +626,11 @@ public void Flip(int startIndex, int endIndex) /// One-past the last bit to set public void Set(int startIndex, int endIndex) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + } if (endIndex <= startIndex) { return; @@ -655,8 +661,11 @@ public void Set(int startIndex, int endIndex) /// One-past the last bit to clear public void Clear(int startIndex, int endIndex) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits, () => "startIndex=" + startIndex + ", numBits=" + numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits, () => "endIndex=" + endIndex + ", numBits=" + numBits); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits, () => "startIndex=" + startIndex + ", numBits=" + numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits, () => "endIndex=" + endIndex + ", numBits=" + numBits); + } if (endIndex <= startIndex) { return; diff --git a/src/Lucene.Net/Util/Fst/Builder.cs b/src/Lucene.Net/Util/Fst/Builder.cs index c48c50c09d..37b14d3f42 100644 --- a/src/Lucene.Net/Util/Fst/Builder.cs +++ b/src/Lucene.Net/Util/Fst/Builder.cs @@ -206,7 +206,7 @@ private CompiledNode CompileNode(UnCompiledNode nodeIn, int tailLength) { node = fst.AddNode(nodeIn); } - Debugging.Assert(() => node != -2); + if (Debugging.AssertsEnabled) Debugging.Assert(() => node != -2); nodeIn.Clear(); @@ -370,8 +370,11 @@ public virtual void Add(Int32sRef input, T output) output = NO_OUTPUT; } - Debugging.Assert(() => lastInput.Length == 0 || input.CompareTo(lastInput) >= 0, () => "inputs are added out of order lastInput=" + lastInput + " vs input=" + input); - Debugging.Assert(() => ValidOutput(output)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => lastInput.Length == 0 || input.CompareTo(lastInput) >= 0, () => "inputs are added out of order lastInput=" + lastInput + " vs input=" + input); + Debugging.Assert(() => ValidOutput(output)); + } //System.out.println("\nadd: " + input); if (input.Length == 0) @@ -441,7 +444,7 @@ public virtual void Add(Int32sRef input, T output) UnCompiledNode parentNode = frontier[idx - 1]; T lastOutput = parentNode.GetLastOutput(input.Int32s[input.Offset + idx - 1]); - Debugging.Assert(() => ValidOutput(lastOutput)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ValidOutput(lastOutput)); T commonOutputPrefix; T wordSuffix; @@ -449,9 +452,9 @@ public virtual void Add(Int32sRef input, T output) if (!lastOutput.Equals(NO_OUTPUT)) { commonOutputPrefix = fst.Outputs.Common(output, lastOutput); - Debugging.Assert(() => ValidOutput(commonOutputPrefix)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ValidOutput(commonOutputPrefix)); wordSuffix = fst.Outputs.Subtract(lastOutput, commonOutputPrefix); - Debugging.Assert(() => ValidOutput(wordSuffix)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ValidOutput(wordSuffix)); parentNode.SetLastOutput(input.Int32s[input.Offset + idx - 1], commonOutputPrefix); 
node.PrependOutput(wordSuffix); } @@ -461,7 +464,7 @@ public virtual void Add(Int32sRef input, T output) } output = fst.Outputs.Subtract(output, commonOutputPrefix); - Debugging.Assert(() => ValidOutput(output)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ValidOutput(output)); } if (lastInput.Length == input.Length && prefixLenPlus1 == 1 + input.Length) @@ -657,17 +660,20 @@ public void Clear() public S GetLastOutput(int labelToMatch) { - Debugging.Assert(() => NumArcs > 0); - Debugging.Assert(() => Arcs[NumArcs - 1].Label == labelToMatch); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => NumArcs > 0); + Debugging.Assert(() => Arcs[NumArcs - 1].Label == labelToMatch); + } return Arcs[NumArcs - 1].Output; } public void AddArc(int label, INode target) { - Debugging.Assert(() => label >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => label >= 0); if (NumArcs != 0) { - Debugging.Assert(() => label > Arcs[NumArcs - 1].Label, () => "arc[-1].Label=" + Arcs[NumArcs - 1].Label + " new label=" + label + " numArcs=" + NumArcs); + if (Debugging.AssertsEnabled) Debugging.Assert(() => label > Arcs[NumArcs - 1].Label, () => "arc[-1].Label=" + Arcs[NumArcs - 1].Label + " new label=" + label + " numArcs=" + NumArcs); } if (NumArcs == Arcs.Length) { @@ -688,9 +694,9 @@ public void AddArc(int label, INode target) public void ReplaceLast(int labelToMatch, INode target, S nextFinalOutput, bool isFinal) { - Debugging.Assert(() => NumArcs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => NumArcs > 0); Arc arc = Arcs[NumArcs - 1]; - Debugging.Assert(() => arc.Label == labelToMatch, () => "arc.Label=" + arc.Label + " vs " + labelToMatch); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label == labelToMatch, () => "arc.Label=" + arc.Label + " vs " + labelToMatch); arc.Target = target; //assert target.Node != -2; arc.NextFinalOutput = nextFinalOutput; @@ -699,36 +705,42 @@ public void ReplaceLast(int labelToMatch, INode target, S nextFinalOutput, bool public void DeleteLast(int label, INode target) { - Debugging.Assert(() => NumArcs > 0); - Debugging.Assert(() => label == Arcs[NumArcs - 1].Label); - Debugging.Assert(() => target == Arcs[NumArcs - 1].Target); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => NumArcs > 0); + Debugging.Assert(() => label == Arcs[NumArcs - 1].Label); + Debugging.Assert(() => target == Arcs[NumArcs - 1].Target); + } NumArcs--; } public void SetLastOutput(int labelToMatch, S newOutput) { - Debugging.Assert(() => Owner.ValidOutput(newOutput)); - Debugging.Assert(() => NumArcs > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Owner.ValidOutput(newOutput)); + Debugging.Assert(() => NumArcs > 0); + } Arc arc = Arcs[NumArcs - 1]; - Debugging.Assert(() => arc.Label == labelToMatch); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label == labelToMatch); arc.Output = newOutput; } // pushes an output prefix forward onto all arcs public void PrependOutput(S outputPrefix) { - Debugging.Assert(() => Owner.ValidOutput(outputPrefix)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Owner.ValidOutput(outputPrefix)); for (int arcIdx = 0; arcIdx < NumArcs; arcIdx++) { Arcs[arcIdx].Output = Owner.Fst.Outputs.Add(outputPrefix, Arcs[arcIdx].Output); - Debugging.Assert(() => Owner.ValidOutput(Arcs[arcIdx].Output)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Owner.ValidOutput(Arcs[arcIdx].Output)); } if (IsFinal) { Output = Owner.Fst.Outputs.Add(outputPrefix, Output); - Debugging.Assert(() => 
Owner.ValidOutput(Output)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Owner.ValidOutput(Output)); } } } diff --git a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs index f92702821d..e203edff5b 100644 --- a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs @@ -42,8 +42,11 @@ private ByteSequenceOutputs() public override BytesRef Common(BytesRef output1, BytesRef output2) { - Debugging.Assert(() => output1 != null); - Debugging.Assert(() => output2 != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output1 != null); + Debugging.Assert(() => output2 != null); + } int pos1 = output1.Offset; int pos2 = output2.Offset; @@ -81,8 +84,11 @@ public override BytesRef Common(BytesRef output1, BytesRef output2) public override BytesRef Subtract(BytesRef output, BytesRef inc) { - Debugging.Assert(() => output != null); - Debugging.Assert(() => inc != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output != null); + Debugging.Assert(() => inc != null); + } if (inc == NO_OUTPUT) { // no prefix removed @@ -95,16 +101,22 @@ public override BytesRef Subtract(BytesRef output, BytesRef inc) } else { - Debugging.Assert(() => inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); - Debugging.Assert(() => inc.Length > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); + Debugging.Assert(() => inc.Length > 0); + } return new BytesRef(output.Bytes, output.Offset + inc.Length, output.Length - inc.Length); } } public override BytesRef Add(BytesRef prefix, BytesRef output) { - Debugging.Assert(() => prefix != null); - Debugging.Assert(() => output != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => prefix != null); + Debugging.Assert(() => output != null); + } if (prefix == NO_OUTPUT) { return output; @@ -115,8 +127,8 @@ public override BytesRef Add(BytesRef prefix, BytesRef output) } else { - Debugging.Assert(() => prefix.Length > 0); - Debugging.Assert(() => output.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => prefix.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => output.Length > 0); BytesRef result = new BytesRef(prefix.Length + output.Length); Array.Copy(prefix.Bytes, prefix.Offset, result.Bytes, 0, prefix.Length); Array.Copy(output.Bytes, output.Offset, result.Bytes, prefix.Length, output.Length); @@ -127,7 +139,7 @@ public override BytesRef Add(BytesRef prefix, BytesRef output) public override void Write(BytesRef prefix, DataOutput @out) { - Debugging.Assert(() => prefix != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => prefix != null); @out.WriteVInt32(prefix.Length); @out.WriteBytes(prefix.Bytes, prefix.Offset, prefix.Length); } diff --git a/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs b/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs index 9e2074893c..ad1813fac4 100644 --- a/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs @@ -86,7 +86,7 @@ public BytesRefFSTEnum.InputOutput SeekExact(BytesRef target) m_targetLength = target.Length; if (base.DoSeekExact()) { - Debugging.Assert(() => m_upto == 1 + target.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_upto == 1 + target.Length); return SetResult(); } else diff --git a/src/Lucene.Net/Util/Fst/BytesStore.cs 
b/src/Lucene.Net/Util/Fst/BytesStore.cs index ee857d099a..2a8813aaa8 100644 --- a/src/Lucene.Net/Util/Fst/BytesStore.cs +++ b/src/Lucene.Net/Util/Fst/BytesStore.cs @@ -131,7 +131,7 @@ public override void WriteBytes(byte[] b, int offset, int len) internal virtual void WriteBytes(long dest, byte[] b, int offset, int len) { //System.out.println(" BS.writeBytes dest=" + dest + " offset=" + offset + " len=" + len); - Debugging.Assert(() => dest + len <= Position, () => "dest=" + dest + " pos=" + Position + " len=" + len); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dest + len <= Position, () => "dest=" + dest + " pos=" + Position + " len=" + len); // Note: weird: must go "backwards" because copyBytes // calls us with overlapping src/dest. If we @@ -198,7 +198,7 @@ internal virtual void WriteBytes(long dest, byte[] b, int offset, int len) public virtual void CopyBytes(long src, long dest, int len) { //System.out.println("BS.copyBytes src=" + src + " dest=" + dest + " len=" + len); - Debugging.Assert(() => src < dest); + if (Debugging.AssertsEnabled) Debugging.Assert(() => src < dest); // Note: weird: must go "backwards" because copyBytes // calls us with overlapping src/dest. If we @@ -287,8 +287,11 @@ public virtual void WriteInt32(long pos, int value) /// Reverse from , inclusive, to , inclusive. public virtual void Reverse(long srcPos, long destPos) { - Debugging.Assert(() => srcPos < destPos); - Debugging.Assert(() => destPos < Position); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => srcPos < destPos); + Debugging.Assert(() => destPos < Position); + } //System.out.println("reverse src=" + srcPos + " dest=" + destPos); int srcBlockIndex = (int)(srcPos >> blockBits); @@ -355,8 +358,11 @@ public virtual void SkipBytes(int len) /// public virtual void Truncate(long newLen) { - Debugging.Assert(() => newLen <= Position); - Debugging.Assert(() => newLen >= 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => newLen <= Position); + Debugging.Assert(() => newLen >= 0); + } int blockIndex = (int)(newLen >> blockBits); nextWrite = (int)(newLen & blockMask); if (nextWrite == 0) @@ -373,7 +379,7 @@ public virtual void Truncate(long newLen) { current = blocks[blockIndex]; } - Debugging.Assert(() => newLen == Position); + if (Debugging.AssertsEnabled) Debugging.Assert(() => newLen == Position); } public virtual void Finish() @@ -469,7 +475,7 @@ public override long Position nextBuffer = bufferIndex + 1; current = outerInstance.blocks[bufferIndex]; nextRead = (int)(value & outerInstance.blockMask); - Debugging.Assert(() => this.Position == value, () => "pos=" + value + " Position=" + this.Position); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.Position == value, () => "pos=" + value + " Position=" + this.Position); } } @@ -542,7 +548,7 @@ public override long Position nextBuffer = bufferIndex - 1; current = outerInstance.blocks[bufferIndex]; nextRead = (int)(value & outerInstance.blockMask); - Debugging.Assert(() => this.Position == value, () => "value=" + value + " this.Position=" + this.Position); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.Position == value, () => "value=" + value + " this.Position=" + this.Position); } } diff --git a/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs index c38c7da21b..ceedf894ba 100644 --- a/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs @@ -42,8 +42,11 @@ private CharSequenceOutputs() public override 
CharsRef Common(CharsRef output1, CharsRef output2) { - Debugging.Assert(() => output1 != null); - Debugging.Assert(() => output2 != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output1 != null); + Debugging.Assert(() => output2 != null); + } int pos1 = output1.Offset; int pos2 = output2.Offset; @@ -81,8 +84,11 @@ public override CharsRef Common(CharsRef output1, CharsRef output2) public override CharsRef Subtract(CharsRef output, CharsRef inc) { - Debugging.Assert(() => output != null); - Debugging.Assert(() => inc != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output != null); + Debugging.Assert(() => inc != null); + } if (inc == NO_OUTPUT) { // no prefix removed @@ -95,16 +101,22 @@ public override CharsRef Subtract(CharsRef output, CharsRef inc) } else { - Debugging.Assert(() => inc.Length < output.Length, () => "inc.Length=" + inc.Length + " vs output.Length=" + output.Length); - Debugging.Assert(() => inc.Length > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => inc.Length < output.Length, () => "inc.Length=" + inc.Length + " vs output.Length=" + output.Length); + Debugging.Assert(() => inc.Length > 0); + } return new CharsRef(output.Chars, output.Offset + inc.Length, output.Length - inc.Length); } } public override CharsRef Add(CharsRef prefix, CharsRef output) { - Debugging.Assert(() => prefix != null); - Debugging.Assert(() => output != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => prefix != null); + Debugging.Assert(() => output != null); + } if (prefix == NO_OUTPUT) { return output; @@ -115,8 +127,11 @@ public override CharsRef Add(CharsRef prefix, CharsRef output) } else { - Debugging.Assert(() => prefix.Length > 0); - Debugging.Assert(() => output.Length > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => prefix.Length > 0); + Debugging.Assert(() => output.Length > 0); + } var result = new CharsRef(prefix.Length + output.Length); Array.Copy(prefix.Chars, prefix.Offset, result.Chars, 0, prefix.Length); Array.Copy(output.Chars, output.Offset, result.Chars, prefix.Length, output.Length); @@ -127,7 +142,7 @@ public override CharsRef Add(CharsRef prefix, CharsRef output) public override void Write(CharsRef prefix, DataOutput @out) { - Debugging.Assert(() => prefix != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => prefix != null); @out.WriteVInt32(prefix.Length); // TODO: maybe UTF8? 
for (int idx = 0; idx < prefix.Length; idx++) diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs index 86bd5283dd..bee159ecf5 100644 --- a/src/Lucene.Net/Util/Fst/FST.cs +++ b/src/Lucene.Net/Util/Fst/FST.cs @@ -376,8 +376,11 @@ private void CacheRootArcs() cachedRootArcs = (FST.Arc[])new FST.Arc[0x80]; ReadRootArcs(cachedRootArcs); - Debugging.Assert(() => SetAssertingRootArcs(cachedRootArcs)); - Debugging.Assert(AssertRootArcs); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => SetAssertingRootArcs(cachedRootArcs)); + Debugging.Assert(AssertRootArcs); + } } public void ReadRootArcs(FST.Arc[] arcs) @@ -390,7 +393,7 @@ public void ReadRootArcs(FST.Arc[] arcs) ReadFirstRealTargetArc(arc.Target, arc, @in); while (true) { - Debugging.Assert(() => arc.Label != FST.END_LABEL); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label != FST.END_LABEL); if (arc.Label < cachedRootArcs.Length) { arcs[arc.Label] = (new FST.Arc()).CopyFrom(arc); @@ -581,15 +584,15 @@ public void Save(FileInfo file) // LUCENENET NOTE: static Read() was moved into the FST class private void WriteLabel(DataOutput @out, int v) { - Debugging.Assert(() => v >= 0, () => "v=" + v); + if (Debugging.AssertsEnabled) Debugging.Assert(() => v >= 0, () => "v=" + v); if (inputType == FST.INPUT_TYPE.BYTE1) { - Debugging.Assert(() => v <= 255, () => "v=" + v); + if (Debugging.AssertsEnabled) Debugging.Assert(() => v <= 255, () => "v=" + v); @out.WriteByte((byte)(sbyte)v); } else if (inputType == FST.INPUT_TYPE.BYTE2) { - Debugging.Assert(() => v <= 65535, () => "v=" + v); + if (Debugging.AssertsEnabled) Debugging.Assert(() => v <= 65535, () => "v=" + v); @out.WriteInt16((short)v); } else @@ -693,7 +696,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) } else { - Debugging.Assert(() => arc.NextFinalOutput.Equals(NO_OUTPUT)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.NextFinalOutput.Equals(NO_OUTPUT)); } bool targetHasArcs = target.Node > 0; @@ -732,7 +735,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (targetHasArcs && (flags & FST.BIT_TARGET_NEXT) == 0) { - Debugging.Assert(() => target.Node > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => target.Node > 0); //System.out.println(" write target"); bytes.WriteVInt64(target.Node); } @@ -772,7 +775,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (doFixedArray) { const int MAX_HEADER_SIZE = 11; // header(byte) + numArcs(vint) + numBytes(vint) - Debugging.Assert(() => maxBytesPerArc > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => maxBytesPerArc > 0); // 2nd pass just "expands" all arcs to take up a fixed // byte size @@ -792,7 +795,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) // expand the arcs in place, backwards long srcPos = bytes.Position; long destPos = fixedArrayStart + nodeIn.NumArcs * maxBytesPerArc; - Debugging.Assert(() => destPos >= srcPos); + if (Debugging.AssertsEnabled) Debugging.Assert(() => destPos >= srcPos); if (destPos > srcPos) { bytes.SkipBytes((int)(destPos - srcPos)); @@ -804,7 +807,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (srcPos != destPos) { //System.out.println(" copy len=" + bytesPerArc[arcIdx]); - Debugging.Assert(() => destPos > srcPos, () => "destPos=" + destPos + " srcPos=" + srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " bytesPerArc[arcIdx]=" + bytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.NumArcs); + if (Debugging.AssertsEnabled) Debugging.Assert(() => destPos > srcPos, () => 
"destPos=" + destPos + " srcPos=" + srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " bytesPerArc[arcIdx]=" + bytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.NumArcs); bytes.CopyBytes(srcPos, destPos, bytesPerArc[arcIdx]); } } @@ -890,7 +893,7 @@ public FST.Arc ReadLastTargetArc(FST.Arc follow, FST.Arc arc, FST.Bytes if (!TargetHasArcs(follow)) { //System.out.println(" end node"); - Debugging.Assert(() => follow.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => follow.IsFinal); arc.Label = FST.END_LABEL; arc.Target = FST.FINAL_END_NODE; arc.Output = follow.NextFinalOutput; @@ -957,7 +960,7 @@ public FST.Arc ReadLastTargetArc(FST.Arc follow, FST.Arc arc, FST.Bytes arc.NextArc = @in.Position; } ReadNextRealArc(arc, @in); - Debugging.Assert(() => arc.IsLast); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsLast); return arc; } } @@ -1091,7 +1094,7 @@ public FST.Arc ReadNextArc(FST.Arc arc, FST.BytesReader @in) /// public int ReadNextArcLabel(FST.Arc arc, FST.BytesReader @in) { - Debugging.Assert(() => !arc.IsLast); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !arc.IsLast); if (arc.Label == FST.END_LABEL) { @@ -1156,7 +1159,7 @@ public FST.Arc ReadNextRealArc(FST.Arc arc, FST.BytesReader @in) { // arcs are at fixed entries arc.ArcIdx++; - Debugging.Assert(() => arc.ArcIdx < arc.NumArcs); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.ArcIdx < arc.NumArcs); @in.Position = arc.PosArcsStart; @in.SkipBytes(arc.ArcIdx * arc.BytesPerArc); } @@ -1223,7 +1226,7 @@ public FST.Arc ReadNextRealArc(FST.Arc arc, FST.BytesReader @in) else { arc.Target = arc.Node - 1; - Debugging.Assert(() => arc.Target > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Target > 0); } } else @@ -1299,7 +1302,7 @@ public FST.Arc FindTargetArc(int labelToMatch, FST.Arc follow, FST.Arc { // LUCENE-5152: detect tricky cases where caller // modified previously returned cached root-arcs: - Debugging.Assert(AssertRootArcs); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertRootArcs); FST.Arc result = cachedRootArcs[labelToMatch]; if (result == null) { @@ -1826,7 +1829,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve } else { - Debugging.Assert(() => arc.NextFinalOutput.Equals(NO_OUTPUT)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.NextFinalOutput.Equals(NO_OUTPUT)); } if (!TargetHasArcs(arc)) { @@ -1869,7 +1872,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve absPtr = 0; } - Debugging.Assert(() => flags != FST.ARCS_AS_FIXED_ARRAY); + if (Debugging.AssertsEnabled) Debugging.Assert(() => flags != FST.ARCS_AS_FIXED_ARRAY); writer.WriteByte((byte)(sbyte)flags); fst.WriteLabel(writer, arc.Label); @@ -1991,7 +1994,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve // order) so nodes should only point forward to // other nodes because we only produce acyclic FSTs // w/ nodes only pointing "forwards": - Debugging.Assert(() => !negDelta); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !negDelta); //System.out.println("TOT wasted=" + totWasted); // Converged! 
break; @@ -2020,9 +2023,12 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve fst.EmptyOutput = emptyOutput; } - Debugging.Assert(() => fst.nodeCount == nodeCount, () => "fst.nodeCount=" + fst.nodeCount + " nodeCount=" + nodeCount); - Debugging.Assert(() => fst.arcCount == arcCount); - Debugging.Assert(() => fst.arcWithOutputCount == arcWithOutputCount, () => "fst.arcWithOutputCount=" + fst.arcWithOutputCount + " arcWithOutputCount=" + arcWithOutputCount); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => fst.nodeCount == nodeCount, () => "fst.nodeCount=" + fst.nodeCount + " nodeCount=" + nodeCount); + Debugging.Assert(() => fst.arcCount == arcCount); + Debugging.Assert(() => fst.arcWithOutputCount == arcWithOutputCount, () => "fst.arcWithOutputCount=" + fst.arcWithOutputCount + " arcWithOutputCount=" + arcWithOutputCount); + } fst.bytes.Finish(); fst.CacheRootArcs(); @@ -2337,7 +2343,7 @@ public NodeQueue(int topN) protected internal override bool LessThan(NodeAndInCount a, NodeAndInCount b) { int cmp = a.CompareTo(b); - Debugging.Assert(() => cmp != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cmp != 0); return cmp < 0; } } diff --git a/src/Lucene.Net/Util/Fst/FSTEnum.cs b/src/Lucene.Net/Util/Fst/FSTEnum.cs index 9dd3bc5516..ae43d3bf57 100644 --- a/src/Lucene.Net/Util/Fst/FSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/FSTEnum.cs @@ -199,8 +199,11 @@ protected virtual void DoSeekCeil() // Match arc.ArcIdx = mid - 1; m_fst.ReadNextRealArc(arc, @in); - Debugging.Assert(() => arc.ArcIdx == mid); - Debugging.Assert(() => arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => arc.ArcIdx == mid); + Debugging.Assert(() => arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); + } m_output[m_upto] = m_fst.Outputs.Add(m_output[m_upto - 1], arc.Output); if (targetLabel == FST.END_LABEL) { @@ -217,7 +220,7 @@ protected virtual void DoSeekCeil() // Dead end arc.ArcIdx = arc.NumArcs - 2; m_fst.ReadNextRealArc(arc, @in); - Debugging.Assert(() => arc.IsLast); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsLast); // Dead end (target is after the last arc); // rollback to last fork then push m_upto--; @@ -242,7 +245,7 @@ protected virtual void DoSeekCeil() { arc.ArcIdx = (low > high ? low : high) - 1; m_fst.ReadNextRealArc(arc, @in); - Debugging.Assert(() => arc.Label > targetLabel); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label > targetLabel); PushFirst(); return; } @@ -370,8 +373,11 @@ protected virtual void DoSeekFloor() //System.out.println(" match! arcIdx=" + mid); arc.ArcIdx = mid - 1; m_fst.ReadNextRealArc(arc, @in); - Debugging.Assert(() => arc.ArcIdx == mid); - Debugging.Assert(() => arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => arc.ArcIdx == mid); + Debugging.Assert(() => arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); + } m_output[m_upto] = m_fst.Outputs.Add(m_output[m_upto - 1], arc.Output); if (targetLabel == FST.END_LABEL) { @@ -426,8 +432,11 @@ protected virtual void DoSeekFloor() // LUCENENET specific: We don't want the ReadNextArcLabel call to be // excluded when Debug.Assert is stripped out by the compiler.
bool check = arc.IsLast || m_fst.ReadNextArcLabel(arc, @in) > targetLabel; - Debugging.Assert(() => check); - Debugging.Assert(() => arc.Label < targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => check); + Debugging.Assert(() => arc.Label < targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel); + } PushLast(); return; } @@ -574,7 +583,7 @@ private void Incr() private void PushFirst() { FST.Arc arc = m_arcs[m_upto]; - Debugging.Assert(() => arc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc != null); while (true) { @@ -601,7 +610,7 @@ private void PushFirst() private void PushLast() { FST.Arc arc = m_arcs[m_upto]; - Debugging.Assert(() => arc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc != null); while (true) { diff --git a/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs index 7bfed37fb4..2486563f02 100644 --- a/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs @@ -44,8 +44,11 @@ private Int32SequenceOutputs() public override Int32sRef Common(Int32sRef output1, Int32sRef output2) { - Debugging.Assert(() => output1 != null); - Debugging.Assert(() => output2 != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output1 != null); + Debugging.Assert(() => output2 != null); + } int pos1 = output1.Offset; int pos2 = output2.Offset; @@ -83,8 +86,11 @@ public override Int32sRef Common(Int32sRef output1, Int32sRef output2) public override Int32sRef Subtract(Int32sRef output, Int32sRef inc) { - Debugging.Assert(() => output != null); - Debugging.Assert(() => inc != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output != null); + Debugging.Assert(() => inc != null); + } if (inc == NO_OUTPUT) { // no prefix removed @@ -97,16 +103,22 @@ public override Int32sRef Subtract(Int32sRef output, Int32sRef inc) } else { - Debugging.Assert(() => inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); - Debugging.Assert(() => inc.Length > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); + Debugging.Assert(() => inc.Length > 0); + } return new Int32sRef(output.Int32s, output.Offset + inc.Length, output.Length - inc.Length); } } public override Int32sRef Add(Int32sRef prefix, Int32sRef output) { - Debugging.Assert(() => prefix != null); - Debugging.Assert(() => output != null); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => prefix != null); + Debugging.Assert(() => output != null); + } if (prefix == NO_OUTPUT) { return output; @@ -117,8 +129,11 @@ public override Int32sRef Add(Int32sRef prefix, Int32sRef output) } else { - Debugging.Assert(() => prefix.Length > 0); - Debugging.Assert(() => output.Length > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => prefix.Length > 0); + Debugging.Assert(() => output.Length > 0); + } Int32sRef result = new Int32sRef(prefix.Length + output.Length); Array.Copy(prefix.Int32s, prefix.Offset, result.Int32s, 0, prefix.Length); Array.Copy(output.Int32s, output.Offset, result.Int32s, prefix.Length, output.Length); @@ -129,7 +144,7 @@ public override Int32sRef Add(Int32sRef prefix, Int32sRef output) public override void Write(Int32sRef prefix, DataOutput @out) { - Debugging.Assert(() => prefix != null); + 
if (Debugging.AssertsEnabled) Debugging.Assert(() => prefix != null); @out.WriteVInt32(prefix.Length); for (int idx = 0; idx < prefix.Length; idx++) { diff --git a/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs b/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs index 8d57075daa..11fb3da316 100644 --- a/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs @@ -88,7 +88,7 @@ public Int32sRefFSTEnum.InputOutput SeekExact(Int32sRef target) m_targetLength = target.Length; if (base.DoSeekExact()) { - Debugging.Assert(() => m_upto == 1 + target.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_upto == 1 + target.Length); return SetResult(); } else diff --git a/src/Lucene.Net/Util/Fst/NoOutputs.cs b/src/Lucene.Net/Util/Fst/NoOutputs.cs index 33bb13754c..f96864461f 100644 --- a/src/Lucene.Net/Util/Fst/NoOutputs.cs +++ b/src/Lucene.Net/Util/Fst/NoOutputs.cs @@ -64,30 +64,42 @@ private NoOutputs() public override object Common(object output1, object output2) { - Debugging.Assert(() => output1 == NO_OUTPUT); - Debugging.Assert(() => output2 == NO_OUTPUT); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output1 == NO_OUTPUT); + Debugging.Assert(() => output2 == NO_OUTPUT); + } return NO_OUTPUT; } public override object Subtract(object output, object inc) { - Debugging.Assert(() => output == NO_OUTPUT); - Debugging.Assert(() => inc == NO_OUTPUT); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output == NO_OUTPUT); + Debugging.Assert(() => inc == NO_OUTPUT); + } return NO_OUTPUT; } public override object Add(object prefix, object output) { - Debugging.Assert(() => prefix == NO_OUTPUT, () => "got " + prefix); - Debugging.Assert(() => output == NO_OUTPUT); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => prefix == NO_OUTPUT, () => "got " + prefix); + Debugging.Assert(() => output == NO_OUTPUT); + } return NO_OUTPUT; } [MethodImpl(MethodImplOptions.NoInlining)] public override object Merge(object first, object second) { - Debugging.Assert(() => first == NO_OUTPUT); - Debugging.Assert(() => second == NO_OUTPUT); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => first == NO_OUTPUT); + Debugging.Assert(() => second == NO_OUTPUT); + } return NO_OUTPUT; } diff --git a/src/Lucene.Net/Util/Fst/NodeHash.cs b/src/Lucene.Net/Util/Fst/NodeHash.cs index 8e6460f557..8b05c88278 100644 --- a/src/Lucene.Net/Util/Fst/NodeHash.cs +++ b/src/Lucene.Net/Util/Fst/NodeHash.cs @@ -162,7 +162,7 @@ public long Add(Builder.UnCompiledNode nodeIn) // freeze & add long node = fst.AddNode(nodeIn); //System.out.println(" now freeze node=" + node); - Debugging.Assert(() => Hash(node) == h, () => "frozenHash=" + Hash(node) + " vs h=" + h); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Hash(node) == h, () => "frozenHash=" + Hash(node) + " vs h=" + h); count++; table.Set(pos, node); // Rehash at 2/3 occupancy: diff --git a/src/Lucene.Net/Util/Fst/PairOutputs.cs b/src/Lucene.Net/Util/Fst/PairOutputs.cs index c9eb877259..ca85eaa279 100644 --- a/src/Lucene.Net/Util/Fst/PairOutputs.cs +++ b/src/Lucene.Net/Util/Fst/PairOutputs.cs @@ -86,7 +86,7 @@ public virtual Pair NewPair(A a, B b) else { var p = new Pair(a, b); - Debugging.Assert(() => Valid(p)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(p)); return p; } } @@ -126,28 +126,37 @@ private bool Valid(Pair pair) public override Pair Common(Pair pair1, Pair pair2) { - Debugging.Assert(() => Valid(pair1)); - Debugging.Assert(() => Valid(pair2)); + if (Debugging.AssertsEnabled) + { + 
Debugging.Assert(() => Valid(pair1)); + Debugging.Assert(() => Valid(pair2)); + } return NewPair(outputs1.Common(pair1.Output1, pair2.Output1), outputs2.Common(pair1.Output2, pair2.Output2)); } public override Pair Subtract(Pair output, Pair inc) { - Debugging.Assert(() => Valid(output)); - Debugging.Assert(() => Valid(inc)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Valid(output)); + Debugging.Assert(() => Valid(inc)); + } return NewPair(outputs1.Subtract(output.Output1, inc.Output1), outputs2.Subtract(output.Output2, inc.Output2)); } public override Pair Add(Pair prefix, Pair output) { - Debugging.Assert(() => Valid(prefix)); - Debugging.Assert(() => Valid(output)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Valid(prefix)); + Debugging.Assert(() => Valid(output)); + } return NewPair(outputs1.Add(prefix.Output1, output.Output1), outputs2.Add(prefix.Output2, output.Output2)); } public override void Write(Pair output, DataOutput writer) { - Debugging.Assert(() => Valid(output)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output)); outputs1.Write(output.Output1, writer); outputs2.Write(output.Output2, writer); } @@ -163,7 +172,7 @@ public override Pair Read(DataInput @in) public override string OutputToString(Pair output) { - Debugging.Assert(() => Valid(output)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output)); return ""; } diff --git a/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs b/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs index ea2cb101e5..0825c97a40 100644 --- a/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs +++ b/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs @@ -45,25 +45,34 @@ private PositiveInt32Outputs() public override long? Common(long? output1, long? output2) { - Debugging.Assert(() => Valid(output1)); - Debugging.Assert(() => Valid(output2)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Valid(output1)); + Debugging.Assert(() => Valid(output2)); + } if (output1 == NO_OUTPUT || output2 == NO_OUTPUT) { return NO_OUTPUT; } else { - Debugging.Assert(() => output1 > 0); - Debugging.Assert(() => output2 > 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => output1 > 0); + Debugging.Assert(() => output2 > 0); + } return Math.Min(output1.Value, output2.Value); } } public override long? Subtract(long? output, long? inc) { - Debugging.Assert(() => Valid(output)); - Debugging.Assert(() => Valid(inc)); - Debugging.Assert(() => output >= inc); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Valid(output)); + Debugging.Assert(() => Valid(inc)); + Debugging.Assert(() => output >= inc); + } if (inc == NO_OUTPUT) { @@ -81,8 +90,11 @@ private PositiveInt32Outputs() public override long? Add(long? prefix, long? output) { - Debugging.Assert(() => Valid(prefix)); - Debugging.Assert(() => Valid(output)); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => Valid(prefix)); + Debugging.Assert(() => Valid(output)); + } if (prefix == NO_OUTPUT) { return output; @@ -99,7 +111,7 @@ private PositiveInt32Outputs() public override void Write(long? 
output, DataOutput @out) { - Debugging.Assert(() => Valid(output)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output)); @out.WriteVInt64(output.Value); } diff --git a/src/Lucene.Net/Util/Fst/Util.cs b/src/Lucene.Net/Util/Fst/Util.cs index a924b55924..ed59751da8 100644 --- a/src/Lucene.Net/Util/Fst/Util.cs +++ b/src/Lucene.Net/Util/Fst/Util.cs @@ -74,7 +74,7 @@ public static T Get(FST fst, Int32sRef input) /// public static T Get(FST fst, BytesRef input) { - Debugging.Assert(() => fst.InputType == FST.INPUT_TYPE.BYTE1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fst.InputType == FST.INPUT_TYPE.BYTE1); var fstReader = fst.GetBytesReader(); @@ -385,7 +385,7 @@ public TopNSearcher(FST fst, int topN, int maxQueueDepth, IComparer compar /// protected virtual void AddIfCompetitive(FSTPath path) { - Debugging.Assert(() => queue != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => queue != null); T cost = fst.Outputs.Add(path.Cost, path.Arc.Output); //System.out.println(" addIfCompetitive queue.size()=" + queue.size() + " path=" + path + " + label=" + path.arc.label); @@ -408,7 +408,7 @@ protected virtual void AddIfCompetitive(FSTPath path) path.Input.Length--; // We should never see dups: - Debugging.Assert(() => cmp != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => cmp != 0); if (cmp < 0) { @@ -596,7 +596,7 @@ public virtual TopResults Search() fst.ReadNextArc(path.Arc, fstReader); } - Debugging.Assert(() => foundZero); + if (Debugging.AssertsEnabled) Debugging.Assert(() => foundZero); if (queue != null) { @@ -917,7 +917,7 @@ public static void ToDot(FST fst, TextWriter @out, bool sameRank, bool lab arcColor = "black"; } - Debugging.Assert(() => arc.Label != FST.END_LABEL); + if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label != FST.END_LABEL); @out.Write(" " + node + " -> " + arc.Target + " [label=\"" + PrintableLabel(arc.Label) + outs + "\"" + (arc.IsFinal ? " style=\"bold\"" : "") + " color=\"" + arcColor + "\"]\n"); // Break the loop if we're on the last arc of this state. 
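Every hunk above applies the same idiom: check Debugging.AssertsEnabled before constructing the Func<bool> condition (and optional Func<string> message) delegates. Both lambdas capture locals, so without the outer guard each call site would allocate a closure on every call even when assertions are disabled; with the guard, the disabled path costs a single branch. A minimal sketch of the idiom, assuming only the Debugging class introduced earlier in this series (Debugging is internal to Lucene.Net, so this compiles only inside that assembly; CheckIndex and its parameters are hypothetical):

    using System;
    using Lucene.Net.Diagnostics;

    internal static class GatedAssertSketch
    {
        // Hypothetical bounds check mirroring the call sites rewritten above.
        public static void CheckIndex(int index, int length)
        {
            // The outer guard short-circuits before either lambda (and its
            // closure over index/length) is allocated, so a run with asserts
            // disabled pays only for the branch.
            if (Debugging.AssertsEnabled)
                Debugging.Assert(() => index >= 0 && index < length,
                    () => "index=" + index + " length=" + length);
        }
    }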
@@ -1068,7 +1068,7 @@ public static BytesRef ToBytesRef(Int32sRef input, BytesRef scratch) { int value = input.Int32s[i + input.Offset]; // NOTE: we allow -128 to 255 - Debugging.Assert(() => value >= sbyte.MinValue && value <= 255, () => "value " + value + " doesn't fit into byte"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => value >= sbyte.MinValue && value <= 255, () => "value " + value + " doesn't fit into byte"); scratch.Bytes[i] = (byte)value; } scratch.Length = input.Length; diff --git a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs index 43f17972f8..51a877d676 100644 --- a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs +++ b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs @@ -137,7 +137,7 @@ public static void Encode(byte[] inputArray, int inputOffset, int inputLength, c [CLSCompliant(false)] public static void Encode(sbyte[] inputArray, int inputOffset, int inputLength, char[] outputArray, int outputOffset, int outputLength) { - Debugging.Assert(() => outputLength == GetEncodedLength(inputArray, inputOffset, inputLength)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outputLength == GetEncodedLength(inputArray, inputOffset, inputLength)); if (inputLength > 0) { int inputByteNum = inputOffset; @@ -217,7 +217,7 @@ public static void Decode(char[] inputArray, int inputOffset, int inputLength, b [CLSCompliant(false)] public static void Decode(char[] inputArray, int inputOffset, int inputLength, sbyte[] outputArray, int outputOffset, int outputLength) { - Debugging.Assert(() => outputLength == GetDecodedLength(inputArray, inputOffset, inputLength)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => outputLength == GetDecodedLength(inputArray, inputOffset, inputLength)); int numInputChars = inputLength - 1; int numOutputBytes = outputLength; diff --git a/src/Lucene.Net/Util/InfoStream.cs b/src/Lucene.Net/Util/InfoStream.cs index 9724d44092..020a072463 100644 --- a/src/Lucene.Net/Util/InfoStream.cs +++ b/src/Lucene.Net/Util/InfoStream.cs @@ -42,7 +42,7 @@ private sealed class NoOutput : InfoStream { public override void Message(string component, string message) { - Debugging.Assert(() => false, () => "message() should not be called when isEnabled returns false"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "message() should not be called when isEnabled returns false"); } public override bool IsEnabled(string component) diff --git a/src/Lucene.Net/Util/IntBlockPool.cs b/src/Lucene.Net/Util/IntBlockPool.cs index 54045808ec..a1a05dfde7 100644 --- a/src/Lucene.Net/Util/IntBlockPool.cs +++ b/src/Lucene.Net/Util/IntBlockPool.cs @@ -243,7 +243,7 @@ private int NewSlice(int size) if (Int32Upto > INT32_BLOCK_SIZE - size) { NextBuffer(); - Debugging.Assert(() => AssertSliceBuffer(buffer)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertSliceBuffer(buffer)); } int upto = Int32Upto; @@ -292,7 +292,7 @@ private int AllocSlice(int[] slice, int sliceOffset) if (Int32Upto > INT32_BLOCK_SIZE - newSize) { NextBuffer(); - Debugging.Assert(() => AssertSliceBuffer(buffer)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertSliceBuffer(buffer)); } int newUpto = Int32Upto; @@ -337,7 +337,7 @@ public virtual void Reset(int sliceOffset) public virtual void WriteInt32(int value) { int[] ints = pool.buffers[offset >> INT32_BLOCK_SHIFT]; - Debugging.Assert(() => ints != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ints != null); int relativeOffset = offset & 
INT32_BLOCK_MASK; if (ints[relativeOffset] != 0) { @@ -427,7 +427,7 @@ public bool IsEndOfSlice { get { - Debugging.Assert(() => upto + bufferOffset <= end); + if (Debugging.AssertsEnabled) Debugging.Assert(() => upto + bufferOffset <= end); return upto + bufferOffset == end; } } @@ -440,8 +440,11 @@ public bool IsEndOfSlice /// public int ReadInt32() { - Debugging.Assert(() => !IsEndOfSlice); - Debugging.Assert(() => upto <= limit); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => !IsEndOfSlice); + Debugging.Assert(() => upto <= limit); + } if (upto == limit) { NextSlice(); @@ -465,7 +468,7 @@ private void NextSlice() if (nextIndex + newSize >= end) { // We are advancing to the final slice - Debugging.Assert(() => end - nextIndex > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => end - nextIndex > 0); limit = end - bufferOffset; } else diff --git a/src/Lucene.Net/Util/IntsRef.cs b/src/Lucene.Net/Util/IntsRef.cs index 3a3b6df5ec..0ee2174bb2 100644 --- a/src/Lucene.Net/Util/IntsRef.cs +++ b/src/Lucene.Net/Util/IntsRef.cs @@ -101,7 +101,7 @@ public Int32sRef(int[] ints, int offset, int length) this.ints = ints; this.Offset = offset; this.Length = length; - Debugging.Assert(IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid); } /// @@ -222,7 +222,7 @@ public void CopyInt32s(Int32sRef other) /// public void Grow(int newLength) { - Debugging.Assert(() => Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); if (ints.Length < newLength) { ints = ArrayUtil.Grow(ints, newLength); diff --git a/src/Lucene.Net/Util/LongBitSet.cs b/src/Lucene.Net/Util/LongBitSet.cs index 7c6d4131d6..8d2d8508a2 100644 --- a/src/Lucene.Net/Util/LongBitSet.cs +++ b/src/Lucene.Net/Util/LongBitSet.cs @@ -122,7 +122,7 @@ public long Cardinality() public bool Get(long index) { - Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index); int i = (int)(index >> 6); // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. 
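Unlike System.Diagnostics.Debug.Assert, which the compiler removes from Release builds, the rewritten checks remain in the shipped assembly and are governed at runtime by the read-write Debugging.AssertsEnabled property, whose default comes from the "assert" system property. A sketch of flipping it for a verification run, assuming code that lives inside the Lucene.Net assembly since Debugging is internal (the EnableAsserts helper is hypothetical):

    using Lucene.Net.Diagnostics;

    internal static class AssertToggleSketch
    {
        // Hypothetical setup hook, e.g. for a test fixture.
        public static void EnableAsserts()
        {
            // Overrides the default read from the "assert" system property;
            // every gated Debugging.Assert(...) call site above becomes a
            // live check that throws AssertionException on failure.
            Debugging.AssertsEnabled = true;
        }
    }

Because the checks are plain runtime branches, the same binary can run with assertions enabled in CI and disabled in production.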
@@ -133,7 +133,7 @@ public bool Get(long index) public void Set(long index) { - Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -142,7 +142,7 @@ public void Set(long index) public bool GetAndSet(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -153,7 +153,7 @@ public bool GetAndSet(long index) public void Clear(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); int bit = (int)(index & 0x03f); long bitmask = 1L << bit; @@ -162,7 +162,7 @@ public void Clear(long index) public bool GetAndClear(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -177,7 +177,7 @@ public bool GetAndClear(long index) /// public long NextSetBit(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int i = (int)(index >> 6); int subIndex = (int)(index & 0x3f); // index within the word long word = bits[i] >> subIndex; // skip all the bits to the right of index @@ -205,7 +205,7 @@ public long NextSetBit(long index) /// public long PrevSetBit(long index) { - Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int i = (int)(index >> 6); int subIndex = (int)(index & 0x3f); // index within the word long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index @@ -231,7 +231,7 @@ public long PrevSetBit(long index) /// this = this OR other public void Or(Int64BitSet other) { - Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); + if (Debugging.AssertsEnabled) Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); int pos = Math.Min(numWords, other.numWords); while (--pos >= 0) { @@ -243,7 +243,7 @@ public void Or(Int64BitSet other) /// this = this XOR other public void Xor(Int64BitSet other) { - Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); + if (Debugging.AssertsEnabled) Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); int pos = Math.Min(numWords, other.numWords); while (--pos >= 0) { @@ -303,8 +303,11 @@ public void AndNot(Int64BitSet other) /// One-past the last bit to flip public void Flip(long startIndex, long endIndex) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + if 
(Debugging.AssertsEnabled) + { + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + } if (endIndex <= startIndex) { return; @@ -348,8 +351,11 @@ public void Flip(long startIndex, long endIndex) /// One-past the last bit to set public void Set(long startIndex, long endIndex) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + } if (endIndex <= startIndex) { return; @@ -379,8 +385,11 @@ public void Set(long startIndex, long endIndex) /// One-past the last bit to clear public void Clear(long startIndex, long endIndex) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); + Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + } if (endIndex <= startIndex) { return; diff --git a/src/Lucene.Net/Util/LongsRef.cs b/src/Lucene.Net/Util/LongsRef.cs index 13946c8637..efc1896703 100644 --- a/src/Lucene.Net/Util/LongsRef.cs +++ b/src/Lucene.Net/Util/LongsRef.cs @@ -101,7 +101,7 @@ public Int64sRef(long[] longs, int offset, int length) this.longs = longs; this.Offset = offset; this.Length = length; - Debugging.Assert(IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid); } /// @@ -222,7 +222,7 @@ public void CopyInt64s(Int64sRef other) /// public void Grow(int newLength) { - Debugging.Assert(() => Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); if (longs.Length < newLength) { longs = ArrayUtil.Grow(longs, newLength); diff --git a/src/Lucene.Net/Util/MergedIterator.cs b/src/Lucene.Net/Util/MergedIterator.cs index 87696a743a..5ff2d09871 100644 --- a/src/Lucene.Net/Util/MergedIterator.cs +++ b/src/Lucene.Net/Util/MergedIterator.cs @@ -113,7 +113,7 @@ public void Dispose() private void PullTop() { - Debugging.Assert(() => numTop == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numTop == 0); top[numTop++] = queue.Pop(); if (removeDuplicates) { diff --git a/src/Lucene.Net/Util/OfflineSorter.cs b/src/Lucene.Net/Util/OfflineSorter.cs index f2bd12ecb2..c9ec9f38e3 100644 --- a/src/Lucene.Net/Util/OfflineSorter.cs +++ b/src/Lucene.Net/Util/OfflineSorter.cs @@ -369,7 +369,7 @@ private FileInfo SortPartition(/*int len*/) // LUCENENET NOTE: made private, sin IBytesRefIterator iter = buffer.GetIterator(comparer); while ((spare = iter.Next()) != null) { - Debugging.Assert(() => spare.Length <= ushort.MaxValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => spare.Length <= ushort.MaxValue); @out.Write(spare); } } @@ -534,7 +534,7 @@ private static BinaryWriterDataOutput NewBinaryWriterDataOutput(FileInfo file) /// public virtual void Write(BytesRef @ref) { - Debugging.Assert(() => @ref != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => @ref != null); Write(@ref.Bytes, @ref.Offset, @ref.Length); } @@ -554,9 +554,12 @@ public virtual void Write(byte[] bytes) /// public virtual void Write(byte[] bytes, int off, int len) { - Debugging.Assert(() => bytes != null); - Debugging.Assert(() => off >= 0 && off + len <= bytes.Length); - Debugging.Assert(() => len >= 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => bytes 
!= null); + Debugging.Assert(() => off >= 0 && off + len <= bytes.Length); + Debugging.Assert(() => len >= 0); + } os.WriteInt16((short)len); os.WriteBytes(bytes, off, len); // LUCENENET NOTE: We call WriteBytes, since there is no Write() on Lucene's version of DataOutput } @@ -651,7 +654,7 @@ public virtual byte[] Read() } #pragma warning restore CA1031 // Do not catch general exception types - Debugging.Assert(() => length >= 0, () => "Sanity: sequence length < 0: " + length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "Sanity: sequence length < 0: " + length); byte[] result = new byte[length]; inputStream.ReadBytes(result, 0, length); return result; diff --git a/src/Lucene.Net/Util/OpenBitSet.cs b/src/Lucene.Net/Util/OpenBitSet.cs index 4891078755..e0d65c69ec 100644 --- a/src/Lucene.Net/Util/OpenBitSet.cs +++ b/src/Lucene.Net/Util/OpenBitSet.cs @@ -200,7 +200,7 @@ public virtual bool Get(int index) /// public virtual bool FastGet(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int i = index >> 6; // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. @@ -230,7 +230,7 @@ public virtual bool Get(long index) /// public virtual bool FastGet(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int i = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -255,7 +255,7 @@ public boolean get1(int index) { /// public virtual int GetBit(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int i = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 return ((int)((long)((ulong)m_bits[i] >> bit))) & 0x01; @@ -286,7 +286,7 @@ public virtual void Set(long index) /// public virtual void FastSet(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -299,7 +299,7 @@ public virtual void FastSet(int index) /// public virtual void FastSet(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); int bit = (int)index & 0x3f; long bitmask = 1L << bit; @@ -354,7 +354,7 @@ protected virtual int ExpandingWordNum(long index) /// public virtual void FastClear(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; int bit = index & 0x03f; long bitmask = 1L << bit; @@ -374,7 +374,7 @@ public virtual void FastClear(int index) /// public virtual void FastClear(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -493,7 +493,7 @@ public virtual void Clear(long startIndex, long endIndex) /// public virtual bool GetAndSet(int index) { - Debugging.Assert(() => index >= 0 && 
index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -508,7 +508,7 @@ public virtual bool GetAndSet(int index) /// public virtual bool GetAndSet(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -523,7 +523,7 @@ public virtual bool GetAndSet(long index) /// public virtual void FastFlip(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -536,7 +536,7 @@ public virtual void FastFlip(int index) /// public virtual void FastFlip(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -559,7 +559,7 @@ public virtual void Flip(long index) /// public virtual bool FlipAndGet(int index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -573,7 +573,7 @@ public virtual bool FlipAndGet(int index) /// public virtual bool FlipAndGet(long index) { - Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -913,7 +913,7 @@ public virtual void Union(OpenBitSet other) // https://github.com/apache/lucenenet/pull/154 int oldLen = m_wlen; EnsureCapacityWords(newLen); - Debugging.Assert(() => (numBits = Math.Max(other.numBits, numBits)) >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (numBits = Math.Max(other.numBits, numBits)) >= 0); long[] thisArr = this.m_bits; long[] otherArr = other.m_bits; @@ -953,7 +953,7 @@ public virtual void Xor(OpenBitSet other) // https://github.com/apache/lucenenet/pull/154 int oldLen = m_wlen; EnsureCapacityWords(newLen); - Debugging.Assert(() => (numBits = Math.Max(other.numBits, numBits)) >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (numBits = Math.Max(other.numBits, numBits)) >= 0); long[] thisArr = this.m_bits; long[] otherArr = other.m_bits; @@ -1011,7 +1011,7 @@ public virtual void EnsureCapacityWords(int numWords) { m_bits = ArrayUtil.Grow(m_bits, numWords); m_wlen = numWords; - Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numWords << 6)) >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numWords << 6)) >= 0); } /// @@ -1023,7 +1023,7 @@ public virtual void EnsureCapacity(long numBits) EnsureCapacityWords(Bits2words(numBits)); // ensureCapacityWords sets numBits to a multiple of 64, but we want to set // it to exactly what the app asked. 
- Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numBits)) >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numBits)) >= 0); } /// diff --git a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs index 2c5c5f19db..6f5479a981 100644 --- a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs +++ b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs @@ -63,7 +63,7 @@ static PForDeltaDocIdSet() for (int i = 1; i < ITERATIONS.Length; ++i) { DECODERS[i] = PackedInt32s.GetDecoder(PackedInt32s.Format.PACKED, PackedInt32s.VERSION_CURRENT, i); - Debugging.Assert(() => BLOCK_SIZE % DECODERS[i].ByteValueCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => BLOCK_SIZE % DECODERS[i].ByteValueCount == 0); ITERATIONS[i] = BLOCK_SIZE / DECODERS[i].ByteValueCount; BYTE_BLOCK_COUNTS[i] = ITERATIONS[i] * DECODERS[i].ByteBlockCount; maxByteBLockCount = Math.Max(maxByteBLockCount, DECODERS[i].ByteBlockCount); @@ -212,7 +212,7 @@ internal virtual int ComputeOptimalNumberOfBits() } } this.bitsPerException = actualBitsPerValue - bitsPerValue; - Debugging.Assert(() => bufferSize < BLOCK_SIZE || numExceptions < bufferSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferSize < BLOCK_SIZE || numExceptions < bufferSize); return blockSize; } @@ -231,7 +231,7 @@ internal virtual void PforEncode() buffer[i] &= mask; } } - Debugging.Assert(() => ex == numExceptions); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ex == numExceptions); Arrays.Fill(exceptions, numExceptions, BLOCK_SIZE, 0); } @@ -245,7 +245,7 @@ internal virtual void PforEncode() if (numExceptions > 0) { - Debugging.Assert(() => bitsPerException > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerException > 0); data.WriteByte((byte)(sbyte)numExceptions); data.WriteByte((byte)(sbyte)bitsPerException); PackedInt32s.IEncoder encoder = PackedInt32s.GetEncoder(PackedInt32s.Format.PACKED, PackedInt32s.VERSION_CURRENT, bitsPerException); @@ -316,18 +316,18 @@ internal virtual void EncodeBlock() ++numBlocks; - Debugging.Assert(() => data.Length - originalLength == blockSize, () => (data.Length - originalLength) + " <> " + blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => data.Length - originalLength == blockSize, () => (data.Length - originalLength) + " <> " + blockSize); } /// /// Build the instance. 
public virtual PForDeltaDocIdSet Build() { - Debugging.Assert(() => bufferSize < BLOCK_SIZE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferSize < BLOCK_SIZE); if (cardinality == 0) { - Debugging.Assert(() => previousDoc == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => previousDoc == -1); return EMPTY; } @@ -469,7 +469,7 @@ internal virtual void PforDecompress(byte token) internal virtual void UnaryDecompress(byte token) { - Debugging.Assert(() => (token & HAS_EXCEPTIONS) == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (token & HAS_EXCEPTIONS) == 0); int docID = this.docID; for (int i = 0; i < BLOCK_SIZE; ) { @@ -505,7 +505,7 @@ internal virtual void DecompressBlock() internal virtual void SkipBlock() { - Debugging.Assert(() => i == BLOCK_SIZE); + if (Debugging.AssertsEnabled) Debugging.Assert(() => i == BLOCK_SIZE); DecompressBlock(); docID = nextDocs[BLOCK_SIZE - 1]; } @@ -525,8 +525,11 @@ internal virtual int ForwardBinarySearch(int target) // advance forward and double the window at each step int indexSize = (int)docIDs.Count; int lo = Math.Max(blockIdx / indexInterval, 0), hi = lo + 1; - Debugging.Assert(() => blockIdx == -1 || docIDs.Get(lo) <= docID); - Debugging.Assert(() => lo + 1 == docIDs.Count || docIDs.Get(lo + 1) > docID); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => blockIdx == -1 || docIDs.Get(lo) <= docID); + Debugging.Assert(() => lo + 1 == docIDs.Count || docIDs.Get(lo + 1) > docID); + } while (true) { if (hi >= indexSize) @@ -557,14 +560,17 @@ internal virtual int ForwardBinarySearch(int target) hi = mid - 1; } } - Debugging.Assert(() => docIDs.Get(hi) <= target); - Debugging.Assert(() => hi + 1 == docIDs.Count || docIDs.Get(hi + 1) > target); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => docIDs.Get(hi) <= target); + Debugging.Assert(() => hi + 1 == docIDs.Count || docIDs.Get(hi + 1) > target); + } return hi; } public override int Advance(int target) { - Debugging.Assert(() => target > docID); + if (Debugging.AssertsEnabled) Debugging.Assert(() => target > docID); if (nextDocs[BLOCK_SIZE - 1] < target) { // not in the next block, now use the index diff --git a/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs index cbab9fd6fe..e0c22dac68 100644 --- a/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs +++ b/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs @@ -110,7 +110,7 @@ internal virtual void Grow(int newBlockCount) public override sealed long Get(long index) { - Debugging.Assert(() => index >= 0 && index < Count); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < Count); int block = (int)(index >> pageShift); int element = (int)(index & pageMask); return Get(block, element); @@ -123,9 +123,12 @@ public override sealed long Get(long index) /// public int Get(long index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < Count); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < Count); + Debugging.Assert(() => off + len <= arr.Length); + } int block = (int)(index >> pageShift); int element = (int)(index & pageMask); @@ -193,7 +196,7 @@ internal void FillValues() /// Return the next long in the buffer. 
public long Next() { - Debugging.Assert(() => HasNext); + if (Debugging.AssertsEnabled) Debugging.Assert(() => HasNext); long result = currentValues[pOff++]; if (pOff == currentCount) { diff --git a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs index 298c3e0248..264ea28d64 100644 --- a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs @@ -72,7 +72,7 @@ protected AbstractBlockPackedWriter(DataOutput @out, int blockSize) // LUCENENET /// Reset this writer to wrap . The block size remains unchanged. public virtual void Reset(DataOutput @out) { - Debugging.Assert(() => @out != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => @out != null); this.m_out = @out; m_off = 0; m_ord = 0L; diff --git a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs index ef91b66cb4..f1df1bfa56 100644 --- a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs +++ b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs @@ -86,7 +86,7 @@ internal int IndexInPage(long index) public override sealed long Get(long index) { - Debugging.Assert(() => index >= 0 && index < size); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < size); int pageIndex = PageIndex(index); int indexInPage = IndexInPage(index); return subMutables[pageIndex].Get(indexInPage); @@ -96,7 +96,7 @@ public override sealed long Get(long index) /// Set value at . public void Set(long index, long value) { - Debugging.Assert(() => index >= 0 && index < size); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < size); int pageIndex = PageIndex(index); int indexInPage = IndexInPage(index); subMutables[pageIndex].Set(indexInPage, value); @@ -150,7 +150,7 @@ public T Resize(long newSize) /// Similar to . public T Grow(long minSize) { - Debugging.Assert(() => minSize >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0); if (minSize <= Count) { T result = (T)this; diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReader.cs b/src/Lucene.Net/Util/Packed/BlockPackedReader.cs index b2903c54fc..d4750a57c4 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedReader.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedReader.cs @@ -83,7 +83,7 @@ public BlockPackedReader(IndexInput @in, int packedIntsVersion, int blockSize, l public override long Get(long index) { - Debugging.Assert(() => index >= 0 && index < valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < valueCount); int block = (int)((long)((ulong)index >> blockShift)); int idx = (int)(index & blockMask); return (minValues == null ? 0 : minValues[block]) + subReaders[block].Get(idx); diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs index e54dfcae98..c2e25bca95 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs @@ -129,7 +129,7 @@ public BlockPackedReaderIterator(DataInput @in, int packedIntsVersion, int block public void Reset(DataInput @in, long valueCount) { this.@in = @in; - Debugging.Assert(() => valueCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0); this.valueCount = valueCount; off = blockSize; ord = 0; @@ -139,7 +139,7 @@ public void Reset(DataInput @in, long valueCount) /// Skip exactly values. 
public void Skip(long count) { - Debugging.Assert(() => count >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count >= 0); if (ord + count > valueCount || ord + count < 0) { throw new EndOfStreamException(); @@ -156,7 +156,7 @@ public void Skip(long count) } // 2. skip as many blocks as necessary - Debugging.Assert(() => off == blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => off == blockSize); while (count >= blockSize) { int token = @in.ReadByte() & 0xFF; @@ -180,7 +180,7 @@ public void Skip(long count) } // 3. skip last values - Debugging.Assert(() => count < blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count < blockSize); Refill(); ord += count; off += (int)count; @@ -229,7 +229,7 @@ public long Next() /// Read between 1 and values. public Int64sRef Next(int count) { - Debugging.Assert(() => count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => count > 0); if (ord == valueCount) { throw new EndOfStreamException(); @@ -259,7 +259,7 @@ private void Refill() throw new IOException("Corrupted"); } long minValue = minEquals0 ? 0L : ZigZagDecode(1L + ReadVInt64(@in)); - Debugging.Assert(() => minEquals0 || minValue != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minEquals0 || minValue != 0); if (bitsPerValue == 0) { diff --git a/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs index 7dbb499f32..30a13be05a 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs @@ -71,7 +71,7 @@ public BlockPackedWriter(DataOutput @out, int blockSize) [MethodImpl(MethodImplOptions.NoInlining)] protected override void Flush() { - Debugging.Assert(() => m_off > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_off > 0); long min = long.MaxValue, max = long.MinValue; for (int i = 0; i < m_off; ++i) { diff --git a/src/Lucene.Net/Util/Packed/BulkOperation.cs b/src/Lucene.Net/Util/Packed/BulkOperation.cs index eebd5a68e2..a49d896efa 100644 --- a/src/Lucene.Net/Util/Packed/BulkOperation.cs +++ b/src/Lucene.Net/Util/Packed/BulkOperation.cs @@ -164,12 +164,12 @@ public static BulkOperation Of(PackedInt32s.Format format, int bitsPerValue) { if (format == PackedInt32s.Format.PACKED) { - Debugging.Assert(() => packedBulkOps[bitsPerValue - 1] != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => packedBulkOps[bitsPerValue - 1] != null); return packedBulkOps[bitsPerValue - 1]; } else if (format == PackedInt32s.Format.PACKED_SINGLE_BLOCK) { - Debugging.Assert(() => packedSingleBlockBulkOps[bitsPerValue - 1] != null); + if (Debugging.AssertsEnabled) Debugging.Assert(() => packedSingleBlockBulkOps[bitsPerValue - 1] != null); return packedSingleBlockBulkOps[bitsPerValue - 1]; } else diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs index 0ea2732093..f39c87e140 100644 --- a/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs +++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs @@ -36,7 +36,7 @@ internal class BulkOperationPacked : BulkOperation public BulkOperationPacked(int bitsPerValue) { this.bitsPerValue = bitsPerValue; - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64); int blocks = bitsPerValue; while ((blocks & 1) == 0) { @@ -62,7 +62,7 @@ public BulkOperationPacked(int bitsPerValue) this.mask = (1L << bitsPerValue) - 1; } this.intMask = 
(int)mask; - Debugging.Assert(() => longValueCount * bitsPerValue == 64 * longBlockCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => longValueCount * bitsPerValue == 64 * longBlockCount); } /// @@ -125,7 +125,7 @@ public override void Decode(byte[] blocks, int blocksOffset, long[] values, int nextValue = (bytes & ((1L << bits) - 1)) << bitsLeft; } } - Debugging.Assert(() => bitsLeft == bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsLeft == bitsPerValue); } public override void Decode(long[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations) @@ -178,7 +178,7 @@ public override void Decode(byte[] blocks, int blocksOffset, int[] values, int v nextValue = (bytes & ((1 << bits) - 1)) << bitsLeft; } } - Debugging.Assert(() => bitsLeft == bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsLeft == bitsPerValue); } public override void Encode(long[] values, int valuesOffset, long[] blocks, int blocksOffset, int iterations) @@ -244,7 +244,7 @@ public override void Encode(long[] values, int valuesOffset, byte[] blocks, int for (int i = 0; i < byteValueCount * iterations; ++i) { long v = values[valuesOffset++]; - Debugging.Assert(() => bitsPerValue == 64 || PackedInt32s.BitsRequired(v) <= bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue == 64 || PackedInt32s.BitsRequired(v) <= bitsPerValue); if (bitsPerValue < bitsLeft) { // just buffer @@ -266,7 +266,7 @@ public override void Encode(long[] values, int valuesOffset, byte[] blocks, int nextBlock = (int)((v & ((1L << bits) - 1)) << bitsLeft); } } - Debugging.Assert(() => bitsLeft == 8); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsLeft == 8); } public override void Encode(int[] values, int valuesOffset, byte[] blocks, int blocksOffset, int iterations) @@ -276,7 +276,7 @@ public override void Encode(int[] values, int valuesOffset, byte[] blocks, int b for (int i = 0; i < byteValueCount * iterations; ++i) { int v = values[valuesOffset++]; - Debugging.Assert(() => PackedInt32s.BitsRequired(v & 0xFFFFFFFFL) <= bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => PackedInt32s.BitsRequired(v & 0xFFFFFFFFL) <= bitsPerValue); if (bitsPerValue < bitsLeft) { // just buffer @@ -298,7 +298,7 @@ public override void Encode(int[] values, int valuesOffset, byte[] blocks, int b nextBlock = (v & ((1 << bits) - 1)) << bitsLeft; } } - Debugging.Assert(() => bitsLeft == 8); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsLeft == 8); } } } \ No newline at end of file diff --git a/src/Lucene.Net/Util/Packed/Direct16.cs b/src/Lucene.Net/Util/Packed/Direct16.cs index 5101e9c9ec..ad72a39a09 100644 --- a/src/Lucene.Net/Util/Packed/Direct16.cs +++ b/src/Lucene.Net/Util/Packed/Direct16.cs @@ -88,9 +88,12 @@ public override object GetArray() public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int gets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + gets; i < end; ++i, ++o) @@ -102,9 +105,12 @@ public override int Get(int index, long[] arr, int off, int len) 
public override int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int sets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + sets; i < end; ++i, ++o) @@ -116,7 +122,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - Debugging.Assert(() => val == (val & 0xFFFFL)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => val == (val & 0xFFFFL)); Arrays.Fill(values, fromIndex, toIndex, (short)val); } } diff --git a/src/Lucene.Net/Util/Packed/Direct32.cs b/src/Lucene.Net/Util/Packed/Direct32.cs index af9741b761..d2d1379d78 100644 --- a/src/Lucene.Net/Util/Packed/Direct32.cs +++ b/src/Lucene.Net/Util/Packed/Direct32.cs @@ -88,9 +88,12 @@ public override object GetArray() public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int gets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + gets; i < end; ++i, ++o) @@ -102,9 +105,12 @@ public override int Get(int index, long[] arr, int off, int len) public override int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int sets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + sets; i < end; ++i, ++o) @@ -116,7 +122,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - Debugging.Assert(() => val == (val & 0xFFFFFFFFL)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => val == (val & 0xFFFFFFFFL)); Arrays.Fill(values, fromIndex, toIndex, (int)val); } } diff --git a/src/Lucene.Net/Util/Packed/Direct64.cs b/src/Lucene.Net/Util/Packed/Direct64.cs index 31c61ffafe..1635302b08 100644 --- a/src/Lucene.Net/Util/Packed/Direct64.cs +++ b/src/Lucene.Net/Util/Packed/Direct64.cs @@ -82,9 +82,12 @@ public override object GetArray() public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < 
m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int gets = Math.Min(m_valueCount - index, len); System.Array.Copy(values, index, arr, off, gets); @@ -93,9 +96,12 @@ public override int Get(int index, long[] arr, int off, int len) public override int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int sets = Math.Min(m_valueCount - index, len); System.Array.Copy(arr, off, values, index, sets); diff --git a/src/Lucene.Net/Util/Packed/Direct8.cs b/src/Lucene.Net/Util/Packed/Direct8.cs index e0e6fe27d5..67b1bce068 100644 --- a/src/Lucene.Net/Util/Packed/Direct8.cs +++ b/src/Lucene.Net/Util/Packed/Direct8.cs @@ -85,9 +85,12 @@ public override object GetArray() public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int gets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + gets; i < end; ++i, ++o) @@ -99,9 +102,12 @@ public override int Get(int index, long[] arr, int off, int len) public override int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int sets = Math.Min(m_valueCount - index, len); for (int i = index, o = off, end = index + sets; i < end; ++i, ++o) @@ -113,7 +119,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - Debugging.Assert(() => val == (val & 0xFFL)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => val == (val & 0xFFL)); Arrays.Fill(values, fromIndex, toIndex, (byte)val); } } diff --git a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs index 6cb2e90e2b..b0273ac250 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs @@ -126,7 +126,7 @@ private static long UnPackValue(long[] longArray, int numBits, long packIndex, l /// The low value for the current decoding index. 
private long CurrentLowValue() { - Debugging.Assert(() => ((efIndex >= 0) && (efIndex < numEncoded)), () => $"efIndex {efIndex.ToString(CultureInfo.InvariantCulture)}"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => ((efIndex >= 0) && (efIndex < numEncoded)), () => $"efIndex {efIndex.ToString(CultureInfo.InvariantCulture)}"); return UnPackValue(efEncoder.lowerLongs, efEncoder.numLowBits, efIndex, efEncoder.lowerBitsMask); } @@ -249,7 +249,7 @@ public virtual long NextValue() /// public virtual bool AdvanceToIndex(long index) { - Debugging.Assert(() => index > efIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index > efIndex); if (index >= numEncoded) { efIndex = numEncoded; @@ -257,7 +257,7 @@ public virtual bool AdvanceToIndex(long index) } if (!ToAfterCurrentHighBit()) { - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); } /* CHECKME: Add a (binary) search in the upperZeroBitPositions here. */ int curSetBits = curHighLong.PopCount(); @@ -275,7 +275,7 @@ public virtual bool AdvanceToIndex(long index) */ if (!ToAfterCurrentHighBit()) { - Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(() => false); } ToNextHighValue(); } @@ -312,7 +312,7 @@ public virtual long AdvanceToValue(long target) indexEntryIndex = numIndexEntries - 1; // no further than last index entry } long indexHighValue = (indexEntryIndex + 1) * efEncoder.indexInterval; - Debugging.Assert(() => indexHighValue <= highTarget); + if (Debugging.AssertsEnabled) Debugging.Assert(() => indexHighValue <= highTarget); if (indexHighValue > (setBitForIndex - efIndex)) // advance to just after zero bit position of index entry. { setBitForIndex = UnPackValue(efEncoder.upperZeroBitPositionIndex, efEncoder.nIndexEntryBits, indexEntryIndex, indexMask); @@ -321,7 +321,7 @@ public virtual long AdvanceToValue(long target) upperLong = efEncoder.upperLongs[highIndex]; curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1)))); // may contain the unary 1 bit for efIndex } - Debugging.Assert(() => efIndex < numEncoded); // there is a high value to be found. + if (Debugging.AssertsEnabled) Debugging.Assert(() => efIndex < numEncoded); // there is a high value to be found. } int curSetBits = curHighLong.PopCount(); // shifted right. @@ -337,7 +337,7 @@ public virtual long AdvanceToValue(long target) } setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1)); // highIndex = (int)(setBitForIndex >>> LOG2_LONG_SIZE); - Debugging.Assert(() => (highIndex + 1) == (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE))); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (highIndex + 1) == (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE))); highIndex += 1; upperLong = efEncoder.upperLongs[highIndex]; curHighLong = upperLong; @@ -348,7 +348,7 @@ public virtual long AdvanceToValue(long target) while (curHighLong == 0L) { setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1)); - Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); highIndex += 1; upperLong = efEncoder.upperLongs[highIndex]; curHighLong = upperLong; @@ -356,12 +356,12 @@ public virtual long AdvanceToValue(long target) // curHighLong has enough clear bits to reach highTarget, has at least 1 set bit, and may not have enough set bits. 
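For readers following the EliasFanoDecoder hunks: the asserts here guard the two halves of the encoding. Each value stores its numLowBits low bits verbatim in a packed array, while the remaining high part is unary-coded in the upper bit stream; AdvanceToValue first narrows the search in the unary stream, then reattaches the low bits (CombineHighLowValues, further below) for the final linear comparison. A round-trip sketch of that split, with illustrative names rather than the decoder's actual fields:

    using System;

    internal static class EliasFanoSketch
    {
        // Decoding is a shift-and-or over the two halves.
        private static long CombineHighLow(long high, long low, int numLowBits)
            => (high << numLowBits) | low;

        public static void Main()
        {
            const int numLowBits = 3;
            const long value = 46;                        // binary 101 110
            long high = value >> numLowBits;              // 101 -> unary-coded upper bit stream
            long low = value & ((1L << numLowBits) - 1);  // 110 -> packed lower-bits array
            Console.WriteLine(CombineHighLow(high, low, numLowBits) == value); // True
        }
    }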
int rank = (int)(highTarget - (setBitForIndex - efIndex)); // the rank of the zero bit for highValue. - Debugging.Assert(() => (rank <= (sizeof(long) * 8)), () => ("rank " + rank)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (rank <= (sizeof(long) * 8)), () => ("rank " + rank)); if (rank >= 1) { long invCurHighLong = ~curHighLong; int clearBitForValue = (rank <= 8) ? BroadWord.SelectNaive(invCurHighLong, rank) : BroadWord.Select(invCurHighLong, rank); - Debugging.Assert(() => clearBitForValue <= ((sizeof(long) * 8) - 1)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => clearBitForValue <= ((sizeof(long) * 8) - 1)); setBitForIndex += clearBitForValue + 1; // the high bit just before setBitForIndex is zero int oneBitsBeforeClearBit = clearBitForValue - rank + 1; efIndex += oneBitsBeforeClearBit; // the high bit at setBitForIndex and belongs to the unary code for efIndex @@ -372,14 +372,14 @@ public virtual long AdvanceToValue(long target) if ((setBitForIndex & ((sizeof(long) * 8) - 1)) == 0L) // exhausted curHighLong { - Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); highIndex += 1; upperLong = efEncoder.upperLongs[highIndex]; curHighLong = upperLong; } else { - Debugging.Assert(() => highIndex == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => highIndex == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1)))); } // curHighLong has enough clear bits to reach highTarget, and may not have enough set bits. @@ -387,14 +387,14 @@ public virtual long AdvanceToValue(long target) while (curHighLong == 0L) { setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1)); - Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE)); highIndex += 1; upperLong = efEncoder.upperLongs[highIndex]; curHighLong = upperLong; } } setBitForIndex += curHighLong.TrailingZeroCount(); - Debugging.Assert(() => (setBitForIndex - efIndex) >= highTarget); // highTarget reached + if (Debugging.AssertsEnabled) Debugging.Assert(() => (setBitForIndex - efIndex) >= highTarget); // highTarget reached // Linear search also with low values long currentValue = CombineHighLowValues((setBitForIndex - efIndex), CurrentLowValue()); diff --git a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs index 470b76ee1a..86db1cfaba 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs @@ -178,7 +178,7 @@ public EliasFanoEncoder(long numValues, long upperBound, long indexInterval) this.lowerLongs = new long[(int)numLongsForLowBits]; long numHighBitsClear = (long)((ulong)((this.upperBound > 0) ? 
this.upperBound : 0) >> this.numLowBits); - Debugging.Assert(() => numHighBitsClear <= (2 * this.numValues)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numHighBitsClear <= (2 * this.numValues)); long numHighBitsSet = this.numValues; long numLongsForHighBits = NumInt64sForBits(numHighBitsClear + numHighBitsSet); @@ -220,7 +220,7 @@ public EliasFanoEncoder(long numValues, long upperBound) /// private static long NumInt64sForBits(long numBits) // Note: int version in FixedBitSet.bits2words() { - Debugging.Assert(() => numBits >= 0, () => numBits.ToString(CultureInfo.InvariantCulture)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => numBits >= 0, () => numBits.ToString(CultureInfo.InvariantCulture)); return (long)((ulong)(numBits + (sizeof(long) * 8 - 1)) >> LOG2_INT64_SIZE); } diff --git a/src/Lucene.Net/Util/Packed/GrowableWriter.cs b/src/Lucene.Net/Util/Packed/GrowableWriter.cs index 818fa73d6c..635a363775 100644 --- a/src/Lucene.Net/Util/Packed/GrowableWriter.cs +++ b/src/Lucene.Net/Util/Packed/GrowableWriter.cs @@ -76,7 +76,7 @@ private void EnsureCapacity(long value) return; } int bitsRequired = value < 0 ? 64 : PackedInt32s.BitsRequired(value); - Debugging.Assert(() => bitsRequired > current.BitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsRequired > current.BitsPerValue); int valueCount = Count; PackedInt32s.Mutable next = PackedInt32s.GetMutable(valueCount, bitsRequired, acceptableOverheadRatio); PackedInt32s.Copy(current, 0, next, 0, valueCount, PackedInt32s.DEFAULT_BUFFER_SIZE); diff --git a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs index 1fb52a9237..df969a1ab8 100644 --- a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs +++ b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs @@ -137,7 +137,7 @@ internal override void Grow(int newBlockCount) internal override void PackPendingValues() { - Debugging.Assert(() => pendingOff > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingOff > 0); minValues[valuesOff] = pending[0]; averages[valuesOff] = pendingOff == 1 ? 
0 : (float)(pending[pendingOff - 1] - pending[0]) / (pendingOff - 1); diff --git a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs index fb9550a49a..643db0cdb9 100644 --- a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs +++ b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs @@ -78,7 +78,7 @@ public MonotonicBlockPackedReader(IndexInput @in, int packedIntsVersion, int blo public override long Get(long index) { - Debugging.Assert(() => index >= 0 && index < valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < valueCount); int block = (int)((long)((ulong)index >> blockShift)); int idx = (int)(index & blockMask); // LUCENENET NOTE: IMPORTANT: The cast to float is critical here for it to work in x86 diff --git a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs index 5ddc277eec..eecb2556c2 100644 --- a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs @@ -66,14 +66,14 @@ public MonotonicBlockPackedWriter(DataOutput @out, int blockSize) public override void Add(long l) { - Debugging.Assert(() => l >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => l >= 0); base.Add(l); } [MethodImpl(MethodImplOptions.NoInlining)] protected override void Flush() { - Debugging.Assert(() => m_off > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_off > 0); // TODO: perform a true linear regression? long min = m_values[0]; diff --git a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs index d8a96b7704..c7d26fdd28 100644 --- a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs +++ b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs @@ -69,9 +69,12 @@ public override long Get(int index) public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int gets = Math.Min(m_valueCount - index, len); for (int i = index * 3, end = (index + gets) * 3; i < end; i += 3) @@ -91,9 +94,12 @@ public override void Set(int index, long value) public override int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int sets = Math.Min(m_valueCount - index, len); for (int i = off, o = index * 3, end = off + sets; i < end; ++i) diff --git a/src/Lucene.Net/Util/Packed/Packed64.cs b/src/Lucene.Net/Util/Packed/Packed64.cs index 16b94eb9ea..f3415dbe5b 100644 --- a/src/Lucene.Net/Util/Packed/Packed64.cs +++ b/src/Lucene.Net/Util/Packed/Packed64.cs @@ -79,11 +79,11 @@ public Packed64(int valueCount, int bitsPerValue) /*var a = ~0L << (int)((uint)(BLOCK_SIZE - bitsPerValue) >> (BLOCK_SIZE - 
bitsPerValue)); //original var b = (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue); //mod - Debugging.Assert(a == b, "a: " + a, ", b: " + b);*/ + if (Debugging.AssertsEnabled) Debugging.Assert(a == b, "a: " + a, ", b: " + b);*/ maskRight = (long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)); //mod - //Debugging.Assert((long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)) == (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)); + //if (Debugging.AssertsEnabled) Debugging.Assert((long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)) == (uint)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue)); bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE; } @@ -157,7 +157,7 @@ public override long Get(int index) { var mod = (long) ((ulong) (Blocks[elementPos]) >> (int) (-endBits)) & MaskRight; var og = ((long) ((ulong) Blocks[elementPos] >> (int) -endBits)) & MaskRight; - Debugging.Assert(mod == og); + if (Debugging.AssertsEnabled) Debugging.Assert(mod == og); //return (long)((ulong)(Blocks[elementPos]) >> (int)(-endBits)) & MaskRight; return ((long)((ulong)Blocks[elementPos] >> (int)-endBits)) & MaskRight; @@ -166,7 +166,7 @@ public override long Get(int index) var a = (((Blocks[elementPos] << (int)endBits) | (long)(((ulong)(Blocks[elementPos + 1])) >> (int)(BLOCK_SIZE - endBits))) & MaskRight); var b = ((Blocks[elementPos] << (int)endBits) | ((long)((ulong)Blocks[elementPos + 1] >> (int)(BLOCK_SIZE - endBits)))) & MaskRight; - Debugging.Assert(a == b); + if (Debugging.AssertsEnabled) Debugging.Assert(a == b); //return (((Blocks[elementPos] << (int)endBits) | (long)(((ulong)(Blocks[elementPos + 1])) >> (int)(BLOCK_SIZE - endBits))) & MaskRight); return ((Blocks[elementPos] << (int)endBits) | ((long)((ulong)Blocks[elementPos + 1] >> (int)(BLOCK_SIZE - endBits)))) & MaskRight; @@ -174,10 +174,10 @@ public override long Get(int index) public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < m_valueCount); len = Math.Min(len, m_valueCount - index); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length); int originalIndex = index; PackedInt32s.IDecoder decoder = BulkOperation.Of(PackedInt32s.Format.PACKED, m_bitsPerValue); @@ -198,15 +198,15 @@ public override int Get(int index, long[] arr, int off, int len) } // bulk get - Debugging.Assert(() => index % decoder.Int64ValueCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index % decoder.Int64ValueCount == 0); int blockIndex = (int)((ulong)((long)index * m_bitsPerValue) >> BLOCK_BITS); - Debugging.Assert(() => (((long)index * m_bitsPerValue) & MOD_MASK) == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (((long)index * m_bitsPerValue) & MOD_MASK) == 0); int iterations = len / decoder.Int64ValueCount; decoder.Decode(blocks, blockIndex, arr, off, iterations); int gotValues = iterations * decoder.Int64ValueCount; index += gotValues; len -= gotValues; - Debugging.Assert(() => len >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => len >= 0); if (index > originalIndex) { @@ 
-216,7 +216,7 @@ public override int Get(int index, long[] arr, int off, int len) else { // no progress so far => already at a block boundary but no full block to get - Debugging.Assert(() => index == originalIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index == originalIndex); return base.Get(index, arr, off, len); } } @@ -242,10 +242,10 @@ public override void Set(int index, long value) public override int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < m_valueCount); len = Math.Min(len, m_valueCount - index); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length); int originalIndex = index; PackedInt32s.IEncoder encoder = BulkOperation.Of(PackedInt32s.Format.PACKED, m_bitsPerValue); @@ -266,15 +266,15 @@ public override int Set(int index, long[] arr, int off, int len) } // bulk set - Debugging.Assert(() => index % encoder.Int64ValueCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index % encoder.Int64ValueCount == 0); int blockIndex = (int)((ulong)((long)index * m_bitsPerValue) >> BLOCK_BITS); - Debugging.Assert(() => (((long)index * m_bitsPerValue) & MOD_MASK) == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => (((long)index * m_bitsPerValue) & MOD_MASK) == 0); int iterations = len / encoder.Int64ValueCount; encoder.Encode(arr, off, blocks, blockIndex, iterations); int setValues = iterations * encoder.Int64ValueCount; index += setValues; len -= setValues; - Debugging.Assert(() => len >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => len >= 0); if (index > originalIndex) { @@ -284,7 +284,7 @@ public override int Set(int index, long[] arr, int off, int len) else { // no progress so far => already at a block boundary but no full block to get - Debugging.Assert(() => index == originalIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index == originalIndex); return base.Set(index, arr, off, len); } } @@ -306,8 +306,11 @@ public override long RamBytesUsed() public override void Fill(int fromIndex, int toIndex, long val) { - Debugging.Assert(() => PackedInt32s.BitsRequired(val) <= BitsPerValue); - Debugging.Assert(() => fromIndex <= toIndex); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => PackedInt32s.BitsRequired(val) <= BitsPerValue); + Debugging.Assert(() => fromIndex <= toIndex); + } // minimum number of values that use an exact number of full blocks int nAlignedValues = 64 / Gcd(64, m_bitsPerValue); @@ -329,7 +332,7 @@ public override void Fill(int fromIndex, int toIndex, long val) Set(fromIndex++, val); } } - Debugging.Assert(() => fromIndex % nAlignedValues == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fromIndex % nAlignedValues == 0); // compute the long[] blocks for nAlignedValues consecutive values and // use them to set as many values as possible without applying any mask @@ -343,7 +346,7 @@ public override void Fill(int fromIndex, int toIndex, long val) values.Set(i, val); } nAlignedValuesBlocks = values.blocks; - Debugging.Assert(() => nAlignedBlocks <= nAlignedValuesBlocks.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nAlignedBlocks <= nAlignedValuesBlocks.Length); } 
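Packed64.Fill above leans on a periodicity argument worth making explicit: the bit pattern of a repeated value realigns with a long boundary every 64 / gcd(64, bitsPerValue) values, which is exactly why it computes nAlignedValues = 64 / Gcd(64, m_bitsPerValue) and precomputes one period of blocks (nAlignedValuesBlocks) that can then be copied wholesale without masking. A standalone check of the arithmetic (sketch only, not library code):

    using System;

    internal static class AlignmentSketch
    {
        private static int Gcd(int a, int b) => b == 0 ? a : Gcd(b, a % b);

        public static void Main()
        {
            const int bitsPerValue = 12;
            int nAlignedValues = 64 / Gcd(64, bitsPerValue);         // 16 values per period
            int nAlignedBlocks = nAlignedValues * bitsPerValue / 64; // 192 bits = exactly 3 longs
            Console.WriteLine($"{nAlignedValues} values fill {nAlignedBlocks} blocks exactly");
        }
    }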
int startBlock = (int)((ulong)((long)fromIndex * m_bitsPerValue) >> 6); int endBlock = (int)((ulong)((long)toIndex * m_bitsPerValue) >> 6); diff --git a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs index 39355a61bf..d6499c75ee 100644 --- a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs +++ b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs @@ -50,7 +50,7 @@ private static int RequiredCapacity(int valueCount, int valuesPerBlock) internal Packed64SingleBlock(int valueCount, int bitsPerValue) : base(valueCount, bitsPerValue) { - Debugging.Assert(() => IsSupported(bitsPerValue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsSupported(bitsPerValue)); int valuesPerBlock = 64 / bitsPerValue; blocks = new long[RequiredCapacity(valueCount, valuesPerBlock)]; } @@ -71,10 +71,13 @@ public override long RamBytesUsed() public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + } len = Math.Min(len, m_valueCount - index); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length); int originalIndex = index; @@ -95,10 +98,13 @@ public override int Get(int index, long[] arr, int off, int len) } // bulk get - Debugging.Assert(() => index % valuesPerBlock == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index % valuesPerBlock == 0); PackedInt32s.IDecoder decoder = BulkOperation.Of(PackedInt32s.Format.PACKED_SINGLE_BLOCK, m_bitsPerValue); - Debugging.Assert(() => decoder.Int64BlockCount == 1); - Debugging.Assert(() => decoder.Int64ValueCount == valuesPerBlock); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => decoder.Int64BlockCount == 1); + Debugging.Assert(() => decoder.Int64ValueCount == valuesPerBlock); + } int blockIndex = index / valuesPerBlock; int nblocks = (index + len) / valuesPerBlock - blockIndex; decoder.Decode(blocks, blockIndex, arr, off, nblocks); @@ -115,17 +121,20 @@ public override int Get(int index, long[] arr, int off, int len) { // no progress so far => already at a block boundary but no full block to // get - Debugging.Assert(() => index == originalIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index == originalIndex); return base.Get(index, arr, off, len); } } public override int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + } len = Math.Min(len, m_valueCount - index); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length); int originalIndex = index; @@ -146,10 +155,10 @@ public override int Set(int index, long[] arr, int off, int len) } // bulk set - Debugging.Assert(() => index % valuesPerBlock == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index % valuesPerBlock == 0); BulkOperation op = BulkOperation.Of(PackedInt32s.Format.PACKED_SINGLE_BLOCK, m_bitsPerValue); - 
Debugging.Assert(() => op.Int64BlockCount == 1); - Debugging.Assert(() => op.Int64ValueCount == valuesPerBlock); + if (Debugging.AssertsEnabled) Debugging.Assert(() => op.Int64BlockCount == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => op.Int64ValueCount == valuesPerBlock); int blockIndex = index / valuesPerBlock; int nblocks = (index + len) / valuesPerBlock - blockIndex; op.Encode(arr, off, blocks, blockIndex, nblocks); @@ -166,16 +175,19 @@ public override int Set(int index, long[] arr, int off, int len) { // no progress so far => already at a block boundary but no full block to // set - Debugging.Assert(() => index == originalIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(() => index == originalIndex); return base.Set(index, arr, off, len); } } public override void Fill(int fromIndex, int toIndex, long val) { - Debugging.Assert(() => fromIndex >= 0); - Debugging.Assert(() => fromIndex <= toIndex); - Debugging.Assert(() => PackedInt32s.BitsRequired(val) <= m_bitsPerValue); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => fromIndex >= 0); + Debugging.Assert(() => fromIndex <= toIndex); + Debugging.Assert(() => PackedInt32s.BitsRequired(val) <= m_bitsPerValue); + } int valuesPerBlock = 64 / m_bitsPerValue; if (toIndex - fromIndex <= valuesPerBlock << 1) @@ -194,13 +206,13 @@ public override void Fill(int fromIndex, int toIndex, long val) { Set(fromIndex++, val); } - Debugging.Assert(() => fromIndex % valuesPerBlock == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fromIndex % valuesPerBlock == 0); } // bulk set of the inner blocks int fromBlock = fromIndex / valuesPerBlock; int toBlock = toIndex / valuesPerBlock; - Debugging.Assert(() => fromBlock * valuesPerBlock == fromIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(() => fromBlock * valuesPerBlock == fromIndex); long blockValue = 0L; for (int i = 0; i < valuesPerBlock; ++i) diff --git a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs index 45d690c01d..ff27983460 100644 --- a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs +++ b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs @@ -66,9 +66,12 @@ public override long Get(int index) public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int gets = Math.Min(m_valueCount - index, len); for (int i = index * 3, end = (index + gets) * 3; i < end; i += 3) @@ -88,9 +91,12 @@ public override void Set(int index, long value) public override int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < m_valueCount); + Debugging.Assert(() => off + len <= arr.Length); + } int sets = Math.Min(m_valueCount - index, len); for (int i = off, o = index * 3, end = off + sets; i < end; ++i) diff --git 
a/src/Lucene.Net/Util/Packed/PackedDataInput.cs b/src/Lucene.Net/Util/Packed/PackedDataInput.cs index 89e266ac31..52eeb8769a 100644 --- a/src/Lucene.Net/Util/Packed/PackedDataInput.cs +++ b/src/Lucene.Net/Util/Packed/PackedDataInput.cs @@ -53,7 +53,7 @@ public PackedDataInput(DataInput @in) /// public long ReadInt64(int bitsPerValue) { - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); long r = 0; while (bitsPerValue > 0) { diff --git a/src/Lucene.Net/Util/Packed/PackedDataOutput.cs b/src/Lucene.Net/Util/Packed/PackedDataOutput.cs index 87f83e2710..168fd7b67f 100644 --- a/src/Lucene.Net/Util/Packed/PackedDataOutput.cs +++ b/src/Lucene.Net/Util/Packed/PackedDataOutput.cs @@ -53,7 +53,7 @@ public PackedDataOutput(DataOutput @out) /// public void WriteInt64(long value, int bitsPerValue) { - Debugging.Assert(() => bitsPerValue == 64 || (value >= 0 && value <= PackedInt32s.MaxValue(bitsPerValue))); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue == 64 || (value >= 0 && value <= PackedInt32s.MaxValue(bitsPerValue))); while (bitsPerValue > 0) { if (remainingBits == 0) diff --git a/src/Lucene.Net/Util/Packed/PackedInts.cs b/src/Lucene.Net/Util/Packed/PackedInts.cs index fac1a53af2..5bd0b9b00c 100644 --- a/src/Lucene.Net/Util/Packed/PackedInts.cs +++ b/src/Lucene.Net/Util/Packed/PackedInts.cs @@ -142,7 +142,7 @@ public override bool IsSupported(int bitsPerValue) /// public override float OverheadPerValue(int bitsPerValue) { - Debugging.Assert(() => IsSupported(bitsPerValue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsSupported(bitsPerValue)); int valuesPerBlock = 64 / bitsPerValue; int overhead = 64 % bitsPerValue; return (float)overhead / valuesPerBlock; @@ -205,7 +205,7 @@ internal Format(int id) /// public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPerValue) { - Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); // assume long-aligned return 8L * Int64Count(packedIntsVersion, valueCount, bitsPerValue); } @@ -218,9 +218,9 @@ public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPer /// public virtual int Int64Count(int packedIntsVersion, int valueCount, int bitsPerValue) { - Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture)); long byteCount = ByteCount(packedIntsVersion, valueCount, bitsPerValue); - Debugging.Assert(() => byteCount < 8L * int.MaxValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => byteCount < 8L * int.MaxValue); if ((byteCount % 8) == 0) return (int)(byteCount / 8); else @@ -241,7 +241,7 @@ public virtual bool IsSupported(int bitsPerValue) /// public virtual float OverheadPerValue(int bitsPerValue) { - Debugging.Assert(() => IsSupported(bitsPerValue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsSupported(bitsPerValue)); return 0f; } @@ -250,7 +250,7 @@ public virtual float OverheadPerValue(int 
bitsPerValue) /// public virtual float OverheadRatio(int bitsPerValue) { - Debugging.Assert(() => IsSupported(bitsPerValue)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => IsSupported(bitsPerValue)); return OverheadPerValue(bitsPerValue) / bitsPerValue; } } @@ -531,9 +531,12 @@ public abstract class Reader : NumericDocValues /// public virtual int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < Count); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < Count); + Debugging.Assert(() => off + len <= arr.Length); + } int gets = Math.Min(Count - index, len); for (int i = index, o = off, end = index + gets; i < end; ++i, ++o) @@ -572,7 +575,7 @@ public virtual int Get(int index, long[] arr, int off, int len) /// public virtual object GetArray() { - Debugging.Assert(() => !HasArray); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !HasArray); return null; } @@ -632,7 +635,7 @@ protected ReaderIterator(int valueCount, int bitsPerValue, DataInput @in) public virtual long Next() { Int64sRef nextValues = Next(1); - Debugging.Assert(() => nextValues.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => nextValues.Length > 0); long result = nextValues.Int64s[nextValues.Offset]; ++nextValues.Offset; --nextValues.Length; @@ -669,10 +672,13 @@ public abstract class Mutable : Reader /// public virtual int Set(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < Count); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < Count); + } len = Math.Min(len, Count - index); - Debugging.Assert(() => off + len <= arr.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length); for (int i = index, o = off, end = index + len; i < end; ++i, ++o) { @@ -687,8 +693,11 @@ public virtual int Set(int index, long[] arr, int off, int len) /// public virtual void Fill(int fromIndex, int toIndex, long val) { - Debugging.Assert(() => val <= MaxValue(BitsPerValue)); - Debugging.Assert(() => fromIndex <= toIndex); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => val <= MaxValue(BitsPerValue)); + Debugging.Assert(() => fromIndex <= toIndex); + } for (int i = fromIndex; i < toIndex; ++i) { Set(i, val); @@ -737,7 +746,7 @@ internal abstract class ReaderImpl : Reader protected ReaderImpl(int valueCount, int bitsPerValue) { this.m_bitsPerValue = bitsPerValue; - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); this.m_valueCount = valueCount; } @@ -756,7 +765,7 @@ public abstract class MutableImpl : Mutable protected MutableImpl(int valueCount, int bitsPerValue) { this.m_valueCount = valueCount; - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); this.m_bitsPerValue = bitsPerValue; } @@ -785,8 +794,11 
@@ public override long Get(int index) public override int Get(int index, long[] arr, int off, int len) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < valueCount); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(() => index >= 0 && index < valueCount); + } len = Math.Min(len, valueCount - index); Arrays.Fill(arr, off, off + len, 0); return len; @@ -815,8 +827,11 @@ public abstract class Writer protected Writer(DataOutput @out, int valueCount, int bitsPerValue) { - Debugging.Assert(() => bitsPerValue <= 64); - Debugging.Assert(() => valueCount >= 0 || valueCount == -1); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => bitsPerValue <= 64); + Debugging.Assert(() => valueCount >= 0 || valueCount == -1); + } this.m_out = @out; this.m_valueCount = valueCount; this.m_bitsPerValue = bitsPerValue; @@ -824,7 +839,7 @@ protected Writer(DataOutput @out, int valueCount, int bitsPerValue) internal virtual void WriteHeader() { - Debugging.Assert(() => m_valueCount != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => m_valueCount != -1); CodecUtil.WriteHeader(m_out, CODEC_NAME, VERSION_CURRENT); m_out.WriteVInt32(m_bitsPerValue); m_out.WriteVInt32(m_valueCount); @@ -971,7 +986,7 @@ public static Reader GetReader(DataInput @in) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); @@ -1013,7 +1028,7 @@ public static IReaderIterator GetReaderIterator(DataInput @in, int mem) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); return GetReaderIteratorNoHeader(@in, format, version, valueCount, bitsPerValue, mem); @@ -1049,7 +1064,7 @@ public static Reader GetDirectReaderNoHeader(IndexInput @in, Format format, int long byteCount = format.ByteCount(version, valueCount, bitsPerValue); if (byteCount != format.ByteCount(VERSION_CURRENT, valueCount, bitsPerValue)) { - Debugging.Assert(() => version == VERSION_START); + if (Debugging.AssertsEnabled) Debugging.Assert(() => version == VERSION_START); long endPointer = @in.GetFilePointer() + byteCount; // Some consumers of direct readers assume that reading the last value // will make the underlying IndexInput go to the end of the packed @@ -1136,7 +1151,7 @@ public static Reader GetDirectReader(IndexInput @in) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = 
Format.ById(@in.ReadVInt32()); return GetDirectReaderNoHeader(@in, format, version, valueCount, bitsPerValue); @@ -1175,7 +1190,7 @@ public static Mutable GetMutable(int valueCount, int bitsPerValue, float accepta /// public static Mutable GetMutable(int valueCount, int bitsPerValue, PackedInt32s.Format format) { - Debugging.Assert(() => valueCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0); if (format == PackedInt32s.Format.PACKED_SINGLE_BLOCK) { @@ -1302,7 +1317,7 @@ public static Writer GetWriterNoHeader(DataOutput @out, Format format, int value /// If there is a low-level I/O error. public static Writer GetWriter(DataOutput @out, int valueCount, int bitsPerValue, float acceptableOverheadRatio) { - Debugging.Assert(() => valueCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0); FormatAndBits formatAndBits = FastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); Writer writer = GetWriterNoHeader(@out, formatAndBits.Format, valueCount, formatAndBits.BitsPerValue, DEFAULT_BUFFER_SIZE); @@ -1347,8 +1362,11 @@ public static long MaxValue(int bitsPerValue) /// public static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int len, int mem) { - Debugging.Assert(() => srcPos + len <= src.Count); - Debugging.Assert(() => destPos + len <= dest.Count); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => srcPos + len <= src.Count); + Debugging.Assert(() => destPos + len <= dest.Count); + } int capacity = (int)((uint)mem >> 3); if (capacity == 0) { @@ -1369,17 +1387,17 @@ public static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int l /// Same as but using a pre-allocated buffer. internal static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int len, long[] buf) { - Debugging.Assert(() => buf.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => buf.Length > 0); int remaining = 0; while (len > 0) { int read = src.Get(srcPos, buf, remaining, Math.Min(len, buf.Length - remaining)); - Debugging.Assert(() => read > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => read > 0); srcPos += read; len -= read; remaining += read; int written = dest.Set(destPos, buf, 0, remaining); - Debugging.Assert(() => written > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => written > 0); destPos += written; if (written < remaining) { @@ -1410,7 +1428,7 @@ public static Header ReadHeader(DataInput @in) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); return new Header(format, valueCount, bitsPerValue, version); diff --git a/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs index 7b44c4506b..7b3bad99ed 100644 --- a/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs +++ b/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs @@ -41,7 +41,7 @@ internal PackedReaderIterator(PackedInt32s.Format format, int packedIntsVersion, this.packedIntsVersion = packedIntsVersion; bulkOperation = BulkOperation.Of(format, bitsPerValue); iterations = Iterations(mem); - Debugging.Assert(() => valueCount == 0 || iterations > 0); + if 
(Debugging.AssertsEnabled) Debugging.Assert(() => valueCount == 0 || iterations > 0); nextBlocks = new byte[iterations * bulkOperation.ByteBlockCount]; nextValues = new Int64sRef(new long[iterations * bulkOperation.ByteValueCount], 0, 0); nextValues.Offset = nextValues.Int64s.Length; @@ -61,9 +61,12 @@ private int Iterations(int mem) public override Int64sRef Next(int count) { - Debugging.Assert(() => nextValues.Length >= 0); - Debugging.Assert(() => count > 0); - Debugging.Assert(() => nextValues.Offset + nextValues.Length <= nextValues.Int64s.Length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => nextValues.Length >= 0); + Debugging.Assert(() => count > 0); + Debugging.Assert(() => nextValues.Offset + nextValues.Length <= nextValues.Int64s.Length); + } nextValues.Offset += nextValues.Length; diff --git a/src/Lucene.Net/Util/Packed/PackedWriter.cs b/src/Lucene.Net/Util/Packed/PackedWriter.cs index 2df619f068..5023566b87 100644 --- a/src/Lucene.Net/Util/Packed/PackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/PackedWriter.cs @@ -56,8 +56,11 @@ internal PackedWriter(PackedInt32s.Format format, DataOutput @out, int valueCoun public override void Add(long v) { - Debugging.Assert(() => m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), () => m_bitsPerValue.ToString(CultureInfo.InvariantCulture)); - Debugging.Assert(() => !finished); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), () => m_bitsPerValue.ToString(CultureInfo.InvariantCulture)); + Debugging.Assert(() => !finished); + } if (m_valueCount != -1 && written >= m_valueCount) { throw new EndOfStreamException("Writing past end of stream"); @@ -72,7 +75,7 @@ public override void Add(long v) public override void Finish() { - Debugging.Assert(() => !finished); + if (Debugging.AssertsEnabled) Debugging.Assert(() => !finished); if (m_valueCount != -1) { while (written < m_valueCount) diff --git a/src/Lucene.Net/Util/Packed/PagedMutable.cs b/src/Lucene.Net/Util/Packed/PagedMutable.cs index 05f872cf53..83560f51e5 100644 --- a/src/Lucene.Net/Util/Packed/PagedMutable.cs +++ b/src/Lucene.Net/Util/Packed/PagedMutable.cs @@ -58,7 +58,7 @@ internal PagedMutable(long size, int pageSize, int bitsPerValue, PackedInt32s.Fo protected override Mutable NewMutable(int valueCount, int bitsPerValue) { - Debugging.Assert(() => this.bitsPerValue >= bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(() => this.bitsPerValue >= bitsPerValue); return PackedInt32s.GetMutable(valueCount, this.bitsPerValue, format); } diff --git a/src/Lucene.Net/Util/PagedBytes.cs b/src/Lucene.Net/Util/PagedBytes.cs index dd392cb285..b7231bd635 100644 --- a/src/Lucene.Net/Util/PagedBytes.cs +++ b/src/Lucene.Net/Util/PagedBytes.cs @@ -96,8 +96,11 @@ internal Reader(PagedBytes pagedBytes) /// public void FillSlice(BytesRef b, long start, int length) { - Debugging.Assert(() => length >= 0, () => "length=" + length); - Debugging.Assert(() => length <= blockSize + 1, () => "length=" + length); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => length >= 0, () => "length=" + length); + Debugging.Assert(() => length <= blockSize + 1, () => "length=" + length); + } b.Length = length; if (length == 0) { @@ -145,7 +148,7 @@ public void Fill(BytesRef b, long start) { b.Length = ((block[offset] & 0x7f) << 8) | (block[1 + offset] & 0xff); b.Offset = offset + 2; - Debugging.Assert(() => b.Length > 0); + if (Debugging.AssertsEnabled) 
Debugging.Assert(() => b.Length > 0); } } @@ -163,7 +166,7 @@ public long RamBytesUsed() /// public PagedBytes(int blockBits) { - Debugging.Assert(() => blockBits > 0 && blockBits <= 31, () => blockBits.ToString(CultureInfo.InvariantCulture)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => blockBits > 0 && blockBits <= 31, () => blockBits.ToString(CultureInfo.InvariantCulture)); this.blockSize = 1 << blockBits; this.blockBits = blockBits; blockMask = blockSize - 1; @@ -223,7 +226,7 @@ public void Copy(BytesRef bytes, BytesRef @out) currentBlock = new byte[blockSize]; upto = 0; //left = blockSize; // LUCENENET: Unnecessary assignment - Debugging.Assert(() => bytes.Length <= blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes.Length <= blockSize); // TODO: we could also support variable block sizes } @@ -377,7 +380,7 @@ public override byte ReadByte() public override void ReadBytes(byte[] b, int offset, int len) { - Debugging.Assert(() => b.Length >= offset + len); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b.Length >= offset + len); int offsetEnd = offset + len; while (true) { @@ -433,7 +436,7 @@ public override void WriteByte(byte b) public override void WriteBytes(byte[] b, int offset, int length) { - Debugging.Assert(() => b.Length >= offset + length); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b.Length >= offset + length); if (length == 0) { return; diff --git a/src/Lucene.Net/Util/QueryBuilder.cs b/src/Lucene.Net/Util/QueryBuilder.cs index 7ef3f13c38..6b0617371b 100644 --- a/src/Lucene.Net/Util/QueryBuilder.cs +++ b/src/Lucene.Net/Util/QueryBuilder.cs @@ -186,7 +186,7 @@ public virtual bool EnablePositionIncrements /// Slop factor for phrase/multiphrase queries. protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string field, string queryText, bool quoted, int phraseSlop) { - Debugging.Assert(() => @operator == Occur.SHOULD || @operator == Occur.MUST); + if (Debugging.AssertsEnabled) Debugging.Assert(() => @operator == Occur.SHOULD || @operator == Occur.MUST); // Use the analyzer to get all the tokens, and then build a TermQuery, // PhraseQuery, or nothing based on the term count CachingTokenFilter buffer = null; @@ -263,7 +263,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); } catch (IOException) @@ -289,7 +289,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); } catch (IOException) @@ -311,7 +311,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); } catch (IOException) @@ -354,7 +354,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); if (posIncrAtt != null) { @@ -405,7 +405,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, 
string fiel try { bool hasNext = buffer.IncrementToken(); - Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); termAtt.FillBytesRef(); if (posIncrAtt != null) { diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs index 298009bcab..ab06ccfc7c 100644 --- a/src/Lucene.Net/Util/RamUsageEstimator.cs +++ b/src/Lucene.Net/Util/RamUsageEstimator.cs @@ -779,8 +779,11 @@ public IdentityHashSet(int initialCapacity, float loadFactor) { initialCapacity = Math.Max(MIN_CAPACITY, initialCapacity); - Debugging.Assert(() => initialCapacity > 0, () => "Initial capacity must be between (0, " + int.MaxValue + "]."); - Debugging.Assert(() => loadFactor > 0 && loadFactor < 1, () => "Load factor must be between (0, 1)."); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => initialCapacity > 0, () => "Initial capacity must be between (0, " + int.MaxValue + "]."); + Debugging.Assert(() => loadFactor > 0 && loadFactor < 1, () => "Load factor must be between (0, 1)."); + } this.LoadFactor = loadFactor; AllocateBuffers(RoundCapacity(initialCapacity)); } @@ -790,7 +793,7 @@ public IdentityHashSet(int initialCapacity, float loadFactor) /// public bool Add(KType e) { - Debugging.Assert(() => e != null, () => "Null keys not allowed."); + if (Debugging.AssertsEnabled) Debugging.Assert(() => e != null, () => "Null keys not allowed."); if (Assigned >= resizeThreshold) { @@ -864,7 +867,7 @@ private void ExpandAndRehash() { object[] oldKeys = this.keys; - Debugging.Assert(() => Assigned >= resizeThreshold); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Assigned >= resizeThreshold); AllocateBuffers(NextCapacity(keys.Length)); /* @@ -903,8 +906,11 @@ private void AllocateBuffers(int capacity) /// private int NextCapacity(int current) // LUCENENET NOTE: made private, since protected is not valid in a sealed class { - Debugging.Assert(() => current > 0 && ((current & (current - 1)) == 0), () => "Capacity must be a power of two."); - Debugging.Assert(() => (current << 1) > 0, () => "Maximum capacity exceeded (" + ((int)((uint)0x80000000 >> 1)) + ")."); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => current > 0 && ((current & (current - 1)) == 0), () => "Capacity must be a power of two."); + Debugging.Assert(() => (current << 1) > 0, () => "Maximum capacity exceeded (" + ((int)((uint)0x80000000 >> 1)) + ")."); + } if (current < MIN_CAPACITY / 2) { diff --git a/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs b/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs index bd76164237..7143119206 100644 --- a/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs +++ b/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs @@ -109,7 +109,7 @@ public override void RecycleByteBlocks(byte[][] blocks, int start, int end) blocks[i] = null; } bytesUsed.AddAndGet(-(end - stop) * m_blockSize); - Debugging.Assert(() => bytesUsed.Get() >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed.Get() >= 0); } /// The number of currently buffered blocks. @@ -129,7 +129,7 @@ public override void RecycleByteBlocks(byte[][] blocks, int start, int end) /// The number of actually removed buffers. 
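Context for the two recycling allocators in this stretch: they keep a bounded free list of fixed-size blocks so hot indexing paths can reuse buffers instead of churning the GC, and FreeBlocks (below) trims that list on demand; the asserts being converted here check the bytesUsed counter never goes negative as blocks are released. The idea in miniature, as a hypothetical BlockPool rather than the allocator's actual API:

    using System.Collections.Generic;

    internal sealed class BlockPool
    {
        private readonly int blockSize;
        private readonly int maxBuffered;
        private readonly Stack<byte[]> free = new Stack<byte[]>();

        public BlockPool(int blockSize, int maxBuffered)
        {
            this.blockSize = blockSize;
            this.maxBuffered = maxBuffered;
        }

        // Reuse a recycled block when one is available; allocate otherwise.
        public byte[] Rent() => free.Count > 0 ? free.Pop() : new byte[blockSize];

        // Keep at most maxBuffered blocks around; beyond that, let the GC have them.
        public void Return(byte[] block)
        {
            if (free.Count < maxBuffered)
                free.Push(block);
        }
    }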
public int FreeBlocks(int num) { - Debugging.Assert(() => num >= 0, () => "free blocks must be >= 0 but was: " + num); + if (Debugging.AssertsEnabled) Debugging.Assert(() => num >= 0, () => "free blocks must be >= 0 but was: " + num); int stop; int count; if (num > freeBlocks) @@ -147,7 +147,7 @@ public int FreeBlocks(int num) freeByteBlocks[--freeBlocks] = null; } bytesUsed.AddAndGet(-count * m_blockSize); - Debugging.Assert(() => bytesUsed.Get() >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed.Get() >= 0); return count; } } diff --git a/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs b/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs index 41fd4869db..332c4b94a2 100644 --- a/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs +++ b/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs @@ -120,7 +120,7 @@ public override void RecycleInt32Blocks(int[][] blocks, int start, int end) blocks[i] = null; } bytesUsed.AddAndGet(-(end - stop) * (m_blockSize * RamUsageEstimator.NUM_BYTES_INT32)); - Debugging.Assert(() => bytesUsed.Get() >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed.Get() >= 0); } /// The number of currently buffered blocks. @@ -140,7 +140,7 @@ public override void RecycleInt32Blocks(int[][] blocks, int start, int end) /// The number of actually removed buffers. public int FreeBlocks(int num) { - Debugging.Assert(() => num >= 0, () => "free blocks must be >= 0 but was: " + num); + if (Debugging.AssertsEnabled) Debugging.Assert(() => num >= 0, () => "free blocks must be >= 0 but was: " + num); int stop; int count; if (num > freeBlocks) @@ -158,7 +158,7 @@ public int FreeBlocks(int num) freeByteBlocks[--freeBlocks] = null; } bytesUsed.AddAndGet(-count * m_blockSize * RamUsageEstimator.NUM_BYTES_INT32); - Debugging.Assert(() => bytesUsed.Get() >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed.Get() >= 0); return count; } } diff --git a/src/Lucene.Net/Util/RollingBuffer.cs b/src/Lucene.Net/Util/RollingBuffer.cs index 7558150343..7727d1e40c 100644 --- a/src/Lucene.Net/Util/RollingBuffer.cs +++ b/src/Lucene.Net/Util/RollingBuffer.cs @@ -138,7 +138,7 @@ public virtual T Get(int pos) nextPos++; count++; } - Debugging.Assert(() => InBounds(pos)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => InBounds(pos)); int index = GetIndex(pos); //System.out.println(" pos=" + pos + " nextPos=" + nextPos + " -> index=" + index); //assert buffer[index].pos == pos; @@ -154,8 +154,11 @@ public virtual T Get(int pos) public virtual void FreeBefore(int pos) { int toFree = count - (nextPos - pos); - Debugging.Assert(() => toFree >= 0); - Debugging.Assert(() => toFree <= count, () => "toFree=" + toFree + " count=" + count); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => toFree >= 0); + Debugging.Assert(() => toFree <= count, () => "toFree=" + toFree + " count=" + count); + } int index = nextWrite - count; if (index < 0) { diff --git a/src/Lucene.Net/Util/SentinelIntSet.cs b/src/Lucene.Net/Util/SentinelIntSet.cs index b1e4d7e804..ea7f13790a 100644 --- a/src/Lucene.Net/Util/SentinelIntSet.cs +++ b/src/Lucene.Net/Util/SentinelIntSet.cs @@ -114,7 +114,7 @@ public virtual int Hash(int key) /// (internal) Returns the slot for this key. 
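SentinelIntSet, whose GetSlot and Find appear below, is open addressing without a separate occupancy bitmap: EmptyVal doubles as the "unoccupied" marker, which is why both methods assert key != EmptyVal before probing. A condensed sketch of the probe loop (illustrative; the real methods hash the key before masking, and the set's growth policy is assumed here to keep at least one empty slot so the loop terminates):

    internal static class ProbeSketch
    {
        // Returns the slot holding key, or the first empty slot reached while probing.
        // Assumes keys.Length is a power of two and at least one slot holds emptyVal.
        public static int FindSlot(int[] keys, int emptyVal, int key)
        {
            int s = key & (keys.Length - 1);          // power-of-two mask instead of modulo
            while (keys[s] != key && keys[s] != emptyVal)
            {
                s = (s + 1) & (keys.Length - 1);      // linear probe with wrap-around
            }
            return s;
        }
    }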
public virtual int GetSlot(int key) { - Debugging.Assert(() => key != EmptyVal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => key != EmptyVal); int h = Hash(key); int s = h & (keys.Length - 1); if (keys[s] == key || keys[s] == EmptyVal) @@ -134,7 +134,7 @@ public virtual int GetSlot(int key) /// (internal) Returns the slot for this key, or -slot-1 if not found. public virtual int Find(int key) { - Debugging.Assert(() => key != EmptyVal); + if (Debugging.AssertsEnabled) Debugging.Assert(() => key != EmptyVal); int h = Hash(key); int s = h & (keys.Length - 1); if (keys[s] == key) diff --git a/src/Lucene.Net/Util/Sorter.cs b/src/Lucene.Net/Util/Sorter.cs index 6ce3f33eeb..c1b4aa92d5 100644 --- a/src/Lucene.Net/Util/Sorter.cs +++ b/src/Lucene.Net/Util/Sorter.cs @@ -185,7 +185,7 @@ internal void Reverse(int from, int to) internal void Rotate(int lo, int mid, int hi) { - Debugging.Assert(() => lo <= mid && mid <= hi); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lo <= mid && mid <= hi); if (lo == mid || mid == hi) { return; diff --git a/src/Lucene.Net/Util/TimSorter.cs b/src/Lucene.Net/Util/TimSorter.cs index c4622d99b5..d0cd914c47 100644 --- a/src/Lucene.Net/Util/TimSorter.cs +++ b/src/Lucene.Net/Util/TimSorter.cs @@ -67,7 +67,7 @@ protected TimSorter(int maxTempSlots) /// Minimum run length for an array of length . internal static int MinRun(int length) { - Debugging.Assert(() => length >= MINRUN); + if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= MINRUN); int n = length; int r = 0; while (n >= 64) @@ -76,7 +76,7 @@ internal static int MinRun(int length) n = (int)((uint)n >> 1); } int minRun = n + r; - Debugging.Assert(() => minRun >= MINRUN && minRun <= THRESHOLD); + if (Debugging.AssertsEnabled) Debugging.Assert(() => minRun >= MINRUN && minRun <= THRESHOLD); return minRun; } @@ -114,7 +114,7 @@ internal virtual void PushRunLen(int len) internal virtual int NextRun() { int runBase = RunEnd(0); - Debugging.Assert(() => runBase < to); + if (Debugging.AssertsEnabled) Debugging.Assert(() => runBase < to); if (runBase == to - 1) { return 1; @@ -198,7 +198,7 @@ internal virtual void Reset(int from, int to) internal virtual void MergeAt(int n) { - Debugging.Assert(() => stackSize >= 2); + if (Debugging.AssertsEnabled) Debugging.Assert(() => stackSize >= 2); Merge(RunBase(n + 1), RunBase(n), RunEnd(n)); for (int j = n + 1; j > 0; --j) { @@ -249,7 +249,7 @@ public override void Sort(int from, int to) PushRunLen(NextRun()); } while (RunEnd(0) < to); ExhaustStack(); - Debugging.Assert(() => RunEnd(0) == to); + if (Debugging.AssertsEnabled) Debugging.Assert(() => RunEnd(0) == to); } internal override void DoRotate(int lo, int mid, int hi) @@ -297,7 +297,7 @@ internal override void DoRotate(int lo, int mid, int hi) internal virtual void MergeLo(int lo, int mid, int hi) { - Debugging.Assert(() => Compare(lo, mid) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Compare(lo, mid) > 0); int len1 = mid - lo; Save(lo, len1); Copy(mid, lo); @@ -335,12 +335,12 @@ internal virtual void MergeLo(int lo, int mid, int hi) { Restore(i++, dest); } - Debugging.Assert(() => j == dest); + if (Debugging.AssertsEnabled) Debugging.Assert(() => j == dest); } internal virtual void MergeHi(int lo, int mid, int hi) { - Debugging.Assert(() => Compare(mid - 1, hi - 1) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => Compare(mid - 1, hi - 1) > 0); int len2 = hi - mid; Save(mid, len2); Copy(mid - 1, hi - 1); @@ -378,7 +378,7 @@ internal virtual void MergeHi(int lo, int mid, 
int hi) { Restore(j--, dest); } - Debugging.Assert(() => i == dest); + if (Debugging.AssertsEnabled) Debugging.Assert(() => i == dest); } internal virtual int LowerSaved(int from, int to, int val) diff --git a/src/Lucene.Net/Util/UnicodeUtil.cs b/src/Lucene.Net/Util/UnicodeUtil.cs index 36e1086036..721a695cf8 100644 --- a/src/Lucene.Net/Util/UnicodeUtil.cs +++ b/src/Lucene.Net/Util/UnicodeUtil.cs @@ -830,7 +830,7 @@ public static void UTF8toUTF16(byte[] utf8, int offset, int length, CharsRef cha int b = utf8[offset++] & 0xff; if (b < 0xc0) { - Debugging.Assert(() => b < 0x80); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b < 0x80); @out[out_offset++] = (char)b; } else if (b < 0xe0) @@ -844,7 +844,7 @@ public static void UTF8toUTF16(byte[] utf8, int offset, int length, CharsRef cha } else { - Debugging.Assert(() => b < 0xf8, () => "b = 0x" + b.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(() => b < 0xf8, () => "b = 0x" + b.ToString("x")); int ch = ((b & 0x7) << 18) + ((utf8[offset] & 0x3f) << 12) + ((utf8[offset + 1] & 0x3f) << 6) + (utf8[offset + 2] & 0x3f); offset += 3; if (ch < UNI_MAX_BMP) diff --git a/src/Lucene.Net/Util/WAH8DocIdSet.cs b/src/Lucene.Net/Util/WAH8DocIdSet.cs index 82a81feaaf..05b88629e3 100644 --- a/src/Lucene.Net/Util/WAH8DocIdSet.cs +++ b/src/Lucene.Net/Util/WAH8DocIdSet.cs @@ -154,7 +154,7 @@ public static WAH8DocIdSet Intersect(ICollection docIdSets, int in wordNum = iterators[i].wordNum; goto mainContinue; } - Debugging.Assert(() => iterators[i].wordNum == wordNum); + if (Debugging.AssertsEnabled) Debugging.Assert(() => iterators[i].wordNum == wordNum); word &= iterators[i].word; if (word == 0) { @@ -164,7 +164,7 @@ public static WAH8DocIdSet Intersect(ICollection docIdSets, int in } } // Found a common word - Debugging.Assert(() => word != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => word != 0); builder.AddWord(wordNum, word); ++wordNum; mainContinue:; @@ -250,7 +250,7 @@ protected internal override bool LessThan(Iterator a, Iterator b) internal static int WordNum(int docID) { - Debugging.Assert(() => docID >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0); return (int)((uint)docID >> 3); } @@ -300,8 +300,11 @@ public virtual object SetIndexInterval(int indexInterval) internal virtual void WriteHeader(bool reverse, int cleanLength, int dirtyLength) { int cleanLengthMinus2 = cleanLength - 2; - Debugging.Assert(() => cleanLengthMinus2 >= 0); - Debugging.Assert(() => dirtyLength >= 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => cleanLengthMinus2 >= 0); + Debugging.Assert(() => dirtyLength >= 0); + } int token = ((cleanLengthMinus2 & 0x03) << 4) | (dirtyLength & 0x07); if (reverse) { @@ -326,7 +329,7 @@ internal virtual void WriteHeader(bool reverse, int cleanLength, int dirtyLength } } - private bool SequenceIsConsistent() + private bool SequenceIsConsistent() // Called only from assert { for (int i = 1; i < dirtyWords.Length; ++i) { @@ -338,7 +341,7 @@ private bool SequenceIsConsistent() internal virtual void WriteSequence() { - Debugging.Assert(SequenceIsConsistent); + if (Debugging.AssertsEnabled) Debugging.Assert(SequenceIsConsistent); try { WriteHeader(reverse, clean, dirtyWords.Length); @@ -354,8 +357,11 @@ internal virtual void WriteSequence() internal virtual void AddWord(int wordNum, byte word) { - Debugging.Assert(() => wordNum > lastWordNum); - Debugging.Assert(() => word != 0); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => wordNum > lastWordNum); + 
Debugging.Assert(() => word != 0); + } if (!reverse) { @@ -397,7 +403,7 @@ internal virtual void AddWord(int wordNum, byte word) } else { - Debugging.Assert(() => lastWordNum >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastWordNum >= 0); switch (wordNum - lastWordNum) { case 1: @@ -447,7 +453,7 @@ public virtual WAH8DocIdSet Build() { if (cardinality == 0) { - Debugging.Assert(() => lastWordNum == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(() => lastWordNum == -1); return EMPTY; } WriteSequence(); @@ -470,15 +476,18 @@ public virtual WAH8DocIdSet Build() positions.Add(0L); wordNums.Add(0L); Iterator it = new Iterator(data, cardinality, int.MaxValue, SINGLE_ZERO_BUFFER, SINGLE_ZERO_BUFFER); - Debugging.Assert(() => it.@in.Position == 0); - Debugging.Assert(() => it.wordNum == -1); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => it.@in.Position == 0); + Debugging.Assert(() => it.wordNum == -1); + } for (int i = 1; i < valueCount; ++i) { // skip indexInterval sequences for (int j = 0; j < indexInterval; ++j) { bool readSequence = it.ReadSequence(); - Debugging.Assert(() => readSequence); + if (Debugging.AssertsEnabled) Debugging.Assert(() => readSequence); it.SkipDirtyBytes(); } int position = it.@in.Position; @@ -678,15 +687,18 @@ internal virtual bool ReadSequence() allOnesLength = ReadCleanLength(@in, token); } dirtyLength = ReadDirtyLength(@in, token); - Debugging.Assert(() => @in.Length - @in.Position >= dirtyLength, () => @in.Position + " " + @in.Length + " " + dirtyLength); + if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.Length - @in.Position >= dirtyLength, () => @in.Position + " " + @in.Length + " " + dirtyLength); ++sequenceNum; return true; } internal virtual void SkipDirtyBytes(int count) { - Debugging.Assert(() => count >= 0); - Debugging.Assert(() => count <= allOnesLength + dirtyLength); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => count >= 0); + Debugging.Assert(() => count <= allOnesLength + dirtyLength); + } wordNum += count; if (count <= allOnesLength) { @@ -732,7 +744,7 @@ internal virtual void NextWord() word = @in.ReadByte(); ++wordNum; --dirtyLength; - Debugging.Assert(() => word != 0); // never more than one consecutive 0 + if (Debugging.AssertsEnabled) Debugging.Assert(() => word != 0); // never more than one consecutive 0 return; } } @@ -747,8 +759,11 @@ internal virtual int ForwardBinarySearch(int targetWordNum) // advance forward and double the window at each step int indexSize = (int)wordNums.Count; int lo = sequenceNum / indexInterval, hi = lo + 1; - Debugging.Assert(() => sequenceNum == -1 || wordNums.Get(lo) <= wordNum); - Debugging.Assert(() => lo + 1 == wordNums.Count || wordNums.Get(lo + 1) > wordNum); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => sequenceNum == -1 || wordNums.Get(lo) <= wordNum); + Debugging.Assert(() => lo + 1 == wordNums.Count || wordNums.Get(lo + 1) > wordNum); + } while (true) { if (hi >= indexSize) @@ -779,14 +794,17 @@ internal virtual int ForwardBinarySearch(int targetWordNum) hi = mid - 1; } } - Debugging.Assert(() => wordNums.Get(hi) <= targetWordNum); - Debugging.Assert(() => hi + 1 == wordNums.Count || wordNums.Get(hi + 1) > targetWordNum); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => wordNums.Get(hi) <= targetWordNum); + Debugging.Assert(() => hi + 1 == wordNums.Count || wordNums.Get(hi + 1) > targetWordNum); + } return hi; } internal virtual void AdvanceWord(int targetWordNum) { - Debugging.Assert(() => targetWordNum > wordNum); 
+ if (Debugging.AssertsEnabled) Debugging.Assert(() => targetWordNum > wordNum); int delta = targetWordNum - wordNum; if (delta <= allOnesLength + dirtyLength + 1) { @@ -795,7 +813,7 @@ internal virtual void AdvanceWord(int targetWordNum) else { SkipDirtyBytes(); - Debugging.Assert(() => dirtyLength == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => dirtyLength == 0); if (delta > indexThreshold) { // use the index @@ -847,7 +865,7 @@ public override int NextDoc() return docID = NO_MORE_DOCS; } bitList = BitUtil.BitList(word); - Debugging.Assert(() => bitList != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(() => bitList != 0); docID = (wordNum << 3) | ((bitList & 0x0F) - 1); bitList = (int)((uint)bitList >> 4); return docID; @@ -855,7 +873,7 @@ public override int NextDoc() public override int Advance(int target) { - Debugging.Assert(() => target > docID); + if (Debugging.AssertsEnabled) Debugging.Assert(() => target > docID); int targetWordNum = WordNum(target); if (targetWordNum > this.wordNum) { diff --git a/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs b/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs index 29a1360eeb..232e9a0753 100644 --- a/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs +++ b/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs @@ -774,8 +774,11 @@ public LoadFieldValuesPostingsHighlighter(int maxLength, string text) protected override IList LoadFieldValues(IndexSearcher searcher, string[] fields, int[] docids, int maxLength) { - Debugging.Assert(() => fields.Length == 1); - Debugging.Assert(() => docids.Length == 1); + if (Debugging.AssertsEnabled) + { + Debugging.Assert(() => fields.Length == 1); + Debugging.Assert(() => docids.Length == 1); + } String[][] contents = RectangularArrays.ReturnRectangularArray(1, 1); //= new String[1][1]; contents[0][0] = text; return contents; @@ -1178,7 +1181,7 @@ internal class GetMultiValuedSeparatorPostingsHighlighter : ICUPostingsHighlight { protected override char GetMultiValuedSeparator(string field) { - Debugging.Assert(() => field.Equals("body", StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Equals("body", StringComparison.Ordinal)); return '\u2029'; } } From e4c155f88e89f6ecc156426ebf92e4ad79011ee7 Mon Sep 17 00:00:00 2001 From: Shad Storhaug Date: Wed, 19 Aug 2020 05:17:56 +0700 Subject: [PATCH 08/13] Lucene.Net.Diagnostics.Debugging: Changed AssertsEnabled to a static field --- src/Lucene.Net/Support/Diagnostics/Debugging.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Lucene.Net/Support/Diagnostics/Debugging.cs b/src/Lucene.Net/Support/Diagnostics/Debugging.cs index e4ce4672fc..32ea1eaa0c 100644 --- a/src/Lucene.Net/Support/Diagnostics/Debugging.cs +++ b/src/Lucene.Net/Support/Diagnostics/Debugging.cs @@ -34,7 +34,7 @@ internal static class Debugging /// , , /// , and . /// - public static bool AssertsEnabled { get; set; } = SystemProperties.GetPropertyAsBoolean("assert", false); + public static bool AssertsEnabled = SystemProperties.GetPropertyAsBoolean("assert", false); ///// ///// Checks for a condition; if the condition is false, throws an . 
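
Note on the assertion pattern this series converges on: PATCH 08 above makes Debugging.AssertsEnabled a static field, so the call-site guard compiles down to a plain field read, and PATCH 09 below changes the Assert() overloads to take the condition as a plain bool instead of a Func<bool>, which removes the per-call delegate allocation for the condition; the Func<string> message factory is kept, so the failure message is only built if an assert actually fails. A minimal sketch of the resulting call-site pattern follows. The AssertDemo class and CheckCapacity method are hypothetical illustrations only; Debugging is internal to the Lucene.Net assembly, so a real call site must live inside it, and the power-of-two check is borrowed from the NextCapacity assert seen earlier in this series.

    using Lucene.Net.Diagnostics;

    internal static class AssertDemo // hypothetical demo type, not part of the patch
    {
        internal static void CheckCapacity(int capacity)
        {
            // AssertsEnabled is a static field after PATCH 08, so this guard is a
            // cheap read; when asserts are off, neither the condition nor the
            // message lambda below is evaluated or allocated.
            if (Debugging.AssertsEnabled)
                Debugging.Assert(
                    // the condition is a plain bool after PATCH 09, not a Func<bool>
                    capacity > 0 && (capacity & (capacity - 1)) == 0,
                    // the message factory stays deferred: built only on failure
                    () => "Capacity must be a power of two, got " + capacity);
        }
    }

Asserts are toggled through the "assert" system property, read once into the static field via SystemProperties.GetPropertyAsBoolean("assert", false) at type initialization; since PATCH 08 leaves the field writable, callers such as the test framework can also assign it directly.
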
From ea8f5a64324925b51a78850fcb75f092e59cb650 Mon Sep 17 00:00:00 2001 From: Shad Storhaug Date: Wed, 19 Aug 2020 05:37:55 +0700 Subject: [PATCH 09/13] Changed all Debugging.Assert() overloads to use bool instead of Func --- .../Analysis/CharFilter/BaseCharFilter.cs | 2 +- .../CharFilter/HTMLStripCharFilter.cs | 22 +- .../Analysis/CharFilter/MappingCharFilter.cs | 2 +- .../Analysis/CharFilter/NormalizeCharMap.cs | 2 +- .../Compound/CompoundWordTokenFilterBase.cs | 2 +- .../Analysis/Gl/GalicianStemmer.cs | 2 +- .../Analysis/Hunspell/Dictionary.cs | 4 +- .../Analysis/Hunspell/Stemmer.cs | 4 +- .../Miscellaneous/ASCIIFoldingFilter.cs | 2 +- .../Miscellaneous/SingleTokenTokenStream.cs | 4 +- .../Analysis/NGram/NGramTokenizer.cs | 4 +- .../Pattern/PatternCaptureGroupTokenFilter.cs | 2 +- .../Analysis/Pt/PortugueseStemmer.cs | 2 +- .../Analysis/Pt/RSLPStemmerBase.cs | 2 +- .../Analysis/Synonym/SynonymFilter.cs | 18 +- .../Analysis/Synonym/SynonymMap.cs | 6 +- .../Analysis/Util/CharArrayMap.cs | 2 +- .../Analysis/Util/CharTokenizer.cs | 4 +- .../Analysis/Util/CharacterUtils.cs | 20 +- .../Analysis/Util/RollingCharBuffer.cs | 18 +- .../Analysis/Util/SegmentingTokenizerBase.cs | 2 +- .../Analysis/Util/StemmerUtil.cs | 4 +- .../Analysis/Icu/ICUNormalizer2CharFilter.cs | 2 +- .../Analysis/Icu/Segmentation/ICUTokenizer.cs | 2 +- .../Icu/Segmentation/ICUTokenizerFactory.cs | 4 +- .../Dict/TokenInfoFST.cs | 2 +- .../GraphvizFormatter.cs | 4 +- .../JapaneseIterationMarkCharFilter.cs | 2 +- .../JapaneseTokenizer.cs | 36 +-- .../Tools/BinaryDictionaryWriter.cs | 24 +- .../Tools/ConnectionCostsBuilder.cs | 6 +- .../Tools/ConnectionCostsWriter.cs | 4 +- .../BeiderMorseFilter.cs | 2 +- .../ByTask/Utils/AnalyzerFactory.cs | 2 +- .../Quality/QualityStats.cs | 2 +- .../Quality/Trec/TrecJudge.cs | 2 +- .../BlockTerms/BlockTermsReader.cs | 32 +-- .../BlockTerms/BlockTermsWriter.cs | 12 +- .../BlockTerms/FixedGapTermsIndexReader.cs | 24 +- .../BlockTerms/FixedGapTermsIndexWriter.cs | 2 +- .../BlockTerms/VariableGapTermsIndexReader.cs | 2 +- .../BlockTerms/VariableGapTermsIndexWriter.cs | 2 +- .../Bloom/BloomFilteringPostingsFormat.cs | 2 +- src/Lucene.Net.Codecs/Bloom/FuzzySet.cs | 2 +- .../IntBlock/FixedIntBlockIndexInput.cs | 4 +- .../IntBlock/FixedIntBlockIndexOutput.cs | 2 +- .../IntBlock/VariableIntBlockIndexInput.cs | 2 +- .../IntBlock/VariableIntBlockIndexOutput.cs | 8 +- .../Memory/DirectDocValuesConsumer.cs | 2 +- .../Memory/DirectDocValuesProducer.cs | 2 +- .../Memory/DirectPostingsFormat.cs | 52 ++-- .../Memory/FSTOrdTermsReader.cs | 12 +- .../Memory/FSTTermOutputs.cs | 6 +- .../Memory/FSTTermsReader.cs | 8 +- .../Memory/MemoryDocValuesConsumer.cs | 2 +- .../Memory/MemoryDocValuesProducer.cs | 2 +- .../Memory/MemoryPostingsFormat.cs | 24 +- .../Pulsing/PulsingPostingsFormat.cs | 2 +- .../Pulsing/PulsingPostingsReader.cs | 10 +- .../Pulsing/PulsingPostingsWriter.cs | 20 +- .../Sep/SepPostingsReader.cs | 10 +- .../Sep/SepPostingsWriter.cs | 8 +- .../Sep/SepSkipListReader.cs | 2 +- .../Sep/SepSkipListWriter.cs | 2 +- .../SimpleText/SimpleTextDocValuesReader.cs | 42 ++-- .../SimpleText/SimpleTextDocValuesWriter.cs | 38 +-- .../SimpleText/SimpleTextFieldInfosReader.cs | 28 +-- .../SimpleText/SimpleTextFieldInfosWriter.cs | 2 +- .../SimpleText/SimpleTextFieldsReader.cs | 13 +- .../SimpleText/SimpleTextFieldsWriter.cs | 6 +- .../SimpleText/SimpleTextLiveDocsFormat.cs | 6 +- .../SimpleText/SimpleTextSegmentInfoReader.cs | 16 +- .../SimpleTextStoredFieldsReader.cs | 14 +- 
.../SimpleText/SimpleTextTermVectorsReader.cs | 34 +-- .../SimpleText/SimpleTextTermVectorsWriter.cs | 4 +- .../ExpressionComparator.cs | 4 +- .../ScoreFunctionValues.cs | 2 +- src/Lucene.Net.Facet/DrillDownQuery.cs | 2 +- src/Lucene.Net.Facet/DrillSideways.cs | 2 +- src/Lucene.Net.Facet/DrillSidewaysScorer.cs | 6 +- src/Lucene.Net.Facet/FacetsConfig.cs | 2 +- .../Range/LongRangeCounter.cs | 10 +- src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs | 4 +- .../Directory/DirectoryTaxonomyWriter.cs | 6 +- .../Taxonomy/Directory/TaxonomyIndexArrays.cs | 2 +- src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs | 2 +- .../Taxonomy/FloatTaxonomyFacets.cs | 2 +- .../Taxonomy/TaxonomyReader.cs | 2 +- .../AbstractFirstPassGroupingCollector.cs | 10 +- .../BlockGroupingCollector.cs | 8 +- src/Lucene.Net.Grouping/SearchGroup.cs | 10 +- .../Term/TermGroupFacetCollector.cs | 4 +- .../MultiTermHighlighting.cs | 4 +- .../PostingsHighlight/Passage.cs | 4 +- .../PostingsHighlight/PostingsHighlighter.cs | 16 +- .../VectorHighlight/BaseFragListBuilder.cs | 2 +- .../VectorHighlight/FieldTermStack.cs | 2 +- src/Lucene.Net.Join/ToChildBlockJoinQuery.cs | 10 +- .../ToParentBlockJoinCollector.cs | 4 +- src/Lucene.Net.Join/ToParentBlockJoinQuery.cs | 4 +- .../MemoryIndex.MemoryIndexReader.cs | 10 +- src/Lucene.Net.Memory/MemoryIndex.cs | 14 +- src/Lucene.Net.Misc/Document/LazyDocument.cs | 6 +- .../Index/MultiPassIndexSplitter.cs | 2 +- src/Lucene.Net.Misc/Index/PKIndexSplitter.cs | 2 +- src/Lucene.Net.Misc/Index/Sorter/Sorter.cs | 4 +- .../Index/Sorter/SortingAtomicReader.cs | 2 +- .../Index/Sorter/SortingMergePolicy.cs | 2 +- src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs | 4 +- .../Util/Fst/UpToTwoPositiveIntOutputs.cs | 36 +-- src/Lucene.Net.Queries/BooleanFilter.cs | 2 +- src/Lucene.Net.Queries/CommonTermsQuery.cs | 2 +- .../Processors/AnalyzerQueryNodeProcessor.cs | 10 +- .../Simple/SimpleQueryParser.cs | 4 +- .../IndexAndTaxonomyRevision.cs | 2 +- src/Lucene.Net.Replicator/IndexRevision.cs | 2 +- .../ReplicationClient.cs | 4 +- .../Queries/SortedSetSortField.cs | 2 +- .../Prefix/AbstractPrefixTreeFilter.cs | 2 +- .../AbstractVisitingPrefixTreeFilter.cs | 16 +- .../Prefix/ContainsPrefixTreeFilter.cs | 12 +- src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs | 4 +- .../Prefix/Tree/QuadPrefixTree.cs | 4 +- .../Prefix/Tree/SpatialPrefixTree.cs | 4 +- .../Prefix/WithinPrefixTreeFilter.cs | 4 +- .../Vector/DistanceValueSource.cs | 2 +- .../Suggest/Analyzing/AnalyzingSuggester.cs | 20 +- .../Analyzing/BlendedInfixSuggester.cs | 2 +- .../Suggest/Analyzing/FSTUtil.cs | 8 +- .../Suggest/Analyzing/FreeTextSuggester.cs | 14 +- .../Suggest/Analyzing/SuggestStopFilter.cs | 2 +- .../Suggest/Fst/FSTCompletion.cs | 2 +- .../Suggest/Fst/WFSTCompletionLookup.cs | 8 +- .../Suggest/UnsortedInputIterator.cs | 6 +- .../Analysis/LookaheadTokenFilter.cs | 22 +- .../Analysis/MockCharFilter.cs | 2 +- .../Analysis/MockReaderWrapper.cs | 6 +- .../Analysis/MockTokenizer.cs | 16 +- .../Asserting/AssertingDocValuesFormat.cs | 82 +++---- .../Codecs/Asserting/AssertingNormsFormat.cs | 6 +- .../Asserting/AssertingPostingsFormat.cs | 68 +++--- .../Asserting/AssertingStoredFieldsFormat.cs | 20 +- .../Asserting/AssertingTermVectorsFormat.cs | 46 ++-- .../Dummy/DummyCompressingCodec.cs | 2 +- .../Lucene3x/PreFlexRWFieldInfosWriter.cs | 4 +- .../Codecs/Lucene3x/PreFlexRWFieldsWriter.cs | 10 +- .../Codecs/Lucene3x/PreFlexRWNormsConsumer.cs | 2 +- .../Lucene3x/PreFlexRWStoredFieldsWriter.cs | 4 +- .../Lucene3x/PreFlexRWTermVectorsWriter.cs | 6 +- 
.../Codecs/Lucene3x/TermInfosWriter.cs | 12 +- .../Lucene40/Lucene40DocValuesWriter.cs | 16 +- .../Lucene40/Lucene40FieldInfosWriter.cs | 8 +- .../Codecs/Lucene40/Lucene40PostingsWriter.cs | 16 +- .../Codecs/Lucene40/Lucene40SkipListWriter.cs | 8 +- .../Lucene42/Lucene42DocValuesConsumer.cs | 2 +- .../Lucene42/Lucene42FieldInfosWriter.cs | 4 +- .../MockVariableIntBlockPostingsFormat.cs | 2 +- .../MockRandom/MockRandomPostingsFormat.cs | 2 +- .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs | 10 +- .../Index/AllDeletedFilterReader.cs | 2 +- .../Index/AssertingAtomicReader.cs | 222 +++++++++--------- .../Index/BaseDocValuesFormatTestCase.cs | 64 ++--- .../Index/BasePostingsFormatTestCase.cs | 2 +- .../Index/MockRandomMergePolicy.cs | 2 +- .../Index/RandomCodec.cs | 4 +- .../RandomDocumentsWriterPerThreadPool.cs | 8 +- .../Index/RandomIndexWriter.cs | 2 +- .../ThreadedIndexingAndSearchingTestCase.cs | 4 +- .../Search/AssertingBulkScorer.cs | 2 +- .../Search/AssertingCollector.cs | 2 +- .../Search/AssertingScorer.cs | 8 +- .../Search/QueryUtils.cs | 4 +- .../Search/RandomSimilarityProvider.cs | 2 +- .../Search/ShardSearchingTestBase.cs | 20 +- .../Store/MockDirectoryWrapper.cs | 6 +- .../JavaCompatibility/LuceneTestCase.cs | 2 +- .../Util/Automaton/AutomatonTestUtil.cs | 4 +- .../Util/BaseDocIdSetTestCase.cs | 2 +- .../Util/FailOnNonBulkMergesInfoStream.cs | 2 +- .../Util/Fst/FSTTester.cs | 8 +- .../Util/LuceneTestCase.cs | 14 +- .../Util/NullInfoStream.cs | 6 +- .../Util/TestRuleSetupAndRestoreClassEnv.cs | 14 +- .../Util/ThrottledIndexOutput.cs | 2 +- .../CharFilters/TestMappingCharFilter.cs | 2 +- .../Analysis/Core/TestFactories.cs | 2 +- .../Analysis/Core/TestRandomChains.cs | 4 +- .../Analysis/Hunspell/TestAllDictionaries.cs | 12 +- .../Analysis/Hunspell/TestAllDictionaries2.cs | 12 +- .../Analysis/Synonym/TestSynonymMapFilter.cs | 2 +- src/Lucene.Net.Tests.Facet/FacetTestCase.cs | 2 +- .../Range/TestRangeFacetCounts.cs | 2 +- .../TestTaxonomyFacetSumValueSource.cs | 2 +- .../TestDrillSideways.cs | 6 +- .../GroupFacetCollectorTest.cs | 4 +- .../TestPostingsHighlighter.cs | 6 +- src/Lucene.Net.Tests.Join/TestJoinUtil.cs | 2 +- .../Classic/TestQueryParser.cs | 14 +- .../Flexible/Standard/TestStandardQP.cs | 10 +- .../IndexAndTaxonomyReplicationClientTest.cs | 2 +- .../IndexReplicationClientTest.cs | 2 +- .../SpatialTestCase.cs | 6 +- .../Analyzing/AnalyzingSuggesterTest.cs | 2 +- .../Suggest/Analyzing/FuzzySuggesterTest.cs | 4 +- .../Analyzing/TestFreeTextSuggester.cs | 2 +- .../Suggest/LookupBenchmarkTest.cs | 4 +- .../Analysis/TestGraphTokenizers.cs | 2 +- .../Lucene41/TestBlockPostingsFormat3.cs | 2 +- .../PerField/TestPerFieldDocValuesFormat.cs | 2 +- .../Index/TestBackwardsCompatibility.cs | 2 +- .../Index/TestBackwardsCompatibility3x.cs | 2 +- src/Lucene.Net.Tests/Index/TestCodecs.cs | 6 +- src/Lucene.Net.Tests/Index/TestIndexWriter.cs | 4 +- .../Index/TestIndexWriterMerging.cs | 2 +- .../Index/TestIndexableField.cs | 4 +- .../Index/TestLongPostings.cs | 4 +- src/Lucene.Net.Tests/Index/TestNRTThreads.cs | 2 +- src/Lucene.Net.Tests/Index/TestPayloads.cs | 2 +- .../Index/TestPayloadsOnVectors.cs | 4 +- .../Index/TestPostingsOffsets.cs | 8 +- .../Index/TestStressIndexing2.cs | 4 +- src/Lucene.Net.Tests/Index/TestStressNRT.cs | 4 +- src/Lucene.Net.Tests/Index/TestTermsEnum.cs | 2 +- .../Search/Spans/MultiSpansWrapper.cs | 2 +- .../Search/TestBooleanScorer.cs | 2 +- .../Search/TestConstantScoreQuery.cs | 2 +- src/Lucene.Net.Tests/Search/TestFieldCache.cs | 2 +- .../Search/TestMinShouldMatch2.cs 
| 10 +- .../Search/TestMultiThreadTermVectors.cs | 2 +- .../Search/TestNumericRangeQuery32.cs | 4 +- .../Search/TestNumericRangeQuery64.cs | 4 +- .../Search/TestTimeLimitingCollector.cs | 2 +- .../Util/Automaton/TestUTF32ToUTF8.cs | 6 +- src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs | 2 +- .../Util/Packed/TestEliasFanoDocIdSet.cs | 2 +- .../Util/Packed/TestEliasFanoSequence.cs | 6 +- src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs | 2 +- src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs | 2 +- src/Lucene.Net/Analysis/NumericTokenStream.cs | 2 +- .../Analysis/TokenStreamToAutomaton.cs | 4 +- src/Lucene.Net/Analysis/Tokenizer.cs | 2 +- src/Lucene.Net/Codecs/BlockTermState.cs | 2 +- src/Lucene.Net/Codecs/BlockTreeTermsReader.cs | 162 ++++++------- src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs | 66 +++--- src/Lucene.Net/Codecs/CodecUtil.cs | 2 +- .../CompressingStoredFieldsIndexWriter.cs | 8 +- .../CompressingStoredFieldsReader.cs | 18 +- .../CompressingStoredFieldsWriter.cs | 16 +- .../CompressingTermVectorsReader.cs | 20 +- .../CompressingTermVectorsWriter.cs | 38 +-- .../Codecs/Compressing/CompressionMode.cs | 6 +- src/Lucene.Net/Codecs/Compressing/LZ4.cs | 18 +- src/Lucene.Net/Codecs/DocValuesConsumer.cs | 4 +- src/Lucene.Net/Codecs/FieldsConsumer.cs | 2 +- .../Codecs/Lucene3x/Lucene3xFields.cs | 40 ++-- .../Codecs/Lucene3x/Lucene3xNormsProducer.cs | 6 +- .../Lucene3x/Lucene3xSegmentInfoReader.cs | 6 +- .../Lucene3x/Lucene3xStoredFieldsReader.cs | 4 +- .../Lucene3x/Lucene3xTermVectorsReader.cs | 14 +- .../Codecs/Lucene3x/SegmentTermDocs.cs | 2 +- .../Codecs/Lucene3x/SegmentTermEnum.cs | 4 +- .../Codecs/Lucene3x/SegmentTermPositions.cs | 2 +- src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs | 6 +- .../Codecs/Lucene3x/TermInfosReader.cs | 10 +- src/Lucene.Net/Codecs/Lucene40/BitVector.cs | 24 +- .../Codecs/Lucene40/Lucene40LiveDocsFormat.cs | 8 +- .../Codecs/Lucene40/Lucene40PostingsFormat.cs | 2 +- .../Codecs/Lucene40/Lucene40PostingsReader.cs | 36 +-- .../Lucene40/Lucene40StoredFieldsReader.cs | 8 +- .../Lucene40/Lucene40StoredFieldsWriter.cs | 10 +- .../Lucene40/Lucene40TermVectorsReader.cs | 24 +- .../Lucene40/Lucene40TermVectorsWriter.cs | 18 +- src/Lucene.Net/Codecs/Lucene41/ForUtil.cs | 22 +- .../Codecs/Lucene41/Lucene41PostingsFormat.cs | 4 +- .../Codecs/Lucene41/Lucene41PostingsReader.cs | 22 +- .../Codecs/Lucene41/Lucene41PostingsWriter.cs | 12 +- .../Codecs/Lucene41/Lucene41SkipReader.cs | 2 +- .../Codecs/Lucene42/Lucene42NormsConsumer.cs | 4 +- .../Lucene45/Lucene45DocValuesConsumer.cs | 4 +- .../Lucene46/Lucene46FieldInfosWriter.cs | 4 +- src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs | 2 +- .../Codecs/MultiLevelSkipListReader.cs | 2 +- .../Codecs/MultiLevelSkipListWriter.cs | 2 +- .../PerField/PerFieldDocValuesFormat.cs | 10 +- .../Codecs/PerField/PerFieldPostingsFormat.cs | 8 +- src/Lucene.Net/Codecs/PostingsConsumer.cs | 2 +- src/Lucene.Net/Codecs/TermVectorsWriter.cs | 16 +- src/Lucene.Net/Codecs/TermsConsumer.cs | 10 +- src/Lucene.Net/Index/AtomicReader.cs | 8 +- src/Lucene.Net/Index/AtomicReaderContext.cs | 2 +- src/Lucene.Net/Index/AutomatonTermsEnum.cs | 8 +- src/Lucene.Net/Index/BitsSlice.cs | 4 +- src/Lucene.Net/Index/BufferedUpdatesStream.cs | 48 ++-- src/Lucene.Net/Index/ByteSliceReader.cs | 16 +- src/Lucene.Net/Index/ByteSliceWriter.cs | 12 +- src/Lucene.Net/Index/CheckIndex.cs | 28 +-- src/Lucene.Net/Index/CompositeReader.cs | 4 +- .../Index/CompositeReaderContext.cs | 4 +- .../Index/ConcurrentMergeScheduler.cs | 2 +- src/Lucene.Net/Index/DirectoryReader.cs | 6 +- 
src/Lucene.Net/Index/DocFieldProcessor.cs | 8 +- src/Lucene.Net/Index/DocTermOrds.cs | 20 +- src/Lucene.Net/Index/DocValuesFieldUpdates.cs | 4 +- src/Lucene.Net/Index/DocValuesProcessor.cs | 4 +- src/Lucene.Net/Index/DocumentsWriter.cs | 38 +-- .../Index/DocumentsWriterDeleteQueue.cs | 6 +- .../Index/DocumentsWriterFlushControl.cs | 72 +++--- .../Index/DocumentsWriterFlushQueue.cs | 34 +-- .../Index/DocumentsWriterPerThread.cs | 30 +-- .../Index/DocumentsWriterPerThreadPool.cs | 24 +- .../Index/DocumentsWriterStallControl.cs | 8 +- src/Lucene.Net/Index/FieldInfo.cs | 28 +-- src/Lucene.Net/Index/FieldInfos.cs | 10 +- src/Lucene.Net/Index/FilteredTermsEnum.cs | 6 +- src/Lucene.Net/Index/FlushPolicy.cs | 6 +- src/Lucene.Net/Index/FreqProxTermsWriter.cs | 2 +- .../Index/FreqProxTermsWriterPerField.cs | 48 ++-- src/Lucene.Net/Index/FrozenBufferedUpdates.cs | 6 +- src/Lucene.Net/Index/IndexFileDeleter.cs | 42 ++-- src/Lucene.Net/Index/IndexFileNames.cs | 4 +- .../Index/IndexFormatTooNewException.cs | 2 +- .../Index/IndexFormatTooOldException.cs | 4 +- src/Lucene.Net/Index/IndexWriter.cs | 146 ++++++------ src/Lucene.Net/Index/LogMergePolicy.cs | 8 +- src/Lucene.Net/Index/MergePolicy.cs | 8 +- src/Lucene.Net/Index/MergeState.cs | 4 +- src/Lucene.Net/Index/MultiBits.cs | 12 +- src/Lucene.Net/Index/MultiDocValues.cs | 10 +- .../Index/MultiDocsAndPositionsEnum.cs | 4 +- src/Lucene.Net/Index/MultiDocsEnum.cs | 2 +- src/Lucene.Net/Index/MultiFields.cs | 10 +- src/Lucene.Net/Index/MultiTerms.cs | 2 +- src/Lucene.Net/Index/MultiTermsEnum.cs | 20 +- src/Lucene.Net/Index/NormsConsumer.cs | 6 +- .../Index/NumericDocValuesFieldUpdates.cs | 2 +- src/Lucene.Net/Index/OrdTermState.cs | 2 +- .../Index/ParallelCompositeReader.cs | 2 +- src/Lucene.Net/Index/PrefixCodedTerms.cs | 4 +- src/Lucene.Net/Index/ReadersAndUpdates.cs | 48 ++-- src/Lucene.Net/Index/SegmentCoreReaders.cs | 6 +- src/Lucene.Net/Index/SegmentDocValues.cs | 4 +- src/Lucene.Net/Index/SegmentInfo.cs | 4 +- src/Lucene.Net/Index/SegmentInfos.cs | 10 +- src/Lucene.Net/Index/SegmentMerger.cs | 4 +- src/Lucene.Net/Index/SegmentReader.cs | 12 +- .../Index/SimpleMergedSegmentWarmer.cs | 2 +- .../Index/SingletonSortedSetDocValues.cs | 2 +- .../Index/SlowCompositeReaderWrapper.cs | 4 +- .../Index/SnapshotDeletionPolicy.cs | 2 +- .../Index/SortedDocValuesTermsEnum.cs | 4 +- src/Lucene.Net/Index/SortedDocValuesWriter.cs | 4 +- .../Index/SortedSetDocValuesTermsEnum.cs | 4 +- .../Index/SortedSetDocValuesWriter.cs | 4 +- .../Index/StandardDirectoryReader.cs | 6 +- src/Lucene.Net/Index/StoredFieldsProcessor.cs | 6 +- src/Lucene.Net/Index/TermContext.cs | 12 +- src/Lucene.Net/Index/TermVectorsConsumer.cs | 12 +- .../Index/TermVectorsConsumerPerField.cs | 12 +- src/Lucene.Net/Index/TermsHashPerField.cs | 6 +- ...ThreadAffinityDocumentsWriterThreadPool.cs | 6 +- src/Lucene.Net/Search/CachingWrapperFilter.cs | 2 +- src/Lucene.Net/Search/CollectionStatistics.cs | 8 +- .../Search/ConstantScoreAutoRewrite.cs | 6 +- src/Lucene.Net/Search/ConstantScoreQuery.cs | 12 +- src/Lucene.Net/Search/DisjunctionScorer.cs | 4 +- src/Lucene.Net/Search/DocIdSetIterator.cs | 8 +- .../Search/DocTermOrdsRangeFilter.cs | 2 +- .../Search/DocTermOrdsRewriteMethod.cs | 2 +- src/Lucene.Net/Search/ExactPhraseScorer.cs | 2 +- src/Lucene.Net/Search/FieldCacheImpl.cs | 12 +- .../Search/FieldCacheRangeFilter.cs | 4 +- .../Search/FieldCacheRewriteMethod.cs | 2 +- src/Lucene.Net/Search/FieldComparator.cs | 12 +- src/Lucene.Net/Search/FieldValueHitQueue.cs | 8 +- 
src/Lucene.Net/Search/FilteredQuery.cs | 8 +- src/Lucene.Net/Search/FuzzyTermsEnum.cs | 2 +- src/Lucene.Net/Search/IndexSearcher.cs | 6 +- .../Search/MinShouldMatchSumScorer.cs | 4 +- src/Lucene.Net/Search/MultiPhraseQuery.cs | 4 +- .../Search/MultiTermQueryWrapperFilter.cs | 2 +- src/Lucene.Net/Search/NumericRangeQuery.cs | 14 +- src/Lucene.Net/Search/PhraseQuery.cs | 6 +- src/Lucene.Net/Search/QueryRescorer.cs | 2 +- src/Lucene.Net/Search/ReferenceManager.cs | 6 +- src/Lucene.Net/Search/ReqOptSumScorer.cs | 4 +- src/Lucene.Net/Search/ScoringRewrite.cs | 10 +- src/Lucene.Net/Search/SearcherManager.cs | 4 +- .../Search/Similarities/SimilarityBase.cs | 2 +- src/Lucene.Net/Search/SloppyPhraseScorer.cs | 4 +- src/Lucene.Net/Search/SortField.cs | 2 +- src/Lucene.Net/Search/SortRescorer.cs | 2 +- .../Search/Spans/NearSpansOrdered.cs | 6 +- src/Lucene.Net/Search/Spans/SpanFirstQuery.cs | 2 +- .../Search/Spans/SpanPositionRangeQuery.cs | 2 +- src/Lucene.Net/Search/Spans/TermSpans.cs | 2 +- .../Search/TermCollectingRewrite.cs | 2 +- src/Lucene.Net/Search/TermQuery.cs | 10 +- src/Lucene.Net/Search/TermScorer.cs | 2 +- src/Lucene.Net/Search/TermStatistics.cs | 4 +- src/Lucene.Net/Search/TopDocs.cs | 10 +- src/Lucene.Net/Search/TopScoreDocCollector.cs | 12 +- src/Lucene.Net/Search/TopTermsRewrite.cs | 18 +- src/Lucene.Net/Store/BaseDirectory.cs | 2 +- src/Lucene.Net/Store/BufferedIndexInput.cs | 2 +- src/Lucene.Net/Store/ByteArrayDataOutput.cs | 4 +- src/Lucene.Net/Store/ByteBufferIndexInput.cs | 8 +- src/Lucene.Net/Store/CompoundFileDirectory.cs | 8 +- src/Lucene.Net/Store/CompoundFileWriter.cs | 18 +- src/Lucene.Net/Store/DataInput.cs | 2 +- src/Lucene.Net/Store/DataOutput.cs | 4 +- src/Lucene.Net/Store/IOContext.cs | 6 +- src/Lucene.Net/Store/MMapDirectory.cs | 2 +- src/Lucene.Net/Store/NIOFSDirectory.cs | 6 +- src/Lucene.Net/Store/RAMOutputStream.cs | 2 +- src/Lucene.Net/Store/SimpleFSDirectory.cs | 2 +- src/Lucene.Net/Support/Collections.cs | 2 +- .../Support/Diagnostics/Debugging.cs | 65 ++--- src/Lucene.Net/Util/ArrayUtil.cs | 40 ++-- src/Lucene.Net/Util/AttributeSource.cs | 2 +- src/Lucene.Net/Util/Automaton/Automaton.cs | 4 +- .../Util/Automaton/BasicOperations.cs | 8 +- .../Util/Automaton/CompiledAutomaton.cs | 8 +- .../Automaton/DaciukMihovAutomatonBuilder.cs | 14 +- .../Automaton/Lev1ParametricDescription.cs | 4 +- .../Automaton/Lev1TParametricDescription.cs | 4 +- .../Automaton/Lev2ParametricDescription.cs | 4 +- .../Automaton/Lev2TParametricDescription.cs | 4 +- .../Util/Automaton/LevenshteinAutomata.cs | 2 +- src/Lucene.Net/Util/Automaton/SortedIntSet.cs | 2 +- src/Lucene.Net/Util/Automaton/State.cs | 2 +- src/Lucene.Net/Util/Automaton/Transition.cs | 6 +- src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs | 4 +- src/Lucene.Net/Util/BroadWord.cs | 4 +- src/Lucene.Net/Util/ByteBlockPool.cs | 2 +- src/Lucene.Net/Util/BytesRef.cs | 12 +- src/Lucene.Net/Util/BytesRefArray.cs | 2 +- src/Lucene.Net/Util/BytesRefHash.cs | 38 +-- src/Lucene.Net/Util/CharsRef.cs | 4 +- src/Lucene.Net/Util/FilterIterator.cs | 2 +- src/Lucene.Net/Util/FixedBitSet.cs | 30 +-- src/Lucene.Net/Util/Fst/Builder.cs | 44 ++-- .../Util/Fst/ByteSequenceOutputs.cs | 22 +- src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs | 2 +- src/Lucene.Net/Util/Fst/BytesStore.cs | 18 +- .../Util/Fst/CharSequenceOutputs.cs | 22 +- src/Lucene.Net/Util/Fst/FST.cs | 76 +++--- src/Lucene.Net/Util/Fst/FSTEnum.cs | 20 +- src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs | 22 +- src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs | 2 +- 
src/Lucene.Net/Util/Fst/NoOutputs.cs | 16 +- src/Lucene.Net/Util/Fst/NodeHash.cs | 2 +- src/Lucene.Net/Util/Fst/PairOutputs.cs | 18 +- src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs | 24 +- src/Lucene.Net/Util/Fst/Util.cs | 12 +- .../Util/IndexableBinaryStringTools.cs | 4 +- src/Lucene.Net/Util/InfoStream.cs | 2 +- src/Lucene.Net/Util/IntBlockPool.cs | 14 +- src/Lucene.Net/Util/IntsRef.cs | 4 +- src/Lucene.Net/Util/LongBitSet.cs | 30 +-- src/Lucene.Net/Util/LongsRef.cs | 4 +- src/Lucene.Net/Util/MergedIterator.cs | 2 +- src/Lucene.Net/Util/OfflineSorter.cs | 12 +- src/Lucene.Net/Util/OpenBitSet.cs | 34 +-- src/Lucene.Net/Util/PForDeltaDocIdSet.cs | 28 +-- .../Packed/AbstractAppendingLongBuffer.cs | 10 +- .../Util/Packed/AbstractBlockPackedWriter.cs | 2 +- .../Util/Packed/AbstractPagedMutable.cs | 6 +- .../Util/Packed/BlockPackedReader.cs | 2 +- .../Util/Packed/BlockPackedReaderIterator.cs | 12 +- .../Util/Packed/BlockPackedWriter.cs | 2 +- src/Lucene.Net/Util/Packed/BulkOperation.cs | 4 +- .../Util/Packed/BulkOperationPacked.cs | 16 +- src/Lucene.Net/Util/Packed/Direct16.cs | 14 +- src/Lucene.Net/Util/Packed/Direct32.cs | 14 +- src/Lucene.Net/Util/Packed/Direct64.cs | 12 +- src/Lucene.Net/Util/Packed/Direct8.cs | 14 +- .../Util/Packed/EliasFanoDecoder.cs | 28 +-- .../Util/Packed/EliasFanoEncoder.cs | 4 +- src/Lucene.Net/Util/Packed/GrowableWriter.cs | 2 +- .../Packed/MonotonicAppendingLongBuffer.cs | 2 +- .../Util/Packed/MonotonicBlockPackedReader.cs | 2 +- .../Util/Packed/MonotonicBlockPackedWriter.cs | 4 +- .../Util/Packed/Packed16ThreeBlocks.cs | 12 +- src/Lucene.Net/Util/Packed/Packed64.cs | 36 +-- .../Util/Packed/Packed64SingleBlock.cs | 40 ++-- .../Util/Packed/Packed8ThreeBlocks.cs | 12 +- src/Lucene.Net/Util/Packed/PackedDataInput.cs | 2 +- .../Util/Packed/PackedDataOutput.cs | 2 +- src/Lucene.Net/Util/Packed/PackedInts.cs | 70 +++--- .../Util/Packed/PackedReaderIterator.cs | 8 +- src/Lucene.Net/Util/Packed/PackedWriter.cs | 6 +- src/Lucene.Net/Util/Packed/PagedMutable.cs | 2 +- src/Lucene.Net/Util/PagedBytes.cs | 14 +- src/Lucene.Net/Util/QueryBuilder.cs | 12 +- src/Lucene.Net/Util/RamUsageEstimator.cs | 12 +- .../Util/RecyclingByteBlockAllocator.cs | 6 +- .../Util/RecyclingIntBlockAllocator.cs | 6 +- src/Lucene.Net/Util/RollingBuffer.cs | 6 +- src/Lucene.Net/Util/SentinelIntSet.cs | 4 +- src/Lucene.Net/Util/Sorter.cs | 2 +- src/Lucene.Net/Util/TimSorter.cs | 18 +- src/Lucene.Net/Util/UnicodeUtil.cs | 4 +- src/Lucene.Net/Util/WAH8DocIdSet.cs | 54 ++--- .../TestICUPostingsHighlighter.cs | 6 +- 504 files changed, 2598 insertions(+), 2598 deletions(-) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs index f490dc0f37..f566eefe26 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs @@ -114,7 +114,7 @@ protected virtual void AddOffCorrectMap(int off, int cumulativeDiff) } int offset = offsets[(size == 0) ? 
0 : size - 1]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => size == 0 || off >= offset, + if (Debugging.AssertsEnabled) Debugging.Assert(size == 0 || off >= offset, () => "Offset #" + size + "(" + off + ") is less than the last recorded offset " + offset + "\n" + Arrays.ToString(offsets) + "\n" + Arrays.ToString(diffs)); if (size == 0 || off != offsets[size - 1]) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs index 611e72b08c..5bff511b29 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs @@ -30953,7 +30953,7 @@ internal void Restart() /// internal int NextChar() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !IsRead, () => "Attempting to read past the end of a segment."); + if (Debugging.AssertsEnabled) Debugging.Assert(!IsRead, () => "Attempting to read past the end of a segment."); return m_buf[pos++]; } @@ -31378,7 +31378,7 @@ private int NextChar() } catch (Exception /*e*/) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing code point '" + decimalCharRef + "'"); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing code point '" + decimalCharRef + "'"); } if (codePoint <= 0x10FFFF) { @@ -31638,7 +31638,7 @@ string hexCharRef } catch (Exception /*e*/) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing hex code point '" + hexCharRef + "'"); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing hex code point '" + hexCharRef + "'"); } if (codePoint <= 0x10FFFF) { @@ -31901,7 +31901,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(2, 6 - 2) + "'"); } try @@ -31910,7 +31910,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); } // add (previously matched input length) + (this match length) - (substitution length) cumulativeDiff += inputSegment.Length + YyLength - 2; @@ -31932,7 +31932,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(2, 6 - 2) + "'"); } try @@ -31941,7 +31941,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(9, 14 - 9) + "'"); } if (char.IsLowSurrogate(lowSurrogate)) @@ -31973,7 +31973,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception 
parsing high surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(1, 6 - 1) + "'"); } if (char.IsHighSurrogate(highSurrogate)) @@ -31986,7 +31986,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(10, 14 - 10) + "'"); } // add (previously matched input length) + (this match length) - (substitution length) @@ -32013,7 +32013,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing high surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing high surrogate '" + surrogatePair.Substring(1, 6 - 1) + "'"); } if (char.IsHighSurrogate(highSurrogate)) @@ -32025,7 +32025,7 @@ string hexCharRef } catch (Exception /*e*/) { // should never happen - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "Exception parsing low surrogate '" + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "Exception parsing low surrogate '" + surrogatePair.Substring(9, 14 - 9) + "'"); } if (char.IsLowSurrogate(lowSurrogate)) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs index a921887db2..dec5db1be3 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilter.cs @@ -136,7 +136,7 @@ public override int Read() if (!FST.TargetHasArcs(arc)) { // Fast pass for single character match: - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); lastMatchLen = 1; lastMatch = arc.Output; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs index 7186c94660..e3963d21e5 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/NormalizeCharMap.cs @@ -54,7 +54,7 @@ private NormalizeCharMap(FST map) map.ReadFirstRealTargetArc(scratchArc.Target, scratchArc, fstReader); while (true) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchArc.Label != FST.END_LABEL); + if (Debugging.AssertsEnabled) Debugging.Assert(scratchArc.Label != FST.END_LABEL); cachedRootArcs[Convert.ToChar((char)scratchArc.Label)] = (new FST.Arc()).CopyFrom(scratchArc); if (scratchArc.IsLast) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs index b60791e833..bf5ecabd2f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs @@ -110,7 +110,7 @@ public override sealed bool IncrementToken() { if (m_tokens.Count > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => current != null); + if (Debugging.AssertsEnabled) Debugging.Assert(current != null); CompoundToken token = m_tokens.Dequeue(); RestoreState(current); // keep all other attributes untouched 
m_termAtt.SetEmpty().Append(token.Text); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs index bf49cde4a2..5d41c25b0c 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemmer.cs @@ -47,7 +47,7 @@ static GalicianStemmer() /// new valid length, stemmed public virtual int Stem(char[] s, int len) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); + if (Debugging.AssertsEnabled) Debugging.Assert(s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); len = plural.Apply(s, len); len = unification.Apply(s, len); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs index 305f0d1e13..0f4e984970 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs @@ -375,7 +375,7 @@ private void ReadAffixFile(Stream affixStream, Encoding decoder) strip.CopyTo(0, stripData, currentOffset, strip.Length - 0); currentOffset += strip.Length; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentIndex == seenStrips.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(currentIndex == seenStrips.Count); stripOffsets[currentIndex] = currentOffset; } @@ -424,7 +424,7 @@ private void ParseAffix(JCG.SortedDictionary> affixes, stri for (int i = 0; i < numLines; i++) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => affixWriter.Position == currentAffix << 3); + if (Debugging.AssertsEnabled) Debugging.Assert(affixWriter.Position == currentAffix << 3); string line = reader.ReadLine(); string[] ruleArgs = whitespacePattern.Split(line).TrimEnd(); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs index 4ad0ee10a8..93883168aa 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs @@ -210,7 +210,7 @@ private IList Stem(char[] word, int length, int previous, int prevFlag // cross check incoming continuation class (flag of previous affix) against list. dictionary.flagLookup.Get(append, scratch); char[] appendFlags = Dictionary.DecodeFlags(scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => prevFlag >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(prevFlag >= 0); compatible = HasCrossCheckedFlag((char)prevFlag, appendFlags, false); } else @@ -279,7 +279,7 @@ private IList Stem(char[] word, int length, int previous, int prevFlag // cross check incoming continuation class (flag of previous affix) against list. 
dictionary.flagLookup.Get(append, scratch); char[] appendFlags = Dictionary.DecodeFlags(scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => prevFlag >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(prevFlag >= 0); compatible = HasCrossCheckedFlag((char)prevFlag, appendFlags, previousWasPrefix); } else diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs index aa76247b45..f219184f36 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilter.cs @@ -92,7 +92,7 @@ public override bool IncrementToken() { if (state != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => preserveOriginal, () => "state should only be captured if preserveOriginal is true"); + if (Debugging.AssertsEnabled) Debugging.Assert(preserveOriginal, () => "state should only be captured if preserveOriginal is true"); RestoreState(state); posIncAttr.PositionIncrement = 0; state = null; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs index 225fae8a66..3e384bde4e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/SingleTokenTokenStream.cs @@ -35,11 +35,11 @@ public sealed class SingleTokenTokenStream : TokenStream public SingleTokenTokenStream(Token token) : base(Token.TOKEN_ATTRIBUTE_FACTORY) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => token != null); + if (Debugging.AssertsEnabled) Debugging.Assert(token != null); this.singleToken = (Token)token.Clone(); tokenAtt = AddAttribute(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => tokenAtt is Token); + if (Debugging.AssertsEnabled) Debugging.Assert(tokenAtt is Token); } public override sealed bool IncrementToken() diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs index 60bc879315..d03c06d794 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizer.cs @@ -232,7 +232,7 @@ public override sealed bool IncrementToken() { if (bufferStart + 1 + minGram > bufferEnd) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => exhausted); + if (Debugging.AssertsEnabled) Debugging.Assert(exhausted); return false; } Consume(); @@ -295,7 +295,7 @@ protected virtual bool IsTokenChar(int chr) public override sealed void End() { base.End(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferStart <= bufferEnd); + if (Debugging.AssertsEnabled) Debugging.Assert(bufferStart <= bufferEnd); int endOffset = offset; for (int i = bufferStart; i < bufferEnd; ++i) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs index e901768536..1b6f97680f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternCaptureGroupTokenFilter.cs @@ -155,7 +155,7 @@ public override bool IncrementToken() { if (currentMatcher != -1 && NextCapture()) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null); + if 
(Debugging.AssertsEnabled) Debugging.Assert(state != null); ClearAttributes(); RestoreState(state); int start = matchers[currentMatcher].Groups[currentGroup[currentMatcher]].Index; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs index 8943de0589..0d0a94c103 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemmer.cs @@ -46,7 +46,7 @@ static PortugueseStemmer() /// new valid length, stemmed public virtual int Stem(char[] s, int len) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); + if (Debugging.AssertsEnabled) Debugging.Assert(s.Length >= len + 1, () => "this stemmer requires an oversized array of at least 1"); len = plural.Apply(s, len); len = adverb.Apply(s, len); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs index 594c88bf68..108a87aa88 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/RSLPStemmerBase.cs @@ -305,7 +305,7 @@ private static Step ParseStep(TextReader r, string header) { throw new Exception("Illegal Step header specified at line " /*+ r.LineNumber*/); // TODO Line number } - //if (Debugging.AssertsEnabled) Debugging.Assert(() => headerPattern.GetGroupNumbers().Length == 4); // Not possible to read the number of groups that matched in .NET + //if (Debugging.AssertsEnabled) Debugging.Assert(headerPattern.GetGroupNumbers().Length == 4); // Not possible to read the number of groups that matched in .NET string name = matcher.Groups[1].Value; int min = int.Parse(matcher.Groups[2].Value, CultureInfo.InvariantCulture); int type = int.Parse(matcher.Groups[3].Value, CultureInfo.InvariantCulture); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs index a0c0786d5b..e118268732 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs @@ -176,7 +176,7 @@ public virtual void Reset() public virtual CharsRef PullNext() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => upto < count); + if (Debugging.AssertsEnabled) Debugging.Assert(upto < count); lastEndOffset = endOffsets[upto]; lastPosLength = posLengths[upto]; CharsRef result = outputs[upto++]; @@ -306,7 +306,7 @@ private void Capture() nextWrite = RollIncr(nextWrite); // Buffer head should never catch up to tail: - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextWrite != nextRead); + if (Debugging.AssertsEnabled) Debugging.Assert(nextWrite != nextRead); } /* @@ -325,7 +325,7 @@ private void Parse() { //System.out.println("\nS: parse"); - if (Debugging.AssertsEnabled) Debugging.Assert(() => inputSkipCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(inputSkipCount == 0); int curNextRead = nextRead; @@ -337,7 +337,7 @@ private void Parse() BytesRef pendingOutput = fst.Outputs.NoOutput; fst.GetFirstArc(scratchArc); - if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchArc.Output == fst.Outputs.NoOutput); + if (Debugging.AssertsEnabled) Debugging.Assert(scratchArc.Output == fst.Outputs.NoOutput); int tokenCount = 0; @@ -364,7 +364,7 @@ private void Parse() else { //System.out.println(" 
input.incrToken"); - if (Debugging.AssertsEnabled) Debugging.Assert(() => futureInputs[nextWrite].consumed); + if (Debugging.AssertsEnabled) Debugging.Assert(futureInputs[nextWrite].consumed); // Not correct: a syn match whose output is longer // than its input can set future inputs keepOrig // to true: @@ -480,7 +480,7 @@ private void Parse() } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => finished); + if (Debugging.AssertsEnabled) Debugging.Assert(finished); } //System.out.println(" parse done inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead + " nextWrite=" + nextWrite); @@ -510,7 +510,7 @@ private void AddOutput(BytesRef bytes, int matchInputLength, int matchEndOffset) int outputLen = chIDX - lastStart; // Caller is not allowed to have empty string in // the output: - if (Debugging.AssertsEnabled) Debugging.Assert(() => outputLen > 0, () => "output contains empty string: " + scratchChars); + if (Debugging.AssertsEnabled) Debugging.Assert(outputLen > 0, () => "output contains empty string: " + scratchChars); int endOffset; int posLen; if (chIDX == chEnd && lastStart == scratchChars.Offset) @@ -536,7 +536,7 @@ private void AddOutput(BytesRef bytes, int matchInputLength, int matchEndOffset) lastStart = 1 + chIDX; //System.out.println(" slot=" + outputUpto + " keepOrig=" + keepOrig); outputUpto = RollIncr(outputUpto); - if (Debugging.AssertsEnabled) Debugging.Assert(() => futureOutputs[outputUpto].posIncr == 1, () => "outputUpto=" + outputUpto + " vs nextWrite=" + nextWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(futureOutputs[outputUpto].posIncr == 1, () => "outputUpto=" + outputUpto + " vs nextWrite=" + nextWrite); } } } @@ -602,7 +602,7 @@ public override bool IncrementToken() { // Pass-through case: return token we just pulled // but didn't capture: - if (Debugging.AssertsEnabled) Debugging.Assert(() => inputSkipCount == 1, () => "inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead); + if (Debugging.AssertsEnabled) Debugging.Assert(inputSkipCount == 1, () => "inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead); } input.Reset(); if (outputs.count > 0) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs index 539233ee90..2e2e9b5b47 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymMap.cs @@ -175,8 +175,8 @@ internal virtual void Add(CharsRef input, int numInputWords, CharsRef output, in if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !HasHoles(input), () => "input has holes: " + input); - Debugging.Assert(() => !HasHoles(output), () => "output has holes: " + output); + Debugging.Assert(!HasHoles(input), () => "input has holes: " + input); + Debugging.Assert(!HasHoles(output), () => "output has holes: " + output); } //System.out.println("fmap.add input=" + input + " numInputWords=" + numInputWords + " output=" + output + " numOutputWords=" + numOutputWords); @@ -284,7 +284,7 @@ public virtual SynonymMap Build() scratch.Grow(estimatedSize); scratchOutput.Reset(scratch.Bytes, scratch.Offset, scratch.Bytes.Length); - if (Debugging.AssertsEnabled) Debugging.Assert(() => scratch.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(scratch.Offset == 0); // now write our output data: int count = 0; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs index 
a4be3e6b85..b2dd01af15 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
@@ -632,7 +632,7 @@ public virtual void PutAll(IEnumerable> collection)
 
         private void Rehash()
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => keys.Length == values.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(keys.Length == values.Length);
             int newSize = 2 * keys.Length;
             char[][] oldkeys = keys;
             MapValue[] oldvalues = values;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs
index 9eda6068b5..bfb15ba8f8 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharTokenizer.cs
@@ -169,7 +169,7 @@ public override sealed bool IncrementToken()
                 {
                     if (length == 0) // start of token
                     {
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => start == -1);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(start == -1);
                         start = offset + bufferIndex - charCount;
                         end = start;
                     } // check if a supplementary could run out of bounds
@@ -191,7 +191,7 @@ public override sealed bool IncrementToken()
             }
 
             termAtt.Length = length;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => start != -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(start != -1);
             offsetAtt.SetOffset(CorrectOffset(start), finalOffset = CorrectOffset(end));
             return true;
         }
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
index 38229512cd..19820649ba 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharacterUtils.cs
@@ -178,8 +178,8 @@ public static CharacterBuffer NewCharacterBuffer(int bufferSize)
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => buffer.Length >= length);
-                Debugging.Assert(() => offset <= 0 && offset <= buffer.Length);
+                Debugging.Assert(buffer.Length >= length);
+                Debugging.Assert(offset <= 0 && offset <= buffer.Length);
             }
 
             // Slight optimization, eliminating a few method calls internally
@@ -212,8 +212,8 @@ public static CharacterBuffer NewCharacterBuffer(int bufferSize)
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => buffer.Length >= length);
-                Debugging.Assert(() => offset <= 0 && offset <= buffer.Length);
+                Debugging.Assert(buffer.Length >= length);
+                Debugging.Assert(offset <= 0 && offset <= buffer.Length);
             }
 
             // Slight optimization, eliminating a few method calls internally
@@ -355,7 +355,7 @@ public override int CodePointAt(char[] chars, int offset, int limit)
 
             public override bool Fill(CharacterBuffer buffer, TextReader reader, int numChars)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer.Buffer.Length >= 2);
+                if (Debugging.AssertsEnabled) Debugging.Assert(buffer.Buffer.Length >= 2);
                 if (numChars < 2 || numChars > buffer.Buffer.Length)
                 {
                     throw new ArgumentException("numChars must be >= 2 and <= the buffer size");
@@ -476,7 +476,7 @@ public override int CodePointAt(char[] chars, int offset, int limit)
 
             public override bool Fill(CharacterBuffer buffer, TextReader reader, int numChars)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer.Buffer.Length >= 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(buffer.Buffer.Length >= 1);
                 if (numChars < 1 || numChars > buffer.Buffer.Length)
                 {
                     throw new ArgumentException("numChars must be >= 1 and <= the buffer size");
@@ -541,8 +541,8 @@ public override void ToLower(char[] buffer, int offset, int limit)
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => buffer.Length >= limit);
-                Debugging.Assert(() => offset <= 0 && offset <= buffer.Length);
+                Debugging.Assert(buffer.Length >= limit);
+                Debugging.Assert(offset <= 0 && offset <= buffer.Length);
             }
 
             for (int i = offset; i < limit;)
@@ -557,8 +557,8 @@ public override void ToUpper(char[] buffer, int offset, int limit)
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => buffer.Length >= limit);
-                Debugging.Assert(() => offset <= 0 && offset <= buffer.Length);
+                Debugging.Assert(buffer.Length >= limit);
+                Debugging.Assert(offset <= 0 && offset <= buffer.Length);
             }
 
             for (int i = offset; i < limit;)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs
index 49d2d81e2f..a8407598cc 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/RollingCharBuffer.cs
@@ -108,10 +108,10 @@ public int Get(int pos)
             else
             {
                 // Cannot read from future (except by 1):
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < nextPos);
+                if (Debugging.AssertsEnabled) Debugging.Assert(pos < nextPos);
 
                 // Cannot read from already freed past:
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => nextPos - pos <= count, () => "nextPos=" + nextPos + " pos=" + pos + " count=" + count);
+                if (Debugging.AssertsEnabled) Debugging.Assert(nextPos - pos <= count, () => "nextPos=" + nextPos + " pos=" + pos + " count=" + count);
 
                 return buffer[GetIndex(pos)];
             }
@@ -130,7 +130,7 @@ private int GetIndex(int pos)
             {
                 // Wrap:
                 index += buffer.Length;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0);
             }
             return index;
         }
@@ -139,8 +139,8 @@ public char[] Get(int posStart, int length)
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => length > 0);
-                Debugging.Assert(() => InBounds(posStart), () => "posStart=" + posStart + " length=" + length);
+                Debugging.Assert(length > 0);
+                Debugging.Assert(InBounds(posStart), () => "posStart=" + posStart + " length=" + length);
             }
             //System.out.println("    buffer.Get posStart=" + posStart + " len=" + length);
@@ -171,14 +171,14 @@ public void FreeBefore(int pos)
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => pos >= 0);
-                Debugging.Assert(() => pos <= nextPos);
+                Debugging.Assert(pos >= 0);
+                Debugging.Assert(pos <= nextPos);
             }
             int newCount = nextPos - pos;
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => newCount <= count, () => "newCount=" + newCount + " count=" + count);
-                Debugging.Assert(() => newCount <= buffer.Length, () => "newCount=" + newCount + " buf.length=" + buffer.Length);
+                Debugging.Assert(newCount <= count, () => "newCount=" + newCount + " count=" + count);
+                Debugging.Assert(newCount <= buffer.Length, () => "newCount=" + newCount + " buf.length=" + buffer.Length);
             }
             count = newCount;
         }
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs
index 39c6594569..34ef795d64 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs
@@ -184,7 +184,7 @@ private void Refill()
         /// commons-io's readFully, but without bugs if offset != 0
         private static int Read(TextReader input, char[] buffer, int offset, int length)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "length must not be negative: " + length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(length >= 0, () => "length must not be negative: " + length);
 
             int remaining = length;
             while (remaining > 0)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs
index d4eb3e7612..a6f0ebc41e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs
@@ -117,7 +117,7 @@ public static bool EndsWith(char[] s, int len, char[] suffix)
         /// length of input buffer after deletion
         public static int Delete(char[] s, int pos, int len)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < len);
+            if (Debugging.AssertsEnabled) Debugging.Assert(pos < len);
             if (pos < len - 1) // don't arraycopy if asked to delete last character
             {
                 Array.Copy(s, pos + 1, s, pos, len - pos - 1);
@@ -135,7 +135,7 @@ public static int Delete(char[] s, int pos, int len)
         /// length of input buffer after deletion
         public static int DeleteN(char[] s, int pos, int len, int nChars)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => pos + nChars <= len);
+            if (Debugging.AssertsEnabled) Debugging.Assert(pos + nChars <= len);
             if (pos + nChars < len) // don't arraycopy if asked to delete the last characters
             {
                 Array.Copy(s, pos + nChars, s, pos, len - pos - nChars);
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
index fcd4bb03ff..49ff4a3ed1 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs
@@ -128,7 +128,7 @@ private void ReadInputToBuffer()
             bool hasRemainingChars = CharacterUtils.GetInstance(LuceneVersion.LUCENE_CURRENT).Fill(tmpBuffer, m_input);
 #pragma warning restore 612, 618
 
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => tmpBuffer.Offset == 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(tmpBuffer.Offset == 0);
             inputBuffer.Append(tmpBuffer.Buffer, 0, tmpBuffer.Length);
 
             if (hasRemainingChars == false)
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs
index 9eb4d514db..1afbfc12d8 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizer.cs
@@ -194,7 +194,7 @@ private void Refill()
         /// commons-io's readFully, but without bugs if offset != 0
         private static int Read(TextReader input, char[] buffer, int offset, int length)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "length must not be negative: " + length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(length >= 0, () => "length must not be negative: " + length);
 
             int remaining = length;
             while (remaining > 0)
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
index 8629acd1f8..823e8a65bb 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
@@ -102,7 +102,7 @@ public ICUTokenizerFactory(IDictionary args)
 
         public virtual void Inform(IResourceLoader loader)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => tailored != null, () => "init must be called first!");
+            if (Debugging.AssertsEnabled) Debugging.Assert(tailored != null, () => "init must be called first!");
             if (tailored.Count == 0)
             {
                 config = new DefaultICUTokenizerConfig(cjkAsWords, myanmarAsWords);
@@ -162,7 +162,7 @@ private BreakIterator ParseRules(string filename, IResourceLoader loader)
 
         public override Tokenizer Create(AttributeSource.AttributeFactory factory, TextReader input)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => config != null, () => "inform must be called first!");
+            if (Debugging.AssertsEnabled) Debugging.Assert(config != null, () => "inform must be called first!");
             return new ICUTokenizer(factory, input, config);
         }
     }
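Where several assertions run back to back, the hunks above (CharacterUtils, RollingCharBuffer, and GraphvizFormatter below) hoist a single AssertsEnabled check around the whole group rather than repeating the guard per statement. A condensed sketch of that shape, with illustrative variable names rather than ones taken from the patch:

    // One guard for the whole group: a single branch, and no closures,
    // when assertions are disabled at runtime.
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(buffer.Length >= limit);
        Debugging.Assert(offset >= 0 && offset <= buffer.Length, () => "offset=" + offset);
    }
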
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs
index 354c377f02..bab5b6693f 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/TokenInfoFST.cs
@@ -75,7 +75,7 @@ public TokenInfoFST(FST fst, bool fasterButMoreRam)
         {
             if (useCache && ch >= 0x3040 && ch <= cacheCeiling)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => ch != FST.END_LABEL);
+                if (Debugging.AssertsEnabled) Debugging.Assert(ch != FST.END_LABEL);
                 FST.Arc result = rootCache[ch - 0x3040];
                 if (result == null)
                 {
diff --git a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs
index 617f748c5a..0570a17060 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs
@@ -91,8 +91,8 @@ private void SetBestPathMap(WrappedPositionArray positions, int startPos, Positi
 
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => !bestPathMap.ContainsKey(fromNodeID));
-                    Debugging.Assert(() => !bestPathMap.Values.Contains(toNodeID));
+                    Debugging.Assert(!bestPathMap.ContainsKey(fromNodeID));
+                    Debugging.Assert(!bestPathMap.Values.Contains(toNodeID));
                 }
                 bestPathMap[fromNodeID] = toNodeID;
                 pos = backPos;
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
index fb899c70f2..ac781db9db 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs
@@ -133,7 +133,7 @@ static JapaneseIterationMarkCharFilter()
             // Make katakana dakuten map from hiragana map
             char codePointDifference = (char)('\u30ab' - '\u304b'); // カ - か
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => h2d.Length == k2d.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(h2d.Length == k2d.Length);
             for (int i = 0; i < k2d.Length; i++)
             {
                 k2d[i] = (char)(h2d[i] + codePointDifference);
diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
index 8210652f07..01295b57f7 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs
@@ -314,7 +314,7 @@ private void Add(IDictionary dict, Position fromPosData, int endPos, int wordID,
             int leftID = dict.GetLeftId(wordID);
             int leastCost = int.MaxValue;
             int leastIDX = -1;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => fromPosData.count > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(fromPosData.count > 0);
             for (int idx = 0; idx < fromPosData.count; idx++)
             {
                 // Cost is path cost so far, plus word cost (added at
@@ -356,7 +356,7 @@ private void Add(IDictionary dict, Position fromPosData, int endPos, int wordID,
             }
 
             //positions.get(endPos).add(leastCost, dict.getRightId(wordID), fromPosData.pos, leastIDX, wordID, type);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => leftID == dict.GetRightId(wordID));
+            if (Debugging.AssertsEnabled) Debugging.Assert(leftID == dict.GetRightId(wordID));
             positions.Get(endPos).Add(leastCost, leftID, fromPosData.pos, leastIDX, wordID, type);
         }
 
@@ -387,7 +387,7 @@ public override bool IncrementToken()
                 int position = token.Position;
                 int length = token.Length;
                 ClearAttributes();
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => length > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(length > 0);
                 //System.out.println("off=" + token.getOffset() + " len=" + length + " vs " + token.getSurfaceForm().length);
                 termAtt.CopyBuffer(token.SurfaceForm, token.Offset, length);
                 offsetAtt.SetOffset(CorrectOffset(position), CorrectOffset(position + length));
@@ -402,7 +402,7 @@ public override bool IncrementToken()
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => token.Position > lastTokenPos);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(token.Position > lastTokenPos);
                     posIncAtt.PositionIncrement = 1;
                     posLengthAtt.PositionLength = 1;
                 }
@@ -511,7 +511,7 @@ private void Parse()
                 }
 
                 // We will always have at least one live path:
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => leastIDX != -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(leastIDX != -1);
 
                 // Second pass: prune all but the best path:
                 for (int pos2 = pos; pos2 < positions.GetNextPos(); pos2++)
@@ -544,7 +544,7 @@ private void Parse()
                 if (pos != leastPosData.pos)
                 {
                     // We jumped into a future position:
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < leastPosData.pos);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(pos < leastPosData.pos);
                     pos = leastPosData.pos;
                 }
 
@@ -913,10 +913,10 @@ private void Backtrace(Position endPosData, int fromIDX)
             {
                 //System.out.println("BT: back pos=" + pos + " bestIDX=" + bestIDX);
                 Position posData = positions.Get(pos);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => bestIDX < posData.count);
+                if (Debugging.AssertsEnabled) Debugging.Assert(bestIDX < posData.count);
 
                 int backPos = posData.backPos[bestIDX];
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => backPos >= lastBackTracePos, () => "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos);
+                if (Debugging.AssertsEnabled) Debugging.Assert(backPos >= lastBackTracePos, () => "backPos=" + backPos + " vs lastBackTracePos=" + lastBackTracePos);
                 int length = pos - backPos;
                 JapaneseTokenizerType backType = posData.backType[bestIDX];
                 int backID = posData.backID[bestIDX];
@@ -989,7 +989,7 @@ private void Backtrace(Position endPosData, int fromIDX)
                     if (leastIDX != -1 && leastCost <= maxCost && posData.backPos[leastIDX] != backPos)
                     {
                         // We should have pruned the altToken from the graph:
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => posData.backPos[leastIDX] != backPos);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(posData.backPos[leastIDX] != backPos);
 
                         // Save the current compound token, to output when
                         // this alternate path joins back:
@@ -1024,7 +1024,7 @@ private void Backtrace(Position endPosData, int fromIDX)
                 }
 
                 int offset = backPos - lastBackTracePos;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(offset >= 0);
 
                 if (altToken != null && altToken.Position >= backPos)
                 {
@@ -1035,7 +1035,7 @@ private void Backtrace(Position endPosData, int fromIDX)
                     // The pruning we did when we created the altToken
                     // ensures that the back trace will align back with
                     // the start of the altToken:
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => altToken.Position == backPos, () => altToken.Position + " vs " + backPos);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(altToken.Position == backPos, () => altToken.Position + " vs " + backPos);
 
                     // NOTE: not quite right: the compound token may
                     // have had all punctuation back traced so far, but
@@ -1060,7 +1060,7 @@ private void Backtrace(Position endPosData, int fromIDX)
                         {
                             Console.WriteLine("  discard all-punctuation altToken=" + altToken);
                         }
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => discardPunctuation);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(discardPunctuation);
                     }
                     altToken = null;
                 }
@@ -1355,7 +1355,7 @@ public void Reset()
             {
                 count = 0;
                 // forwardCount naturally resets after it runs:
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => forwardCount == 0, () => "pos=" + pos + " forwardCount=" + forwardCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(forwardCount == 0, () => "pos=" + pos + " forwardCount=" + forwardCount);
             }
         }
 
@@ -1432,13 +1432,13 @@ public Position Get(int pos)
                         nextWrite = 0;
                     }
                     // Should have already been reset:
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => positions[nextWrite].count == 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(positions[nextWrite].count == 0);
                     positions[nextWrite++].pos = nextPos++;
                     count++;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => InBounds(pos));
+                if (Debugging.AssertsEnabled) Debugging.Assert(InBounds(pos));
                 int index = GetIndex(pos);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => positions[index].pos == pos);
+                if (Debugging.AssertsEnabled) Debugging.Assert(positions[index].pos == pos);
                 return positions[index];
             }
 
@@ -1468,8 +1468,8 @@ public void FreeBefore(int pos)
                 int toFree = count - (nextPos - pos);
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => toFree >= 0);
-                    Debugging.Assert(() => toFree <= count);
+                    Debugging.Assert(toFree >= 0);
+                    Debugging.Assert(toFree <= count);
                 }
                 int index = nextWrite - count;
                 if (index < 0)
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
index 7df6295001..ef9a11c5a0 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs
@@ -61,7 +61,7 @@ public virtual int Put(string[] entry)
             for (int i = 4; i < 8; i++)
             {
                 string part = entry[i];
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => part.Length > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(part.Length > 0);
                 if (!"*".Equals(part, StringComparison.Ordinal))
                 {
                     if (sb.Length > 0)
@@ -120,8 +120,8 @@ public virtual int Put(string[] entry)
 
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => leftId == rightId);
-                Debugging.Assert(() => leftId < 4096); // there are still unused bits
+                Debugging.Assert(leftId == rightId);
+                Debugging.Assert(leftId < 4096); // there are still unused bits
             }
             // add pos mapping
             int toFill = 1 + leftId - posDict.Count;
@@ -131,7 +131,7 @@ public virtual int Put(string[] entry)
             }
 
             string existing = posDict[leftId];
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => existing == null || existing.Equals(fullPOSData, StringComparison.Ordinal));
+            if (Debugging.AssertsEnabled) Debugging.Assert(existing == null || existing.Equals(fullPOSData, StringComparison.Ordinal));
             posDict[leftId] = fullPOSData;
 
             m_buffer.PutInt16((short)(leftId << 3 | flags));
@@ -139,7 +139,7 @@ public virtual int Put(string[] entry)
 
             if ((flags & BinaryDictionary.HAS_BASEFORM) != 0)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => baseForm.Length < 16);
+                if (Debugging.AssertsEnabled) Debugging.Assert(baseForm.Length < 16);
                 int shared = SharedPrefix(entry[0], baseForm);
                 int suffix = baseForm.Length - shared;
                 m_buffer.Put((byte)(shared << 4 | suffix));
@@ -240,11 +240,11 @@ public static int SharedPrefix(string left, string right)
 
         public virtual void AddMapping(int sourceId, int wordId)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => wordId > lastWordId, () => "words out of order: " + wordId + " vs lastID: " + lastWordId);
+            if (Debugging.AssertsEnabled) Debugging.Assert(wordId > lastWordId, () => "words out of order: " + wordId + " vs lastID: " + lastWordId);
 
             if (sourceId > lastSourceId)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => sourceId > lastSourceId, () => "source ids out of order: lastSourceId=" + lastSourceId + " vs sourceId=" + sourceId);
+                if (Debugging.AssertsEnabled) Debugging.Assert(sourceId > lastSourceId, () => "source ids out of order: lastSourceId=" + lastSourceId + " vs sourceId=" + sourceId);
                 targetMapOffsets = ArrayUtil.Grow(targetMapOffsets, sourceId + 1);
                 for (int i = lastSourceId + 1; i <= sourceId; i++)
                 {
@@ -253,7 +253,7 @@ public virtual void AddMapping(int sourceId, int wordId)
             }
             else
            {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => sourceId == lastSourceId);
+                if (Debugging.AssertsEnabled) Debugging.Assert(sourceId == lastSourceId);
             }
 
             targetMap = ArrayUtil.Grow(targetMap, targetMapEndOffset + 1);
@@ -308,7 +308,7 @@ protected virtual void WriteTargetMap(string filename)
                 for (int ofs = 0; ofs < targetMapEndOffset; ofs++)
                 {
                     int val = targetMap[ofs], delta = val - prev;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => delta >= 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0);
                     if (ofs == targetMapOffsets[sourceId])
                     {
                         @out.WriteVInt32((delta << 1) | 0x01);
@@ -320,7 +320,7 @@ protected virtual void WriteTargetMap(string filename)
                     }
                     prev += delta;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => sourceId == numSourceIds, () => "sourceId:" + sourceId + " != numSourceIds:" + numSourceIds);
+                if (Debugging.AssertsEnabled) Debugging.Assert(sourceId == numSourceIds, () => "sourceId:" + sourceId + " != numSourceIds:" + numSourceIds);
             }
         }
 
@@ -344,7 +344,7 @@ protected virtual void WritePosDict(string filename)
                     else
                     {
                         string[] data = CSVUtil.Parse(s);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => data.Length == 3, () => "malformed pos/inflection: " + s);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(data.Length == 3, () => "malformed pos/inflection: " + s);
                         @out.WriteString(data[0]);
                         @out.WriteString(data[1]);
                         @out.WriteString(data[2]);
@@ -373,7 +373,7 @@ protected virtual void WriteDictionary(string filename)
                     @out.WriteByte(m_buffer.Get());
                 }
 
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => m_buffer.Remaining == 0L);
+                if (Debugging.AssertsEnabled) Debugging.Assert(m_buffer.Remaining == 0L);
             }
         }
     }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
index 3e66621dbb..1d7d8e37f3 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs
@@ -41,12 +41,12 @@ public static ConnectionCostsWriter Build(string filename)
             string line = streamReader.ReadLine();
             string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd();
 
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => dimensions.Length == 2);
+            if (Debugging.AssertsEnabled) Debugging.Assert(dimensions.Length == 2);
 
             int forwardSize = int.Parse(dimensions[0], CultureInfo.InvariantCulture);
             int backwardSize = int.Parse(dimensions[1], CultureInfo.InvariantCulture);
 
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => forwardSize > 0 && backwardSize > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(forwardSize > 0 && backwardSize > 0);
 
             ConnectionCostsWriter costs = new ConnectionCostsWriter(forwardSize, backwardSize);
 
@@ -54,7 +54,7 @@ public static ConnectionCostsWriter Build(string filename)
             {
                 string[] fields = whiteSpaceRegex.Split(line).TrimEnd();
 
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Length == 3);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fields.Length == 3);
 
                 int forwardId = int.Parse(fields[0], CultureInfo.InvariantCulture);
                 int backwardId = int.Parse(fields[1], CultureInfo.InvariantCulture);
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
index e49253e2cd..4b5fdb4943 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs
@@ -63,10 +63,10 @@ public void Write(string baseDir)
             @out.WriteVInt32(forwardSize);
             @out.WriteVInt32(backwardSize);
             int last = 0;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => costs.Length == backwardSize);
+            if (Debugging.AssertsEnabled) Debugging.Assert(costs.Length == backwardSize);
             foreach (short[] a in costs)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => a.Length == forwardSize);
+                if (Debugging.AssertsEnabled) Debugging.Assert(a.Length == forwardSize);
                 for (int i = 0; i < a.Length; i++)
                 {
                     int delta = (int)a[i] - last;
diff --git a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
index 641a400582..0191323304 100644
--- a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilter.cs
@@ -85,7 +85,7 @@ public override bool IncrementToken()
 
             if (matcher.Success)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null && encoded != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(state != null && encoded != null);
                 RestoreState(state);
 
                 int start = matcher.Index;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
index d7ea2535fb..257c6629e7 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
@@ -42,7 +42,7 @@ public AnalyzerFactory(IList charFilterFactories,
                                IList tokenFilterFactories)
         {
             this.charFilterFactories = charFilterFactories;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => null != tokenizerFactory);
+            if (Debugging.AssertsEnabled) Debugging.Assert(null != tokenizerFactory);
             this.tokenizerFactory = tokenizerFactory;
             this.tokenFilterFactories = tokenFilterFactories;
         }
diff --git a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
index 633599fc22..a83a695606 100644
--- a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
+++ b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs
@@ -242,7 +242,7 @@ public static QualityStats Average(QualityStats[] stats)
                     }
                 }
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => m > 0, () => "Fishy: no \"good\" queries!");
+            if (Debugging.AssertsEnabled) Debugging.Assert(m > 0, () => "Fishy: no \"good\" queries!");
             // take average: times go by all queries, other measures go by "good" queries only.
             avg.searchTime /= stats.Length;
             avg.docNamesExtractTime /= stats.Length;
diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
index 4f51d9ae98..a973cf0ad7 100644
--- a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs
@@ -73,7 +73,7 @@ public TrecJudge(TextReader reader)
                     st.MoveNext();
                     bool relevant = !zero.Equals(st.Current, StringComparison.Ordinal);
                     // LUCENENET: don't call st.NextToken() unless the condition fails.
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => st.RemainingTokens == 0, () => "wrong format: " + line + " next: " + (st.MoveNext() ? st.Current : ""));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(st.RemainingTokens == 0, () => "wrong format: " + line + " next: " + (st.MoveNext() ? st.Current : ""));
                     if (relevant)
                     { // only keep relevant docs
                         if (curr == null || !curr.queryID.Equals(queryID, StringComparison.Ordinal))
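The TrecJudge hunk just above shows why the message argument stays a delegate even though the condition no longer is one: its body may be expensive or even mutating (here it advances the tokenizer via st.MoveNext()), and it only executes after the condition has already failed. A reduced sketch of the same shape, with the locals treated as hypothetical:

    // Safe: the Func<string> runs only on assertion failure, so the
    // enumerator is never advanced on the success path.
    if (Debugging.AssertsEnabled)
        Debugging.Assert(st.RemainingTokens == 0,
            () => "wrong format: " + line + " next: " + (st.MoveNext() ? st.Current : ""));
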
diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
index d9c8ccfeb9..f5e1f6cf5c 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
@@ -127,7 +127,7 @@ public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldIn
                 {
                     int field = input.ReadVInt32();
                     long numTerms = input.ReadVInt64();
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms >= 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(numTerms >= 0);
                     long termsStartPointer = input.ReadVInt64();
                     FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
                     long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : input.ReadVInt64();
@@ -234,7 +234,7 @@ public override IEnumerator GetEnumerator()
 
         public override Terms GetTerms(string field)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(field != null);
 
             FieldReader result;
             fields.TryGetValue(field, out result);
@@ -258,7 +258,7 @@ private class FieldReader : Terms
 
             public FieldReader(BlockTermsReader outerInstance, FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(numTerms > 0);
 
                 this.outerInstance = outerInstance;
 
@@ -449,7 +449,7 @@ public override SeekStatus SeekCeil(BytesRef target)
 
                     // Block must exist since, at least, the indexed term
                     // is in the block:
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => result);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(result);
 
                     indexIsCurrent = true;
                     didIndexNext = false;
@@ -537,7 +537,7 @@ public override SeekStatus SeekCeil(BytesRef target)
                             // Target's prefix is before the common prefix
                             // of this block, so we position to start of
                             // block and return NOT_FOUND:
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => state.TermBlockOrd == 0);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(state.TermBlockOrd == 0);
 
                             int suffix = termSuffixesReader.ReadVInt32();
                             term.Length = termBlockPrefix + suffix;
@@ -642,7 +642,7 @@ public override SeekStatus SeekCeil(BytesRef target)
                         // cross another index term (besides the first
                         // one) while we are scanning:
 
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => indexIsCurrent);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(indexIsCurrent);
 
                         if (!NextBlock())
                         {
@@ -665,7 +665,7 @@ public override BytesRef Next()
                 // works properly:
                 if (seekPending)
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => !indexIsCurrent);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(!indexIsCurrent);
                     input.Seek(state.BlockFilePointer);
                     int pendingSeekCount = state.TermBlockOrd;
                     bool result = NextBlock();
@@ -675,12 +675,12 @@ public override BytesRef Next()
                     // Block must exist since seek(TermState) was called w/ a
                     // TermState previously returned by this enum when positioned
                    // on a real term:
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => result);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(result);
 
                     while (state.TermBlockOrd < pendingSeekCount)
                     {
                         BytesRef nextResult = _next();
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => nextResult != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(nextResult != null);
                     }
                     seekPending = false;
                     state.Ord = savOrd;
@@ -771,8 +771,8 @@ public override void SeekExact(BytesRef target, TermState otherState)
                 //System.out.println("BTR.seekExact termState target=" + target.utf8ToString() + " " + target + " this=" + this);
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => otherState != null && otherState is BlockTermState);
-                    Debugging.Assert(() => !doOrd || ((BlockTermState)otherState).Ord < outerInstance.numTerms);
+                    Debugging.Assert(otherState != null && otherState is BlockTermState);
+                    Debugging.Assert(!doOrd || ((BlockTermState)otherState).Ord < outerInstance.numTerms);
                 }
                 state.CopyFrom(otherState);
                 seekPending = true;
@@ -797,7 +797,7 @@ public override void SeekExact(long ord)
                     throw new InvalidOperationException("terms index was not loaded");
                 }
 
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => ord < outerInstance.numTerms);
+                if (Debugging.AssertsEnabled) Debugging.Assert(ord < outerInstance.numTerms);
 
                 // TODO: if ord is in same terms block and
                 // after current ord, we should avoid this seek just
@@ -806,7 +806,7 @@ public override void SeekExact(long ord)
                 bool result = NextBlock();
 
                 // Block must exist since ord < numTerms:
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => result);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result);
 
                 indexIsCurrent = true;
                 didIndexNext = false;
@@ -814,7 +814,7 @@ public override void SeekExact(long ord)
                 seekPending = false;
 
                 state.Ord = indexEnum.Ord - 1;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state.Ord >= -1, () => "Ord=" + state.Ord);
+                if (Debugging.AssertsEnabled) Debugging.Assert(state.Ord >= -1, () => "Ord=" + state.Ord);
                 term.CopyBytes(indexEnum.Term);
 
                 // Now, scan:
@@ -822,9 +822,9 @@ public override void SeekExact(long ord)
                 while (left > 0)
                 {
                     BytesRef term = _next();
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(term != null);
                     left--;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => indexIsCurrent);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(indexIsCurrent);
                 }
             }
diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
index 18eb752372..87ecb55586 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs
@@ -70,7 +70,7 @@ private class FieldMetaData
 
             public FieldMetaData(FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int int64sSize)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(numTerms > 0);
 
                 FieldInfo = fieldInfo;
                 TermsStartPointer = termsStartPointer;
@@ -123,7 +123,7 @@ private void WriteHeader(IndexOutput output)
         public override TermsConsumer AddField(FieldInfo field)
         {
             //System.out.println("\nBTW.addField seg=" + segment + " field=" + field.name);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0);
             currentField = field;
             TermsIndexWriterBase.FieldWriter fieldIndexWriter = termsIndexWriter.AddField(field, m_output.GetFilePointer());
             return new TermsWriter(this, fieldIndexWriter, field, postingsWriter);
@@ -235,7 +235,7 @@ public override PostingsConsumer StartTerm(BytesRef text)
 
             public override void FinishTerm(BytesRef text, TermStats stats)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(stats.DocFreq > 0);
                 //System.out.println("BTW: finishTerm term=" + fieldInfo.name + ":" + text.utf8ToString() + " " + text + " seg=" + segment + " df=" + stats.docFreq);
 
                 bool isIndexTerm = fieldIndexWriter.CheckIndexTerm(text, stats);
@@ -304,8 +304,8 @@ private int SharedPrefix(BytesRef term1, BytesRef term2)
             {
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => term1.Offset == 0);
-                    Debugging.Assert(() => term2.Offset == 0);
+                    Debugging.Assert(term1.Offset == 0);
+                    Debugging.Assert(term2.Offset == 0);
                 }
                 int pos1 = 0;
                 int pos1End = pos1 + Math.Min(term1.Length, term2.Length);
@@ -362,7 +362,7 @@ private void FlushBlock()
                 for (int termCount = 0; termCount < pendingCount; termCount++)
                 {
                     BlockTermState state = pendingTerms[termCount].State;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != null);
                     bytesWriter.WriteVInt32(state.DocFreq);
                     if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY)
                     {
diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
index ca21abb680..2d62b7b36b 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
@@ -70,7 +70,7 @@ public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string seg
         {
             this.termComp = termComp;
 
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => indexDivisor == -1 || indexDivisor > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(indexDivisor == -1 || indexDivisor > 0);
 
             input = dir.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION), context);
 
@@ -101,7 +101,7 @@ public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string seg
                     // In case terms index gets loaded, later, on demand
                     totalIndexInterval = indexInterval * indexDivisor;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => totalIndexInterval > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(totalIndexInterval > 0);
 
                 SeekDir(input, dirOffset);
 
@@ -190,7 +190,7 @@ public override long Seek(BytesRef target)
             {
                 int lo = 0; // binary search
                 int hi = fieldIndex.numIndexTerms - 1;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.totalIndexInterval > 0, () => "totalIndexInterval=" + outerInstance.totalIndexInterval);
+                if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.totalIndexInterval > 0, () => "totalIndexInterval=" + outerInstance.totalIndexInterval);
 
                 while (hi >= lo)
                 {
@@ -211,7 +211,7 @@ public override long Seek(BytesRef target)
                     }
                     else
                     {
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => mid >= 0);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(mid >= 0);
                         ord = mid * outerInstance.totalIndexInterval;
                         return fieldIndex.termsStart + fieldIndex.termsDictOffsets.Get(mid);
                     }
@@ -219,7 +219,7 @@ public override long Seek(BytesRef target)
 
                 if (hi < 0)
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => hi == -1);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(hi == -1);
                     hi = 0;
                 }
 
@@ -252,7 +252,7 @@ public override long Seek(long ord)
             {
                 int idx = (int)(ord / outerInstance.totalIndexInterval);
                 // caller must ensure ord is in bounds
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => idx < fieldIndex.numIndexTerms);
+                if (Debugging.AssertsEnabled) Debugging.Assert(idx < fieldIndex.numIndexTerms);
                 long offset = fieldIndex.termOffsets.Get(idx);
                 int length = (int)(fieldIndex.termOffsets.Get(1 + idx) - offset);
                 outerInstance.termBytesReader.FillSlice(term, fieldIndex.termBytesStart + offset, length);
@@ -328,11 +328,11 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS
                     // -1 is passed to mean "don't load term index", but
                     // if we are then later loaded it's overwritten with
                    // a real value
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.outerInstance.indexDivisor > 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.outerInstance.indexDivisor > 0);
 
                     this.numIndexTerms = 1 + (numIndexTerms - 1) / outerInstance.outerInstance.indexDivisor;
 
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => this.numIndexTerms > 0, () => "numIndexTerms=" + numIndexTerms + " indexDivisor=" + outerInstance.outerInstance.indexDivisor);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(this.numIndexTerms > 0, () => "numIndexTerms=" + numIndexTerms + " indexDivisor=" + outerInstance.outerInstance.indexDivisor);
 
                     if (outerInstance.outerInstance.indexDivisor == 1)
                     {
@@ -345,11 +345,11 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS
                             // records offsets into main terms dict file
                             termsDictOffsets = PackedInt32s.GetReader(clone);
 
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => termsDictOffsets.Count == numIndexTerms);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(termsDictOffsets.Count == numIndexTerms);
 
                             // records offsets into byte[] term data
                             termOffsets = PackedInt32s.GetReader(clone);
 
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => termOffsets.Count == 1 + numIndexTerms);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(termOffsets.Count == 1 + numIndexTerms);
                         }
                         finally
                         {
@@ -400,8 +400,8 @@ public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsS
                             clone.Seek(indexStart + termOffset);
                             if (Debugging.AssertsEnabled)
                             {
-                                Debugging.Assert(() => indexStart + termOffset < clone.Length, () => "indexStart=" + indexStart + " termOffset=" + termOffset + " len=" + clone.Length);
-                                Debugging.Assert(() => indexStart + termOffset + numTermBytes < clone.Length);
+                                Debugging.Assert(indexStart + termOffset < clone.Length, () => "indexStart=" + indexStart + " termOffset=" + termOffset + " len=" + clone.Length);
+                                Debugging.Assert(indexStart + termOffset + numTermBytes < clone.Length);
                             }
 
                             outerInstance.outerInstance.termBytes.Copy(clone, numTermBytes);
diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
index ac25b8d583..430365adeb 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs
@@ -187,7 +187,7 @@ public override void Add(BytesRef text, TermStats stats, long termsFilePointer)
                     lastTermsPointer = termsFilePointer;
 
                     // save term length (in bytes)
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => indexedTermLength <= short.MaxValue);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(indexedTermLength <= short.MaxValue);
                     termLengths[numIndexTerms] = (short)indexedTermLength;
                     totTermLength += indexedTermLength;
 
diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs
index d6caa49d21..9bb67b5756 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs
@@ -55,7 +55,7 @@ public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string
             input = dir.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION), new IOContext(context, true));
             //this.segment = segment; // LUCENENET: Not used
             bool success = false;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => indexDivisor == -1 || indexDivisor > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(indexDivisor == -1 || indexDivisor > 0);
 
             try
             {
diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs
index db5c9a539e..80fa33b505 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs
@@ -292,7 +292,7 @@ public override void Add(BytesRef text, TermStats stats, long termsFilePointer)
                 if (text.Length == 0)
                 {
                     // We already added empty string in ctor
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => termsFilePointer == startTermsFilePointer);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(termsFilePointer == startTermsFilePointer);
                     return;
                 }
                 int lengthSave = text.Length;
diff --git a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
index b2aaf1d23c..9f4cc058b3 100644
--- a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs
@@ -380,7 +380,7 @@ public override TermsConsumer AddField(FieldInfo field)
                 var bloomFilter = outerInstance._bloomFilterFactory.GetSetForField(_state, field);
                 if (bloomFilter != null)
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => (_bloomFilters.ContainsKey(field) == false));
+                    if (Debugging.AssertsEnabled) Debugging.Assert((_bloomFilters.ContainsKey(field) == false));
                     _bloomFilters.Add(field, bloomFilter);
 
                     return new WrappedTermsConsumer(_delegateFieldsConsumer.AddField(field), bloomFilter);
diff --git a/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs b/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs
index 76c007a625..c3cb2ce41f 100644
--- a/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs
+++ b/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs
@@ -228,7 +228,7 @@ public static FuzzySet Deserialize(DataInput input)
 
         private ContainsResult MayContainValue(int positiveHash)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => (positiveHash >= 0));
+            if (Debugging.AssertsEnabled) Debugging.Assert((positiveHash >= 0));
 
             // Bloom sizes are always base 2 and so can be ANDed for a fast modulo
            var pos = positiveHash & _bloomSize;
diff --git a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs
index f0ce63ccd0..d201869523 100644
--- a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs
+++ b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs
@@ -109,7 +109,7 @@ public Reader(IndexInput input, int[] pending, IBlockReader blockReader)
 
             internal virtual void Seek(long fp, int upto)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => upto < blockSize);
+                if (Debugging.AssertsEnabled) Debugging.Assert(upto < blockSize);
                 if (seekPending || fp != lastBlockFP)
                 {
                     pendingFP = fp;
@@ -173,7 +173,7 @@ public override void Read(DataInput indexIn, bool absolute)
                         fp += indexIn.ReadVInt64();
                     }
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => upto < outerInstance.m_blockSize);
+                if (Debugging.AssertsEnabled) Debugging.Assert(upto < outerInstance.m_blockSize);
             }
 
             public override void Seek(Int32IndexInput.Reader other)
diff --git a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs
index 869b93a199..d6bc3d44f8 100644
--- a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs
+++ b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexOutput.cs
@@ -101,7 +101,7 @@ public override void Write(DataOutput indexOut, bool absolute)
                 else if (fp == lastFP)
                 {
                     // same block
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= lastUpto);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(upto >= lastUpto);
                     int uptoDelta = upto - lastUpto;
                     indexOut.WriteVInt32(uptoDelta << 1 | 1);
                 }
diff --git a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs
index 3c90d8c25f..0a65f5a7fc 100644
--- a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs
+++ b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs
@@ -118,7 +118,7 @@ internal virtual void Seek(long fp, int upto)
                 // TODO: should we do this in real-time, not lazy?
                 pendingFP = fp;
                 pendingUpto = upto;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingUpto >= 0, () => "pendingUpto=" + pendingUpto);
+                if (Debugging.AssertsEnabled) Debugging.Assert(pendingUpto >= 0, () => "pendingUpto=" + pendingUpto);
                 seekPending = true;
             }
 
diff --git a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs
index db3af9dc58..1bc8a78cc8 100644
--- a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs
+++ b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexOutput.cs
@@ -107,7 +107,7 @@ public override void CopyFrom(Int32IndexOutput.Index other, bool copyLast)
 
             public override void Write(DataOutput indexOut, bool absolute)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(upto >= 0);
                 if (absolute)
                 {
                     indexOut.WriteVInt32(upto);
@@ -116,7 +116,7 @@ public override void Write(DataOutput indexOut, bool absolute)
                 else if (fp == lastFP)
                 {
                     // same block
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= lastUpto);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(upto >= lastUpto);
                     int uptoDelta = upto - lastUpto;
                     indexOut.WriteVInt32(uptoDelta << 1 | 1);
                 }
@@ -136,7 +136,7 @@ public override void Write(int v)
                 hitExcDuringWrite = true;
                 upto -= Add(v) - 1;
                 hitExcDuringWrite = false;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(upto >= 0);
             }
 
             protected override void Dispose(bool disposing)
@@ -152,7 +152,7 @@ protected override void Dispose(bool disposing)
                     while (upto > stuffed)
                     {
                         upto -= Add(0) - 1;
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => upto >= 0);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(upto >= 0);
                         stuffed += 1;
                     }
                 }
diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs
index 2d92f3f991..ecd18e89e6 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs
@@ -372,7 +372,7 @@ public bool MoveNext()
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(false);
                     return false;
                 }
             }
diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
index 330237a53d..8df0270d34 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
@@ -552,7 +552,7 @@ private IBits GetMissingBits(int fieldNumber, long offset, long length)
                 {
                     var data = (IndexInput)this.data.Clone();
                     data.Seek(offset);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => length % 8 == 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(length % 8 == 0);
                     var bits = new long[(int)length >> 3];
                     for (var i = 0; i < bits.Length; i++)
                     {
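Note the DirectDocValuesConsumer hunk above: unlike Java's assert false, Debugging.Assert(false) is a no-op when AssertsEnabled is false, so a branch that is "unreachable" under assertions still needs an explicit fallback. The shape, reduced to its essentials (illustrative, not a new API):

    else
    {
        // Throws AssertionException only when assertions are enabled;
        // otherwise execution falls through to the defensive return below.
        if (Debugging.AssertsEnabled) Debugging.Assert(false);
        return false;
    }
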
a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs @@ -589,7 +589,7 @@ public DirectField(SegmentReadState state, string field, Terms termsIn, int minS upto++; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == docFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(upto == docFreq); ent = new HighFreqTerm(docs, freqs, positions, payloads, totalTermFreq); } @@ -625,7 +625,7 @@ public DirectField(SegmentReadState state, string field, Terms termsIn, int minS } } this.skipOffsets[numTerms] = skipOffset; - if (Debugging.AssertsEnabled) Debugging.Assert(() => skipOffset == skipCount); + if (Debugging.AssertsEnabled) Debugging.Assert(skipOffset == skipCount); } /// Returns approximate RAM bytes used. @@ -738,7 +738,7 @@ private void SetSkips(int termOrd, byte[] termBytes) private void FinishSkips() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => count == terms.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(count == terms.Length); int lastTermOffset = termOffsets[count - 1]; int lastTermLength = termOffsets[count] - lastTermOffset; @@ -972,7 +972,7 @@ public override void SeekExact(BytesRef term, TermState state) { termOrd = (int) ((OrdTermState) state).Ord; SetTerm(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Equals(scratch)); + if (Debugging.AssertsEnabled) Debugging.Assert(term.Equals(scratch)); } public override BytesRef Term => scratch; @@ -1207,15 +1207,15 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, while (label > states[i].transitionMax) { states[i].transitionUpto++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => states[i].transitionUpto < states[i].transitions.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(states[i].transitionUpto < states[i].transitions.Length); states[i].transitionMin = states[i].transitions[states[i].transitionUpto].Min; states[i].transitionMax = states[i].transitions[states[i].transitionUpto].Max; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => states[i].transitionMin >= 0); - Debugging.Assert(() => states[i].transitionMin <= 255); - Debugging.Assert(() => states[i].transitionMax >= 0); - Debugging.Assert(() => states[i].transitionMax <= 255); + Debugging.Assert(states[i].transitionMin >= 0); + Debugging.Assert(states[i].transitionMin <= 255); + Debugging.Assert(states[i].transitionMax >= 0); + Debugging.Assert(states[i].transitionMax <= 255); } } @@ -1257,7 +1257,7 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, // System.out.println(" no match; already beyond; return termOrd=" + termOrd); // } stateUpto -= skipUpto; - if (Debugging.AssertsEnabled) Debugging.Assert(() => stateUpto >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(stateUpto >= 0); return; } else if (label == (outerInstance.termBytes[termOffset_i + i] & 0xFF)) @@ -1272,7 +1272,7 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, int nextState = runAutomaton.Step(states[stateUpto].state, label); // Automaton is required to accept startTerm: - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(nextState != -1); stateUpto++; states[stateUpto].changeOrd = outerInstance.skips[skipOffset + skipUpto++]; @@ -1303,12 +1303,12 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, while (termOrd < outerInstance.terms.Length && outerInstance.Compare(termOrd, 
startTerm) <= 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => termOrd == startTermOrd || + if (Debugging.AssertsEnabled) Debugging.Assert(termOrd == startTermOrd || outerInstance.skipOffsets[termOrd] == outerInstance.skipOffsets[termOrd + 1]); termOrd++; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => termOrd - startTermOrd < outerInstance.minSkipCount); + if (Debugging.AssertsEnabled) Debugging.Assert(termOrd - startTermOrd < outerInstance.minSkipCount); termOrd--; stateUpto -= skipUpto; // if (DEBUG) { @@ -1389,7 +1389,7 @@ public override BytesRef Next() if (termOrd == 0 && outerInstance.termOffsets[1] == 0) { // Special-case empty string: - if (Debugging.AssertsEnabled) Debugging.Assert(() => stateUpto == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(stateUpto == 0); // if (DEBUG) { // System.out.println(" visit empty string"); // } @@ -1438,9 +1438,9 @@ public override BytesRef Next() // System.out.println(" term=" + new BytesRef(termBytes, termOffset, termLength).utf8ToString() + " skips=" + Arrays.toString(skips)); // } - if (Debugging.AssertsEnabled) Debugging.Assert(() => termOrd < state.changeOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(termOrd < state.changeOrd); - if (Debugging.AssertsEnabled) Debugging.Assert(() => stateUpto <= termLength, () => "term.length=" + termLength + "; stateUpto=" + stateUpto); + if (Debugging.AssertsEnabled) Debugging.Assert(stateUpto <= termLength, () => "term.length=" + termLength + "; stateUpto=" + stateUpto); int label = outerInstance.termBytes[termOffset + stateUpto] & 0xFF; while (label > state.transitionMax) @@ -1459,7 +1459,7 @@ public override BytesRef Next() } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state.changeOrd > termOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(state.changeOrd > termOrd); // if (DEBUG) { // System.out.println(" jumpend " + (state.changeOrd - termOrd)); // } @@ -1470,16 +1470,16 @@ public override BytesRef Next() } goto nextTermContinue; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => state.transitionUpto < state.transitions.Length, + if (Debugging.AssertsEnabled) Debugging.Assert(state.transitionUpto < state.transitions.Length, () => " state.transitionUpto=" + state.transitionUpto + " vs " + state.transitions.Length); state.transitionMin = state.transitions[state.transitionUpto].Min; state.transitionMax = state.transitions[state.transitionUpto].Max; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => state.transitionMin >= 0); - Debugging.Assert(() => state.transitionMin <= 255); - Debugging.Assert(() => state.transitionMax >= 0); - Debugging.Assert(() => state.transitionMax <= 255); + Debugging.Assert(state.transitionMin >= 0); + Debugging.Assert(state.transitionMin <= 255); + Debugging.Assert(state.transitionMax >= 0); + Debugging.Assert(state.transitionMax <= 255); } } @@ -1609,7 +1609,7 @@ public override BytesRef Next() if (compiledAutomaton.CommonSuffixRef != null) { //System.out.println("suffix " + compiledAutomaton.commonSuffixRef.utf8ToString()); - if (Debugging.AssertsEnabled) Debugging.Assert(() => compiledAutomaton.CommonSuffixRef.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(compiledAutomaton.CommonSuffixRef.Offset == 0); if (termLength < compiledAutomaton.CommonSuffixRef.Length) { termOrd++; @@ -2008,7 +2008,7 @@ public override int NextDoc() if (upto < postings.Length) { freq = postings[upto + 1]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) 
Debugging.Assert(freq > 0); return postings[upto]; } } @@ -2017,7 +2017,7 @@ public override int NextDoc() while (upto < postings.Length) { freq = postings[upto + 1]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0); if (liveDocs.Get(postings[upto])) { return postings[upto]; } @@ -2192,7 +2192,7 @@ public override int NextDoc() public override int NextPosition() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => skipPositions > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(skipPositions > 0); skipPositions--; int pos = postings[upto++]; diff --git a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs index 6d79270d49..c34bd4c8cf 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs @@ -166,7 +166,7 @@ public override IEnumerator<string> GetEnumerator() public override Terms GetTerms(string field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(field != null); TermsReader result; fields.TryGetValue(field, out result); return result; @@ -218,7 +218,7 @@ internal TermsReader(FSTOrdTermsReader outerInstance, FieldInfo fieldInfo, Index this.longsSize = longsSize; this.index = index; - if (Debugging.AssertsEnabled) Debugging.Assert(() => (numTerms & (~0xffffffffL)) == 0); + if (Debugging.AssertsEnabled) Debugging.Assert((numTerms & (~0xffffffffL)) == 0); int numBlocks = (int)(numTerms + INTERVAL - 1) / INTERVAL; this.numSkipInfo = longsSize + 3; this.skipInfo = new long[numBlocks * numSkipInfo]; @@ -500,7 +500,7 @@ public override BytesRef Next() { seekPending = false; var status = SeekCeil(term); - if (Debugging.AssertsEnabled) Debugging.Assert(() => status == SeekStatus.FOUND); // must positioned on valid term + if (Debugging.AssertsEnabled) Debugging.Assert(status == SeekStatus.FOUND); // must be positioned on valid term } UpdateEnum(fstEnum.Next()); return term; @@ -631,7 +631,7 @@ internal override void DecodeMetaData() internal override void DecodeStats() { var arc = TopFrame().arc; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.NextFinalOutput == fstOutputs.NoOutput); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.NextFinalOutput == fstOutputs.NoOutput); ord = arc.Output.Value; base.DecodeStats(); } @@ -699,7 +699,7 @@ private BytesRef DoSeekCeil(BytesRef target) { break; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsValid(frame)); // target must be fetched from automaton + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid(frame)); // target must be fetched from automaton PushFrame(frame); upto++; } @@ -837,7 +837,7 @@ private void PushFrame(Frame frame) arc.Output = fstOutputs.Add(TopFrame().arc.Output, arc.Output); term = Grow(arc.Label); level++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => frame == stack[level]); + if (Debugging.AssertsEnabled) Debugging.Assert(frame == stack[level]); } private Frame PopFrame() diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs b/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs index 16d2d0c265..f07236593f 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs @@ -128,7 +128,7 @@ public override TermData Common(TermData t1, TermData t2) if (Equals(t1, NO_OUTPUT) || Equals(t2, NO_OUTPUT)) return NO_OUTPUT; - if (Debugging.AssertsEnabled) Debugging.Assert(() => t1.longs.Length == 
t2.longs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(t1.longs.Length == t2.longs.Length); long[] min = t1.longs, max = t2.longs; int pos = 0; @@ -183,7 +183,7 @@ public override TermData Subtract(TermData t1, TermData t2) if (Equals(t2, NO_OUTPUT)) return t1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => t1.longs.Length == t2.longs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(t1.longs.Length == t2.longs.Length); int pos = 0; long diff = 0; @@ -220,7 +220,7 @@ public override TermData Add(TermData t1, TermData t2) if (Equals(t2, NO_OUTPUT)) return t1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => t1.longs.Length == t2.longs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(t1.longs.Length == t2.longs.Length); var pos = 0; var accum = new long[_longsSize]; diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs index e12eb5adcb..38d6da76c7 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs @@ -166,7 +166,7 @@ public override IEnumerator<string> GetEnumerator() public override Terms GetTerms(string field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(field != null); TermsReader result; fields.TryGetValue(field, out result); return result; @@ -369,7 +369,7 @@ public override BytesRef Next() { seekPending = false; SeekStatus status = SeekCeil(term); - if (Debugging.AssertsEnabled) Debugging.Assert(() => status == SeekStatus.FOUND); // must positioned on valid term + if (Debugging.AssertsEnabled) Debugging.Assert(status == SeekStatus.FOUND); // must be positioned on valid term } UpdateEnum(fstEnum.Next()); return term; @@ -500,7 +500,7 @@ internal IntersectTermsEnum(FSTTermsReader.TermsReader outerInstance, CompiledAu internal override void DecodeMetaData() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(term != null); if (!decoded) { if (meta.bytes != null) @@ -611,7 +611,7 @@ private BytesRef DoSeekCeil(BytesRef target) { break; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsValid(frame)); // target must be fetched from automaton + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid(frame)); // target must be fetched from automaton PushFrame(frame); upto++; } diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs index 2176e99f92..cb6dc17fb1 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs @@ -144,7 +144,7 @@ internal virtual void AddNumericField(FieldInfo field, IEnumerable<long?> values ++count; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(count == maxDoc); } if (missing) diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs index 802ca19189..535844f34e 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs @@ -675,7 +675,7 @@ private IBits GetMissingBits(int fieldNumber, long offset, long length) { var data = (IndexInput)this.data.Clone(); data.Seek(offset); - if (Debugging.AssertsEnabled) Debugging.Assert(() => length % 8 == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(length % 8 == 0); var bits = 
new long[(int) length >> 3]; for (var i = 0; i < bits.Length; i++) { diff --git a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs index 7fa27bfcdb..1bf2067369 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs @@ -146,7 +146,7 @@ public PostingsWriter(MemoryPostingsFormat.TermsWriter outerInstance) public override void StartDoc(int docID, int termDocFreq) { int delta = docID - lastDocID; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docID == 0 || delta > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(docID == 0 || delta > 0); lastDocID = docID; docCount++; @@ -161,7 +161,7 @@ public override void StartDoc(int docID, int termDocFreq) else { buffer.WriteVInt32(delta << 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => termDocFreq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(termDocFreq > 0); buffer.WriteVInt32(termDocFreq); } @@ -171,12 +171,12 @@ public override void StartDoc(int docID, int termDocFreq) public override void AddPosition(int pos, BytesRef payload, int startOffset, int endOffset) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => payload == null || outerInstance.field.HasPayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(payload == null || outerInstance.field.HasPayloads); //System.out.println(" addPos pos=" + pos + " payload=" + payload); int delta = pos - lastPos; - if (Debugging.AssertsEnabled) Debugging.Assert(() => delta >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0); lastPos = pos; int payloadLen = 0; @@ -231,7 +231,7 @@ public override void FinishDoc() public virtual PostingsWriter Reset() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer.GetFilePointer() == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(buffer.GetFilePointer() == 0); lastDocID = 0; docCount = 0; lastPayloadLen = 0; @@ -255,9 +255,9 @@ public override PostingsConsumer StartTerm(BytesRef text) public override void FinishTerm(BytesRef text, TermStats stats) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => postingsWriter.docCount == stats.DocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(postingsWriter.docCount == stats.DocFreq); - if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer2.GetFilePointer() == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(buffer2.GetFilePointer() == 0); buffer2.WriteVInt32(stats.DocFreq); if (field.IndexOptions != IndexOptions.DOCS_ONLY) @@ -402,7 +402,7 @@ public bool CanReuse(IndexOptions indexOptions, bool storePayloads) public FSTDocsEnum Reset(BytesRef bufferIn, IBits liveDocs, int numDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 0); if (buffer.Length < bufferIn.Length) { buffer = ArrayUtil.Grow(buffer, bufferIn.Length); @@ -446,7 +446,7 @@ public override int NextDoc() else { freq = @in.ReadVInt32(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0); } if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) @@ -555,7 +555,7 @@ public bool CanReuse(bool storePayloads, bool storeOffsets) public FSTDocsAndPositionsEnum Reset(BytesRef bufferIn, IBits liveDocs, int numDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 0); // System.out.println("D&P reset bytes this=" + this); 
// for(int i=bufferIn.offset;i<bufferIn.offset+bufferIn.length;i++) [...] - if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0); } if (liveDocs == null || liveDocs.Get(accum)) @@ -655,7 +655,7 @@ public override int NextDoc() public override int NextPosition() { //System.out.println(" nextPos storePayloads=" + storePayloads + " this=" + this); - if (Debugging.AssertsEnabled) Debugging.Assert(() => posPending > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(posPending > 0); posPending--; if (!storePayloads) { diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs index 8f61c25a83..f91fb849a7 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs @@ -47,7 +47,7 @@ public PulsingPostingsFormat(PostingsBaseFormat wrappedPostingsBaseFormat, int f int minBlockSize, int maxBlockSize) : base() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minBlockSize > 1); + if (Debugging.AssertsEnabled) Debugging.Assert(minBlockSize > 1); _freqCutoff = freqCutoff; _minBlockSize = minBlockSize; diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs index b7568b2725..d429bd6cc9 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs @@ -122,7 +122,7 @@ public override object Clone() } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => WrappedTermState != null); + if (Debugging.AssertsEnabled) Debugging.Assert(WrappedTermState != null); clone.WrappedTermState = (BlockTermState)WrappedTermState.Clone(); clone.Absolute = Absolute; @@ -172,7 +172,7 @@ public override void DecodeTerm(long[] empty, DataInput input, FieldInfo fieldIn { var termState2 = (PulsingTermState) termState; - if (Debugging.AssertsEnabled) Debugging.Assert(() => empty.Length == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(empty.Length == 0); termState2.Absolute = termState2.Absolute || absolute; // if we have positions, its total TF, otherwise its computed based on docFreq. 
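The conversion pattern applied throughout these hunks is easier to see outside a diff. Below is a minimal sketch, not the shipped Lucene.Net.Diagnostics.Debugging class (which also seeds AssertsEnabled from the "assert" system property and throws AssertionException); InvalidOperationException stands in here only so the sample compiles on its own. Passing the condition as a plain bool avoids allocating a closure per call when asserts are enabled, and the call-site guard `if (Debugging.AssertsEnabled)` still short-circuits before the condition or the message delegate is evaluated when they are off.

using System;

// Minimal stand-in for the Debugging helper these call sites target.
internal static class Debugging
{
    // Off by default; the shipped class reads the "assert" system property.
    public static bool AssertsEnabled { get; set; } = false;

    // Plain-bool overload: no per-call lambda allocation.
    public static void Assert(bool condition)
    {
        if (AssertsEnabled && !condition)
            throw new InvalidOperationException("assertion failed"); // stand-in exception type
    }

    // The message stays a delegate so the string is only built on failure.
    public static void Assert(bool condition, Func<string> messageFactory)
    {
        if (AssertsEnabled && !condition)
            throw new InvalidOperationException(messageFactory());
    }
}

internal static class GuardedCallSite
{
    public static void Demo(int freq)
    {
        // Guarded call site, as in the hunks above: with asserts disabled,
        // neither the comparison nor the message concatenation ever runs.
        if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0, () => "freq=" + freq);
    }
}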
@@ -340,7 +340,7 @@ public PulsingDocsEnum(FieldInfo fieldInfo) public virtual PulsingDocsEnum Reset(IBits liveDocs, PulsingTermState termState) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.PostingsSize != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(termState.PostingsSize != -1); // Must make a copy of termState's byte[] so that if // app does TermsEnum.next(), this DocsEnum is not affected @@ -482,7 +482,7 @@ internal bool CanReuse(FieldInfo fieldInfo) public virtual PulsingDocsAndPositionsEnum Reset(IBits liveDocs, PulsingTermState termState) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.PostingsSize != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(termState.PostingsSize != -1); if (_postingsBytes == null) { @@ -542,7 +542,7 @@ public override int Advance(int target) public override int NextPosition() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => _posPending > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(_posPending > 0); _posPending--; diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs index f1b0ec5770..e01618dc2c 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs @@ -150,7 +150,7 @@ public override BlockTermState NewTermState() public override void StartTerm() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => _pendingCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(_pendingCount == 0); } // TODO: -- should we NOT reuse across fields? would @@ -175,7 +175,7 @@ public override int SetField(FieldInfo fieldInfo) public override void StartDoc(int docId, int termDocFreq) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docId >= 0, () => "Got DocID=" + docId); + if (Debugging.AssertsEnabled) Debugging.Assert(docId >= 0, () => "Got DocID=" + docId); if (_pendingCount == _pending.Length) { @@ -185,7 +185,7 @@ public override void StartDoc(int docId, int termDocFreq) if (_pendingCount != -1) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => _pendingCount < _pending.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(_pendingCount < _pending.Length); _currentDoc = _pending[_pendingCount]; _currentDoc.docID = docId; if (_indexOptions == IndexOptions.DOCS_ONLY) @@ -267,7 +267,7 @@ public override void FinishTerm(BlockTermState state) { var state2 = (PulsingTermState)state; - if (Debugging.AssertsEnabled) Debugging.Assert(() => _pendingCount > 0 || _pendingCount == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(_pendingCount > 0 || _pendingCount == -1); if (_pendingCount == -1) { @@ -318,7 +318,7 @@ public override void FinishTerm(BlockTermState state) for (var posIDX = 0; posIDX < doc.termFreq; posIDX++) { var pos = _pending[pendingIDX++]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos.docID == doc.docID); + if (Debugging.AssertsEnabled) Debugging.Assert(pos.docID == doc.docID); var posDelta = pos.pos - lastPos; lastPos = pos.pos; @@ -361,7 +361,7 @@ public override void FinishTerm(BlockTermState state) if (payloadLength > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => _storePayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(_storePayloads); _buffer.WriteBytes(pos.payload.Bytes, 0, pos.payload.Length); } } @@ -375,7 +375,7 @@ public override void FinishTerm(BlockTermState state) Position doc = _pending[posIdx]; int delta = doc.docID - lastDocId; - if (Debugging.AssertsEnabled) 
Debugging.Assert(() => doc.termFreq != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(doc.termFreq != 0); if (doc.termFreq == 1) { @@ -411,7 +411,7 @@ public override void EncodeTerm(long[] empty, DataOutput output, FieldInfo field bool abs) { var _state = (PulsingTermState)state; - if (Debugging.AssertsEnabled) Debugging.Assert(() => empty.Length == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(empty.Length == 0); _absolute = _absolute || abs; if (_state.bytes == null) { @@ -469,7 +469,7 @@ protected override void Dispose(bool disposing) /// private void Push() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => _pendingCount == _pending.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(_pendingCount == _pending.Length); _wrappedPostingsWriter.StartTerm(); @@ -487,7 +487,7 @@ private void Push() } else if (doc.docID != pos.docID) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos.docID > doc.docID); + if (Debugging.AssertsEnabled) Debugging.Assert(pos.docID > doc.docID); _wrappedPostingsWriter.FinishDoc(); doc = pos; _wrappedPostingsWriter.StartDoc(doc.docID, doc.termFreq); diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs index dad545a78a..b6565cff8c 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs @@ -263,7 +263,7 @@ public override DocsEnum Docs(FieldInfo fieldInfo, BlockTermState termState, IBi public override DocsAndPositionsEnum DocsAndPositions(FieldInfo fieldInfo, BlockTermState termState, IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); SepTermState termState_ = (SepTermState)termState; SepDocsAndPositionsEnum postingsEnum; if (reuse == null || !(reuse is SepDocsAndPositionsEnum)) @@ -692,7 +692,7 @@ public override int NextPosition() { // Payload length has changed payloadLength = posReader.Next(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(payloadLength >= 0); } pendingPosCount--; position = 0; @@ -707,7 +707,7 @@ public override int NextPosition() { // Payload length has changed payloadLength = posReader.Next(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(payloadLength >= 0); } position += (int)(((uint)code) >> 1); pendingPayloadBytes += payloadLength; @@ -719,7 +719,7 @@ public override int NextPosition() } pendingPosCount--; - if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingPosCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(pendingPosCount >= 0); return position; } @@ -741,7 +741,7 @@ public override BytesRef GetPayload() return payload; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingPayloadBytes >= payloadLength); + if (Debugging.AssertsEnabled) Debugging.Assert(pendingPayloadBytes >= payloadLength); if (pendingPayloadBytes > payloadLength) { diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs index e460fde487..f4b2d13b84 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs @@ -262,10 +262,10 @@ public override void StartDoc(int docID, 
int termDocFreq) /// Add a new position & payload. public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); int delta = position - lastPosition; - if (Debugging.AssertsEnabled) Debugging.Assert(() => delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) + if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it) lastPosition = position; if (storePayloads) @@ -319,8 +319,8 @@ public override void FinishTerm(BlockTermState state) // TODO: -- wasteful we are counting this in two places? if (Debugging.AssertsEnabled) { - Debugging.Assert(() => state_.DocFreq > 0); - Debugging.Assert(() => state_.DocFreq == df); + Debugging.Assert(state_.DocFreq > 0); + Debugging.Assert(state_.DocFreq == df); } state_.DocIndex = docOut.GetIndex(); diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs index b7c50ef01a..ab7af414ee 100644 --- a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs +++ b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs @@ -195,7 +195,7 @@ protected override void SetLastSkipData(int level) protected override int ReadSkipData(int level, IndexInput skipStream) { int delta; - if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !currentFieldStoresPayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !currentFieldStoresPayloads); if (currentFieldStoresPayloads) { // the current field stores payloads. 
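Two call-site shapes recur in the Sep hunks above and are worth isolating: several related assertions share a single AssertsEnabled guard block (as in SepPostingsWriter.FinishTerm), and costly failure messages are passed as a Func<string> so they are only built on the failing path (as in AddPosition). A small sketch under those assumptions, reusing the Debugging stand-in sketched earlier; the parameter values are hypothetical, chosen only to mirror the nearby code:

using System;

internal static class SepCallSiteShapes
{
    // Mirrors the grouped asserts in SepPostingsWriter.FinishTerm.
    public static void FinishTermShape(int docFreq, int df)
    {
        // One guard for a block of related checks: a single branch when
        // asserts are off, instead of one branch per assertion.
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(docFreq > 0);
            Debugging.Assert(docFreq == df);
        }
    }

    // Mirrors the delta check in SepPostingsWriter.AddPosition.
    public static void AddPositionShape(int position, int lastPosition)
    {
        int delta = position - lastPosition;
        // The condition is a cheap bool; the message remains deferred behind
        // a delegate so the concatenation only happens if the assert fires.
        if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0,
            () => "position=" + position + " lastPosition=" + lastPosition);
    }
}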
diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs b/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs index 2ef7094542..f65b92244e 100644 --- a/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs +++ b/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs @@ -178,7 +178,7 @@ protected override void WriteSkipData(int level, IndexOutput skipBuffer) // current payload length equals the length at the previous // skip point - if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !curStorePayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !curStorePayloads); if (curStorePayloads) { diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs index 2123bd430c..47ffc15548 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs @@ -76,25 +76,25 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) { break; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.FIELD), () => scratch.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.FIELD), () => scratch.Utf8ToString()); var fieldName = StripPrefix(SimpleTextDocValuesWriter.FIELD); var field = new OneField(); fields[fieldName] = field; ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.TYPE), () => scratch.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.TYPE), () => scratch.Utf8ToString()); var dvType = (DocValuesType)Enum.Parse(typeof(DocValuesType), StripPrefix(SimpleTextDocValuesWriter.TYPE)); - // if (Debugging.AssertsEnabled) Debugging.Assert(() => dvType != null); // LUCENENET: Not possible for an enum to be null in .NET + // if (Debugging.AssertsEnabled) Debugging.Assert(dvType != null); // LUCENENET: Not possible for an enum to be null in .NET if (dvType == DocValuesType.NUMERIC) { ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MINVALUE), + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.MINVALUE), () => "got " + scratch.Utf8ToString() + " field=" + fieldName + " ext=" + ext); field.MinValue = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.MINVALUE), CultureInfo.InvariantCulture); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN)); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN)); field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN); field.DataStartFilePointer = data.GetFilePointer(); data.Seek(data.GetFilePointer() + (1 + field.Pattern.Length + 2)*maxDoc); @@ -102,10 +102,10 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) else if (dvType == DocValuesType.BINARY) { ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MAXLENGTH)); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.MAXLENGTH)); field.MaxLength = Convert.ToInt32(StripPrefix(SimpleTextDocValuesWriter.MAXLENGTH), CultureInfo.InvariantCulture); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => 
StartsWith(SimpleTextDocValuesWriter.PATTERN)); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN)); field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN); field.DataStartFilePointer = data.GetFilePointer(); data.Seek(data.GetFilePointer() + (9 + field.Pattern.Length + field.MaxLength + 2)*maxDoc); @@ -113,16 +113,16 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) else if (dvType == DocValuesType.SORTED || dvType == DocValuesType.SORTED_SET) { ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.NUMVALUES)); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.NUMVALUES)); field.NumValues = Convert.ToInt64(StripPrefix(SimpleTextDocValuesWriter.NUMVALUES), CultureInfo.InvariantCulture); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.MAXLENGTH)); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.MAXLENGTH)); field.MaxLength = Convert.ToInt32(StripPrefix(SimpleTextDocValuesWriter.MAXLENGTH), CultureInfo.InvariantCulture); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.PATTERN)); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.PATTERN)); field.Pattern = StripPrefix(SimpleTextDocValuesWriter.PATTERN); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StartsWith(SimpleTextDocValuesWriter.ORDPATTERN)); + if (Debugging.AssertsEnabled) Debugging.Assert(StartsWith(SimpleTextDocValuesWriter.ORDPATTERN)); field.OrdPattern = StripPrefix(SimpleTextDocValuesWriter.ORDPATTERN); field.DataStartFilePointer = data.GetFilePointer(); data.Seek(data.GetFilePointer() + (9 + field.Pattern.Length + field.MaxLength)*field.NumValues + @@ -136,7 +136,7 @@ internal SimpleTextDocValuesReader(SegmentReadState state, string ext) // We should only be called from above if at least one // field has DVs: - if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(fields.Count > 0); } public override NumericDocValues GetNumeric(FieldInfo fieldInfo) @@ -144,10 +144,10 @@ public override NumericDocValues GetNumeric(FieldInfo fieldInfo) var field = fields[fieldInfo.Name]; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => field != null); + Debugging.Assert(field != null); // SegmentCoreReaders already verifies this field is valid: - Debugging.Assert(() => field != null, () => "field=" + fieldInfo.Name + " fields=" + fields); + Debugging.Assert(field != null, () => "field=" + fieldInfo.Name + " fields=" + fields); } var @in = (IndexInput)data.Clone(); @@ -244,7 +244,7 @@ public bool Get(int index) public override BinaryDocValues GetBinary(FieldInfo fieldInfo) { var field = fields[fieldInfo.Name]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(field != null); var input = (IndexInput)data.Clone(); var scratch = new BytesRef(); @@ -278,7 +278,7 @@ public override void Get(int docId, BytesRef result) _input.Seek(_field.DataStartFilePointer + (9 + _field.Pattern.Length + _field.MaxLength + 2) * docId); SimpleTextUtil.ReadLine(_input, _scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH)); + if (Debugging.AssertsEnabled) 
Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH)); int len; try { @@ -335,7 +335,7 @@ public bool Get(int index) { _input.Seek(_field.DataStartFilePointer + (9 + _field.Pattern.Length + _field.MaxLength + 2) * index); SimpleTextUtil.ReadLine(_input, _scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH)); int len; try { @@ -368,7 +368,7 @@ public override SortedDocValues GetSorted(FieldInfo fieldInfo) var field = fields[fieldInfo.Name]; // SegmentCoreReaders already verifies this field is valid: - if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(field != null); var input = (IndexInput)data.Clone(); var scratch = new BytesRef(); @@ -437,7 +437,7 @@ public override void LookupOrd(int ord, BytesRef result) } _input.Seek(_field.DataStartFilePointer + ord * (9 + _field.Pattern.Length + _field.MaxLength)); SimpleTextUtil.ReadLine(_input, _scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), () => "got " + _scratch.Utf8ToString() + " in=" + _input); int len; try @@ -473,7 +473,7 @@ public override SortedSetDocValues GetSortedSet(FieldInfo fieldInfo) // SegmentCoreReaders already verifies this field is // valid: - if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(field != null); var input = (IndexInput) data.Clone(); var scratch = new BytesRef(); @@ -542,7 +542,7 @@ public override void LookupOrd(long ord, BytesRef result) _input.Seek(_field.DataStartFilePointer + ord * (9 + _field.Pattern.Length + _field.MaxLength)); SimpleTextUtil.ReadLine(_input, _scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextDocValuesWriter.LENGTH), () => "got " + _scratch.Utf8ToString() + " in=" + _input); int len; try diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs index e7bc52218a..8ca1703e14 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs @@ -69,7 +69,7 @@ internal SimpleTextDocValuesWriter(SegmentWriteState state, string ext) /// private bool FieldSeen(string field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !_fieldsSeen.Contains(field), () => "field \"" + field + "\" was added more than once during flush"); + if (Debugging.AssertsEnabled) Debugging.Assert(!_fieldsSeen.Contains(field), () => "field \"" + field + "\" was added more than once during flush"); _fieldsSeen.Add(field); return true; } @@ -78,8 +78,8 @@ public override void AddNumericField(FieldInfo field, IEnumerable<long?> values) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => FieldSeen(field.Name)); - Debugging.Assert(() => field.DocValuesType == DocValuesType.NUMERIC || + Debugging.Assert(FieldSeen(field.Name)); + Debugging.Assert(field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC); } 
WriteFieldEntry(field, DocValuesType.NUMERIC); @@ -121,28 +121,28 @@ public override void AddNumericField(FieldInfo field, IEnumerable<long?> values) { long value = n.GetValueOrDefault(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => value >= minValue); + if (Debugging.AssertsEnabled) Debugging.Assert(value >= minValue); var delta = (decimal)value - (decimal)minValue; // LUCENENET specific - use decimal rather than BigInteger string s = delta.ToString(patternString, CultureInfo.InvariantCulture); - if (Debugging.AssertsEnabled) Debugging.Assert(() => s.Length == patternString.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(s.Length == patternString.Length); SimpleTextUtil.Write(data, s, scratch); SimpleTextUtil.WriteNewline(data); SimpleTextUtil.Write(data, n == null ? "F" : "T", scratch); SimpleTextUtil.WriteNewline(data); numDocsWritten++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocsWritten <= numDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocsWritten <= numDocs); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs == numDocsWritten, () => "numDocs=" + numDocs + " numDocsWritten=" + numDocsWritten); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs == numDocsWritten, () => "numDocs=" + numDocs + " numDocsWritten=" + numDocsWritten); } public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => FieldSeen(field.Name)); - Debugging.Assert(() => field.DocValuesType == DocValuesType.BINARY); + Debugging.Assert(FieldSeen(field.Name)); + Debugging.Assert(field.DocValuesType == DocValuesType.BINARY); } var maxLength = 0; @@ -198,15 +198,15 @@ public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> value numDocsWritten++; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs == numDocsWritten); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs == numDocsWritten); } public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => FieldSeen(field.Name)); - Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED); + Debugging.Assert(FieldSeen(field.Name)); + Debugging.Assert(field.DocValuesType == DocValuesType.SORTED); } WriteFieldEntry(field, DocValuesType.SORTED); @@ -277,10 +277,10 @@ public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> value } SimpleTextUtil.WriteNewline(data); valuesSeen++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => valuesSeen <= valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(valuesSeen <= valueCount); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => valuesSeen == valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(valuesSeen == valueCount); foreach (var ord in docToOrd) { @@ -294,8 +294,8 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> va { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => FieldSeen(field.Name)); - Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED_SET); + Debugging.Assert(FieldSeen(field.Name)); + Debugging.Assert(field.DocValuesType == DocValuesType.SORTED_SET); } WriteFieldEntry(field, DocValuesType.SORTED_SET); @@ -387,10 +387,10 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> va } SimpleTextUtil.WriteNewline(data); valuesSeen++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => valuesSeen <= valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(valuesSeen <= 
valueCount); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => valuesSeen == valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(valuesSeen == valueCount); using (var ordStream = ords.GetEnumerator()) { @@ -438,7 +438,7 @@ protected override void Dispose(bool disposing) var success = false; try { - if (Debugging.AssertsEnabled) Debugging.Assert(() => _fieldsSeen.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(_fieldsSeen.Count > 0); // java : sheisty to do this here? SimpleTextUtil.Write(data, END); SimpleTextUtil.WriteNewline(data); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs index e9a68b013f..a7c423a289 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosReader.cs @@ -57,29 +57,29 @@ public override FieldInfos Read(Directory directory, string segmentName, string { SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMFIELDS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMFIELDS)); var size = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUMFIELDS.Length, scratch), CultureInfo.InvariantCulture); var infos = new FieldInfo[size]; for (var i = 0; i < size; i++) { SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NAME)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NAME)); string name = ReadString(SimpleTextFieldInfosWriter.NAME.Length, scratch); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMBER)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUMBER)); int fieldNumber = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUMBER.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ISINDEXED)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ISINDEXED)); bool isIndexed = Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.ISINDEXED.Length, scratch), CultureInfo.InvariantCulture); IndexOptions indexOptions; if (isIndexed) { SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.INDEXOPTIONS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.INDEXOPTIONS)); indexOptions = (IndexOptions)Enum.Parse(typeof(IndexOptions), ReadString(SimpleTextFieldInfosWriter.INDEXOPTIONS.Length, scratch)); } @@ -89,46 +89,46 @@ public override FieldInfos Read(Directory directory, string segmentName, string } SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.STORETV)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.STORETV)); bool storeTermVector = 
Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.STORETV.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.PAYLOADS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.PAYLOADS)); bool storePayloads = Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.PAYLOADS.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS)); bool omitNorms = !Convert.ToBoolean(ReadString(SimpleTextFieldInfosWriter.NORMS.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS_TYPE)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NORMS_TYPE)); string nrmType = ReadString(SimpleTextFieldInfosWriter.NORMS_TYPE.Length, scratch); Index.DocValuesType normsType = DocValuesType(nrmType); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES)); string dvType = ReadString(SimpleTextFieldInfosWriter.DOCVALUES.Length, scratch); Index.DocValuesType docValuesType = DocValuesType(dvType); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES_GEN)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.DOCVALUES_GEN)); long dvGen = Convert.ToInt64(ReadString(SimpleTextFieldInfosWriter.DOCVALUES_GEN.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUM_ATTS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.NUM_ATTS)); int numAtts = Convert.ToInt32(ReadString(SimpleTextFieldInfosWriter.NUM_ATTS.Length, scratch), CultureInfo.InvariantCulture); IDictionary<string, string> atts = new Dictionary<string, string>(); for (int j = 0; j < numAtts; j++) { SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_KEY)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_KEY)); string key = ReadString(SimpleTextFieldInfosWriter.ATT_KEY.Length, scratch); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_VALUE)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextFieldInfosWriter.ATT_VALUE)); string value = ReadString(SimpleTextFieldInfosWriter.ATT_VALUE.Length, scratch); atts[key] = value; } diff --git 
a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs index 949c682a1b..7abfdccf9f 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldInfosWriter.cs @@ -92,7 +92,7 @@ public override void Write(Directory directory, string segmentName, string segme if (fi.IsIndexed) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads); SimpleTextUtil.Write(output, INDEXOPTIONS); SimpleTextUtil.Write(output, fi.IndexOptions != IndexOptions.NONE ? fi.IndexOptions.ToString() : string.Empty, diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs index 7e0c8d01db..9a2a97bd2b 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs @@ -159,7 +159,7 @@ public override SeekStatus SeekCeil(BytesRef text) public override BytesRef Next() { - //if (Debugging.AssertsEnabled) Debugging.Assert(() => !ended); // LUCENENET: Ended field is never set, so this can never fail + //if (Debugging.AssertsEnabled) Debugging.Assert(!ended); // LUCENENET: Ended field is never set, so this can never fail var result = _fstEnum.Next(); if (result == null) return null; @@ -316,8 +316,7 @@ public override int NextDoc() else { if (Debugging.AssertsEnabled) Debugging.Assert( - () => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || - // LUCENENET TODO: This assert fails sometimes, which in turns causes _scratch.Utf8ToString() to throw an index out of range exception + StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END), () => "scratch=" + _scratch.Utf8ToString()); if (!first && (_liveDocs == null || _liveDocs.Get(_docId))) @@ -446,7 +445,7 @@ public override int NextDoc() } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.TERM) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.FIELD) || StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END)); if (!first && (_liveDocs == null || _liveDocs.Get(_docId))) @@ -472,7 +471,7 @@ public override int NextPosition() if (_readPositions) { SimpleTextUtil.ReadLine(_in, _scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS), () => "got line=" + _scratch.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.POS), () => "got line=" + _scratch.Utf8ToString()); UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.POS.Length, _scratch.Length - SimpleTextFieldsWriter.POS.Length, _scratchUtf162); pos = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length); @@ -485,12 +484,12 @@ public override int 
NextPosition() if (_readOffsets) { SimpleTextUtil.ReadLine(_in, _scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET), () => "got line=" + _scratch.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.START_OFFSET), () => "got line=" + _scratch.Utf8ToString()); UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.START_OFFSET.Length, _scratch.Length - SimpleTextFieldsWriter.START_OFFSET.Length, _scratchUtf162); _startOffset = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length); SimpleTextUtil.ReadLine(_in, _scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET), () => "got line=" + _scratch.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextFieldsWriter.END_OFFSET), () => "got line=" + _scratch.Utf8ToString()); UnicodeUtil.UTF8toUTF16(_scratch.Bytes, _scratch.Offset + SimpleTextFieldsWriter.END_OFFSET.Length, _scratch.Length - SimpleTextFieldsWriter.END_OFFSET.Length, _scratchUtf162); _endOffset = ArrayUtil.ParseInt32(_scratchUtf162.Chars, 0, _scratchUtf162.Length); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs index 31b85d931e..9f395262b7 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs @@ -165,8 +165,8 @@ public override void AddPosition(int position, BytesRef payload, int startOffset { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => endOffset >= startOffset); - Debugging.Assert(() => startOffset >= _lastStartOffset, + Debugging.Assert(endOffset >= startOffset); + Debugging.Assert(startOffset >= _lastStartOffset, () => "startOffset=" + startOffset + " lastStartOffset=" + _lastStartOffset); } _lastStartOffset = startOffset; @@ -180,7 +180,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset if (payload != null && payload.Length > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => payload.Length != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(payload.Length != 0); _outerInstance.Write(PAYLOAD); _outerInstance.Write(payload); _outerInstance.Newline(); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs index 0c2c553810..bf1aaa54af 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextLiveDocsFormat.cs @@ -67,7 +67,7 @@ public override IMutableBits NewLiveDocs(IBits existing) public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOContext context) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => info.HasDeletions); + if (Debugging.AssertsEnabled) Debugging.Assert(info.HasDeletions); var scratch = new BytesRef(); var scratchUtf16 = new CharsRef(); @@ -80,7 +80,7 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont input = dir.OpenChecksumInput(fileName, context); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SIZE)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SIZE)); var size = ParseInt32At(scratch, 
SIZE.Length, scratchUtf16); var bits = new BitSet(size); @@ -88,7 +88,7 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont SimpleTextUtil.ReadLine(input, scratch); while (!scratch.Equals(END)) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, DOC)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, DOC)); var docid = ParseInt32At(scratch, DOC.Length, scratchUtf16); bits.Set(docid); SimpleTextUtil.ReadLine(input, scratch); diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs index 8fdf142f1e..3bcae92ce2 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextSegmentInfoReader.cs @@ -53,43 +53,43 @@ public override SegmentInfo Read(Directory directory, string segmentName, IOCont try { SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_VERSION)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_VERSION)); string version = ReadString(SimpleTextSegmentInfoWriter.SI_VERSION.Length, scratch); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DOCCOUNT)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DOCCOUNT)); int docCount = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_DOCCOUNT.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_USECOMPOUND)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_USECOMPOUND)); bool isCompoundFile = Convert.ToBoolean(ReadString(SimpleTextSegmentInfoWriter.SI_USECOMPOUND.Length, scratch), CultureInfo.InvariantCulture); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_DIAG)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_DIAG)); int numDiag = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_NUM_DIAG.Length, scratch), CultureInfo.InvariantCulture); IDictionary<string, string> diagnostics = new Dictionary<string, string>(); for (int i = 0; i < numDiag; i++) { SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_KEY)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_KEY)); string key = ReadString(SimpleTextSegmentInfoWriter.SI_DIAG_KEY.Length, scratch); SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_VALUE)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_DIAG_VALUE)); string value = ReadString(SimpleTextSegmentInfoWriter.SI_DIAG_VALUE.Length, scratch); diagnostics[key] = value; } SimpleTextUtil.ReadLine(input, scratch); - 
if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_FILES)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_NUM_FILES)); int numFiles = Convert.ToInt32(ReadString(SimpleTextSegmentInfoWriter.SI_NUM_FILES.Length, scratch), CultureInfo.InvariantCulture); var files = new JCG.HashSet<string>(); for (int i = 0; i < numFiles; i++) { SimpleTextUtil.ReadLine(input, scratch); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_FILE)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(scratch, SimpleTextSegmentInfoWriter.SI_FILE)); string fileName = ReadString(SimpleTextSegmentInfoWriter.SI_FILE.Length, scratch); files.Add(fileName); } diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs index c17061f9f1..d7afaa0ec8 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs @@ -113,26 +113,26 @@ private void ReadIndex(int size) } } SimpleTextUtil.CheckFooter(input); - if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == _offsets.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(upto == _offsets.Length); } public override void VisitDocument(int n, StoredFieldVisitor visitor) { _input.Seek(_offsets[n]); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NUM)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NUM)); var numFields = ParseInt32At(SimpleTextStoredFieldsWriter.NUM.Length); for (var i = 0; i < numFields; i++) { ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.FIELD)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.FIELD)); int fieldNumber = ParseInt32At(SimpleTextStoredFieldsWriter.FIELD.Length); FieldInfo fieldInfo = _fieldInfos.FieldInfo(fieldNumber); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NAME)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.NAME)); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.TYPE)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.TYPE)); BytesRef type; if (EqualsAt(SimpleTextStoredFieldsWriter.TYPE_STRING, _scratch, SimpleTextStoredFieldsWriter.TYPE.Length)) @@ -171,7 +171,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor) break; case StoredFieldVisitor.Status.NO: ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE)); break; case StoredFieldVisitor.Status.STOP: return; @@ -182,7 +182,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor) private void ReadField(BytesRef type, FieldInfo fieldInfo, StoredFieldVisitor visitor) { 
ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextStoredFieldsWriter.VALUE)); if (Equals(type, SimpleTextStoredFieldsWriter.TYPE_STRING)) { visitor.StringField(fieldInfo, diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs index 71f23ab226..729d5b4dd9 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs @@ -109,7 +109,7 @@ private void ReadIndex(int maxDoc) } } SimpleTextUtil.CheckFooter(input); - if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == _offsets.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(upto == _offsets.Length); } public override Fields Get(int doc) @@ -119,7 +119,7 @@ public override Fields Get(int doc) _input.Seek(_offsets[doc]); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.NUMFIELDS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.NUMFIELDS)); var numFields = ParseInt32At(SimpleTextTermVectorsWriter.NUMFIELDS.Length); if (numFields == 0) { @@ -128,28 +128,28 @@ public override Fields Get(int doc) for (var i = 0; i < numFields; i++) { ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELD)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELD)); // skip fieldNumber: ParseInt32At(SimpleTextTermVectorsWriter.FIELD.Length); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDNAME)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDNAME)); var fieldName = ReadString(SimpleTextTermVectorsWriter.FIELDNAME.Length, _scratch); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPOSITIONS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPOSITIONS)); var positions = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDPOSITIONS.Length, _scratch), CultureInfo.InvariantCulture); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDOFFSETS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDOFFSETS)); var offsets = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDOFFSETS.Length, _scratch), CultureInfo.InvariantCulture); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPAYLOADS)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDPAYLOADS)); var payloads = Convert.ToBoolean(ReadString(SimpleTextTermVectorsWriter.FIELDPAYLOADS.Length, _scratch), CultureInfo.InvariantCulture); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, 
SimpleTextTermVectorsWriter.FIELDTERMCOUNT)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.FIELDTERMCOUNT)); var termCount = ParseInt32At(SimpleTextTermVectorsWriter.FIELDTERMCOUNT.Length); var terms = new SimpleTVTerms(offsets, positions, payloads); @@ -158,7 +158,7 @@ public override Fields Get(int doc) for (var j = 0; j < termCount; j++) { ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMTEXT)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMTEXT)); var term = new BytesRef(); var termLength = _scratch.Length - SimpleTextTermVectorsWriter.TERMTEXT.Length; term.Grow(termLength); @@ -169,7 +169,7 @@ public override Fields Get(int doc) terms.terms.Add(term, postings); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMFREQ)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.TERMFREQ)); postings.freq = ParseInt32At(SimpleTextTermVectorsWriter.TERMFREQ.Length); if (!positions && !offsets) continue; @@ -194,12 +194,12 @@ public override Fields Get(int doc) if (positions) { ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.POSITION)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.POSITION)); postings.positions[k] = ParseInt32At(SimpleTextTermVectorsWriter.POSITION.Length); if (payloads) { ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.PAYLOAD)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.PAYLOAD)); if (_scratch.Length - SimpleTextTermVectorsWriter.PAYLOAD.Length == 0) { postings.payloads[k] = null; @@ -217,11 +217,11 @@ public override Fields Get(int doc) if (!offsets) continue; ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.STARTOFFSET)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.STARTOFFSET)); postings.startOffsets[k] = ParseInt32At(SimpleTextTermVectorsWriter.STARTOFFSET.Length); ReadLine(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.ENDOFFSET)); + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(_scratch, SimpleTextTermVectorsWriter.ENDOFFSET)); postings.endOffsets[k] = ParseInt32At(SimpleTextTermVectorsWriter.ENDOFFSET.Length); } } @@ -444,7 +444,7 @@ public override int Freq { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => _freqRenamed != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(_freqRenamed != -1); return _freqRenamed; } } @@ -495,7 +495,7 @@ public override int Freq if (_positions != null) return _positions.Length; - if (Debugging.AssertsEnabled) Debugging.Assert(() => _startOffsets != null); + if (Debugging.AssertsEnabled) Debugging.Assert(_startOffsets != null); return _startOffsets.Length; } } @@ -540,7 +540,7 @@ public override BytesRef GetPayload() public override int NextPosition() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => (_positions != null && _nextPos 
< _positions.Length) || + if (Debugging.AssertsEnabled) Debugging.Assert((_positions != null && _nextPos < _positions.Length) || _startOffsets != null && _nextPos < _startOffsets.Length); if (_positions != null) diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs index b2dd7efb5e..e178c3e790 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs @@ -143,7 +143,7 @@ public override void StartTerm(BytesRef term, int freq) public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => _positions || _offsets); + if (Debugging.AssertsEnabled) Debugging.Assert(_positions || _offsets); if (_positions) { @@ -156,7 +156,7 @@ public override void AddPosition(int position, int startOffset, int endOffset, B Write(PAYLOAD); if (payload != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => payload.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(payload.Length > 0); Write(payload); } NewLine(); diff --git a/src/Lucene.Net.Expressions/ExpressionComparator.cs b/src/Lucene.Net.Expressions/ExpressionComparator.cs index 4905386464..034020176e 100644 --- a/src/Lucene.Net.Expressions/ExpressionComparator.cs +++ b/src/Lucene.Net.Expressions/ExpressionComparator.cs @@ -49,11 +49,11 @@ public override void SetScorer(Scorer scorer) base.SetScorer(scorer); // TODO: might be cleaner to lazy-init 'source' and set scorer after? - if (Debugging.AssertsEnabled) Debugging.Assert(() => readerContext != null); + if (Debugging.AssertsEnabled) Debugging.Assert(readerContext != null); try { var context = new Dictionary<string, object>(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => scorer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(scorer != null); context["scorer"] = scorer; scores = source.GetValues(context, readerContext); } diff --git a/src/Lucene.Net.Expressions/ScoreFunctionValues.cs b/src/Lucene.Net.Expressions/ScoreFunctionValues.cs index a42d4ecf85..a28bc3f2bd 100644 --- a/src/Lucene.Net.Expressions/ScoreFunctionValues.cs +++ b/src/Lucene.Net.Expressions/ScoreFunctionValues.cs @@ -43,7 +43,7 @@ public override double DoubleVal(int document) { try { - if (Debugging.AssertsEnabled) Debugging.Assert(() => document == scorer.DocID); + if (Debugging.AssertsEnabled) Debugging.Assert(document == scorer.DocID); return scorer.GetScore(); } catch (IOException exception) diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs index 7d26a0e8d6..40c050d807 100644 --- a/src/Lucene.Net.Facet/DrillDownQuery.cs +++ b/src/Lucene.Net.Facet/DrillDownQuery.cs @@ -86,7 +86,7 @@ internal DrillDownQuery(FacetsConfig config, Filter filter, DrillDownQuery other { throw new ArgumentException("cannot apply filter unless baseQuery isn't null; pass ConstantScoreQuery instead"); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => clauses.Length == 1 + other.drillDownDims.Count, () => clauses.Length + " vs " + (1 + other.drillDownDims.Count)); + if (Debugging.AssertsEnabled) Debugging.Assert(clauses.Length == 1 + other.drillDownDims.Count, () => clauses.Length + " vs " + (1 + other.drillDownDims.Count)); drillDownDims.PutAll(other.drillDownDims); query.Add(new FilteredQuery(clauses[0].Query, filter), Occur.MUST); for (int i = 1; i < clauses.Length; i++) diff --git a/src/Lucene.Net.Facet/DrillSideways.cs
b/src/Lucene.Net.Facet/DrillSideways.cs index b144bb2d95..80d0509e62 100644 --- a/src/Lucene.Net.Facet/DrillSideways.cs +++ b/src/Lucene.Net.Facet/DrillSideways.cs @@ -175,7 +175,7 @@ public virtual DrillSidewaysResult Search(DrillDownQuery query, ICollector hitCo } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => clauses.Length == 1 + drillDownDims.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(clauses.Length == 1 + drillDownDims.Count); baseQuery = clauses[0].Query; startClause = 1; } diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs index ccbc0c2f49..afd9bc16b7 100644 --- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs +++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs @@ -89,7 +89,7 @@ public override bool Score(ICollector collector, int maxDoc) // TODO: if we ever allow null baseScorer ... it will // mean we DO score docs out of order ... hmm, or if we // change up the order of the conjuntions below - if (Debugging.AssertsEnabled) Debugging.Assert(() => baseScorer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(baseScorer != null); // some scorers, eg ReqExlScorer, can hit NPE if cost is called after nextDoc long baseQueryCost = baseScorer.GetCost(); @@ -395,7 +395,7 @@ private void DoDrillDownAdvanceScoring(ICollector collector, DocIdSetIterator[] while (slot0 < CHUNK && (slot0 = seen.NextSetBit(slot0)) != -1) { int ddDocID = docIDs[slot0]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => ddDocID != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(ddDocID != -1); int baseDocID = baseScorer.DocID; if (baseDocID < ddDocID) @@ -550,7 +550,7 @@ private void DoUnionScoring(ICollector collector, DocIdSetIterator[] disis, ICol //} // Mark slot as valid: - if (Debugging.AssertsEnabled) Debugging.Assert(() => docIDs[slot] != docID, () => "slot=" + slot + " docID=" + docID); + if (Debugging.AssertsEnabled) Debugging.Assert(docIDs[slot] != docID, () => "slot=" + slot + " docID=" + docID); docIDs[slot] = docID; scores[slot] = baseScorer.GetScore(); filledSlots[filledCount++] = slot; diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs index b0f0f0e293..51830651b4 100644 --- a/src/Lucene.Net.Facet/FacetsConfig.cs +++ b/src/Lucene.Net.Facet/FacetsConfig.cs @@ -694,7 +694,7 @@ public static string[] StringToPath(string s) } } parts.Add(new string(buffer, 0, upto)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !lastEscape); + if (Debugging.AssertsEnabled) Debugging.Assert(!lastEscape); return parts.ToArray(); } } diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs index 0641fc6db2..c8b8998ef0 100644 --- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs +++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs @@ -120,7 +120,7 @@ public Int64RangeCounter(Int64Range[] ranges) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => flags == 2); + if (Debugging.AssertsEnabled) Debugging.Assert(flags == 2); // This point is only the end of an interval; attach // it to last interval: elementaryIntervals.Add(new InclusiveRange(prev, v)); @@ -275,7 +275,7 @@ private sealed class InclusiveRange public InclusiveRange(long start, long end) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => end >= start); + if (Debugging.AssertsEnabled) Debugging.Assert(end >= start); this.Start = start; this.End = end; } @@ -349,7 +349,7 @@ internal void AddOutputs(int index, Int64Range range) } else if (left != null) { - if 
(Debugging.AssertsEnabled) Debugging.Assert(() => right != null); + if (Debugging.AssertsEnabled) Debugging.Assert(right != null); // Recurse: left.AddOutputs(index, range); right.AddOutputs(index, range); @@ -361,7 +361,7 @@ internal void ToString(StringBuilder sb, int depth) Indent(sb, depth); if (left == null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => right == null); + if (Debugging.AssertsEnabled) Debugging.Assert(right == null); sb.Append("leaf: " + start + " to " + end); } else @@ -377,7 +377,7 @@ internal void ToString(StringBuilder sb, int depth) if (left != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => right != null); + if (Debugging.AssertsEnabled) Debugging.Assert(right != null); left.ToString(sb, depth + 1); right.ToString(sb, depth + 1); } diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs index 92f5d57e41..7f3f6b2643 100644 --- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs +++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs @@ -65,7 +65,7 @@ private CategoryPath(CategoryPath copyFrom, int prefixLen) // while the code which calls this method is safe, at some point a test // tripped on AIOOBE in toString, but we failed to reproduce. adding the // assert as a safety check. - if (Debugging.AssertsEnabled) Debugging.Assert(() => prefixLen > 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(prefixLen > 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); this.Components = copyFrom.Components; Length = prefixLen; } @@ -75,7 +75,7 @@ private CategoryPath(CategoryPath copyFrom, int prefixLen) /// public CategoryPath(params string[] components) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => components.Length > 0, () => "use CategoryPath.EMPTY to create an empty path"); + if (Debugging.AssertsEnabled) Debugging.Assert(components.Length > 0, () => "use CategoryPath.EMPTY to create an empty path"); foreach (string comp in components) { if (string.IsNullOrEmpty(comp)) diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs index 52df74c584..79da91fe3c 100644 --- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs +++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs @@ -190,7 +190,7 @@ public DirectoryTaxonomyWriter(Directory directory, OpenMode openMode, // verify (to some extent) that merge policy in effect would preserve category docids if (indexWriter != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !(indexWriter.Config.MergePolicy is TieredMergePolicy), () => "for preserving category docids, merging none-adjacent segments is not allowed"); + if (Debugging.AssertsEnabled) Debugging.Assert(!(indexWriter.Config.MergePolicy is TieredMergePolicy), () => "for preserving category docids, merging none-adjacent segments is not allowed"); } // after we opened the writer, and the index is locked, it's safe to check @@ -826,7 +826,7 @@ public virtual void SetCacheMissesUntilFill(int i) FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(t.Utf8ToString())); docsEnum = termsEnum.Docs(null, docsEnum, 
DocsFlags.NONE); bool res = cache.Put(cp, docsEnum.NextDoc() + ctx.DocBase); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !res, () => "entries should not have been evicted from the cache"); + if (Debugging.AssertsEnabled) Debugging.Assert(!res, () => "entries should not have been evicted from the cache"); } else { @@ -907,7 +907,7 @@ public virtual int GetParent(int ordinal) } int[] parents = GetTaxoArrays().Parents; - if (Debugging.AssertsEnabled) Debugging.Assert(() => ordinal < parents.Length, () => "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !"); + if (Debugging.AssertsEnabled) Debugging.Assert(ordinal < parents.Length, () => "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !"); return parents[ordinal]; } diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs index 52e71c9389..dd5030ebba 100644 --- a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs +++ b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs @@ -77,7 +77,7 @@ public TaxonomyIndexArrays(IndexReader reader) public TaxonomyIndexArrays(IndexReader reader, TaxonomyIndexArrays copyFrom) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => copyFrom != null); + if (Debugging.AssertsEnabled) Debugging.Assert(copyFrom != null); // note that copyParents.length may be equal to reader.maxDoc(). this is not a bug // it may be caused if e.g. the taxonomy segments were merged, and so an updated diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs index c9bc017fec..562d80ed7c 100644 --- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs +++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs @@ -68,7 +68,7 @@ private FacetLabel(FacetLabel copyFrom, int prefixLen) // while the code which calls this method is safe, at some point a test // tripped on AIOOBE in toString, but we failed to reproduce. adding the // assert as a safety check. 
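Note that only the condition loses its lambda in this conversion; the message argument stays a factory. A short sketch of the two-argument shape, taken from the DirectoryTaxonomyWriter.GetParent hunk above:

    // The condition is now evaluated eagerly (behind the AssertsEnabled
    // guard), but the message remains a Func<string>, so the string
    // concatenation below only runs if the assertion actually fails.
    if (Debugging.AssertsEnabled)
        Debugging.Assert(ordinal < parents.Length,
            () => "requested ordinal (" + ordinal + "); parents.length (" + parents.Length + ") !");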
- if (Debugging.AssertsEnabled) Debugging.Assert(() => prefixLen >= 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(prefixLen >= 0 && prefixLen <= copyFrom.Components.Length, () => "prefixLen cannot be negative nor larger than the given components' length: prefixLen=" + prefixLen + " components.length=" + copyFrom.Components.Length); this.Components = copyFrom.Components; Length = prefixLen; } diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs index b71ed1fa44..15e8233c4f 100644 --- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs +++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs @@ -58,7 +58,7 @@ protected virtual void Rollup() if (ft.IsHierarchical && ft.IsMultiValued == false) { int dimRootOrd = m_taxoReader.GetOrdinal(new FacetLabel(dim)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dimRootOrd > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(dimRootOrd > 0); m_values[dimRootOrd] += Rollup(m_children[dimRootOrd]); } } diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs index fa4cec5d35..d3aee8ff2f 100644 --- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs +++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs @@ -143,7 +143,7 @@ protected TaxonomyReader() // LUCENENET specific - marked protected instead of p public static T OpenIfChanged(T oldTaxoReader) where T : TaxonomyReader { T newTaxoReader = (T)oldTaxoReader.DoOpenIfChanged(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => newTaxoReader != oldTaxoReader); + if (Debugging.AssertsEnabled) Debugging.Assert(newTaxoReader != oldTaxoReader); return newTaxoReader; } diff --git a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs index 2e39fefda1..a7bd3957bd 100644 --- a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs @@ -248,7 +248,7 @@ public virtual void Collect(int doc) bottomGroup = m_orderedGroups.Last(); m_orderedGroups.Remove(bottomGroup); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count == topNGroups - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(m_orderedGroups.Count == topNGroups - 1); groupMap.Remove(bottomGroup.GroupValue); @@ -263,7 +263,7 @@ public virtual void Collect(int doc) groupMap[bottomGroup.GroupValue] = bottomGroup; m_orderedGroups.Add(bottomGroup); - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count == topNGroups); + if (Debugging.AssertsEnabled) Debugging.Assert(m_orderedGroups.Count == topNGroups); int lastComparerSlot = m_orderedGroups.Last().ComparerSlot; foreach (FieldComparer fc in comparers) @@ -315,7 +315,7 @@ public virtual void Collect(int doc) prevLast = m_orderedGroups.Last(); m_orderedGroups.Remove(group); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count == topNGroups - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(m_orderedGroups.Count == topNGroups - 1); } else { @@ -333,7 +333,7 @@ public virtual void Collect(int doc) if (m_orderedGroups != null) { m_orderedGroups.Add(group); - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count == topNGroups); + if 
(Debugging.AssertsEnabled) Debugging.Assert(m_orderedGroups.Count == topNGroups); var newLast = m_orderedGroups.Last(); // If we changed the value of the last group, or changed which group was last, then update bottom: if (group == newLast || prevLast != newLast) @@ -376,7 +376,7 @@ private void BuildSortedSet() var comparer = new BuildSortedSetComparer(this); m_orderedGroups = new JCG.SortedSet<CollectedSearchGroup<TGroupValue>>(comparer); m_orderedGroups.UnionWith(groupMap.Values); - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_orderedGroups.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(m_orderedGroups.Count > 0); foreach (FieldComparer fc in comparers) { diff --git a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs index 293475449d..3d73f2d9c2 100644 --- a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs @@ -155,8 +155,8 @@ protected internal override bool LessThan(OneGroup group1, OneGroup group2) //System.out.println(" ltcheck"); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => group1 != group2); - Debugging.Assert(() => group1.comparerSlot != group2.comparerSlot); + Debugging.Assert(group1 != group2); + Debugging.Assert(group1.comparerSlot != group2.comparerSlot); } int numComparers = outerInstance.comparers.Length; @@ -224,7 +224,7 @@ private void ProcessGroup() { // Replace bottom element in PQ and then updateTop OneGroup og = groupQueue.Top; - if (Debugging.AssertsEnabled) Debugging.Assert(() => og != null); + if (Debugging.AssertsEnabled) Debugging.Assert(og != null); og.count = subDocUpto; og.topGroupDoc = docBase + topGroupDoc; // Swap pending docs @@ -524,7 +524,7 @@ public virtual void Collect(int doc) { if (subDocUpto == 1) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !queueFull); + if (Debugging.AssertsEnabled) Debugging.Assert(!queueFull); //System.out.println(" init copy to bottomSlot=" + bottomSlot); foreach (FieldComparer fc in comparers) diff --git a/src/Lucene.Net.Grouping/SearchGroup.cs b/src/Lucene.Net.Grouping/SearchGroup.cs index 32f47568ac..680b2570b1 100644 --- a/src/Lucene.Net.Grouping/SearchGroup.cs +++ b/src/Lucene.Net.Grouping/SearchGroup.cs @@ -186,12 +186,12 @@ private bool NeverEquals(object other) { if (groupValue == null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => otherMergedGroup.groupValue != null); + if (Debugging.AssertsEnabled) Debugging.Assert(otherMergedGroup.groupValue != null); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !groupValueIsValueType + if (Debugging.AssertsEnabled) Debugging.Assert(!groupValueIsValueType ?
JCG.EqualityComparer<T>.Default.Equals(groupValue, otherMergedGroup.groupValue) // LUCENENET specific - use J2N.Collections.StructuralEqualityComparer.Default.Equals() if we have a reference type @@ -206,7 +206,7 @@ public override bool Equals(object other) { // We never have another MergedGroup instance with // same groupValue - if (Debugging.AssertsEnabled) Debugging.Assert(() => NeverEquals(other)); + if (Debugging.AssertsEnabled) Debugging.Assert(NeverEquals(other)); if (other is MergedGroup<T> otherMergedGroup) { @@ -294,7 +294,7 @@ public virtual int Compare(MergedGroup group, MergedGroup other) } // Tie break by min shard index: - if (Debugging.AssertsEnabled) Debugging.Assert(() => group.MinShardIndex != other.MinShardIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(group.MinShardIndex != other.MinShardIndex); return group.MinShardIndex - other.MinShardIndex; } } @@ -327,7 +327,7 @@ private void UpdateNextGroup(int topN, ShardIter shard) //System.out.println(" new"); mergedGroup = new MergedGroup<T>(group.GroupValue); mergedGroup.MinShardIndex = shard.ShardIndex; - if (Debugging.AssertsEnabled) Debugging.Assert(() => group.SortValues != null); + if (Debugging.AssertsEnabled) Debugging.Assert(group.SortValues != null); mergedGroup.TopValues = group.SortValues; groupsSeen[group.GroupValue] = mergedGroup; mergedGroup.IsInQueue = true; diff --git a/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs b/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs index 2d65424f42..45ddcbc9e0 100644 --- a/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs +++ b/src/Lucene.Net.Grouping/Term/TermGroupFacetCollector.cs @@ -175,7 +175,7 @@ public override void SetNextReader(AtomicReaderContext context) BytesRef facetEndPrefix = BytesRef.DeepCopyOf(m_facetPrefix); facetEndPrefix.Append(UnicodeUtil.BIG_TERM); m_endFacetOrd = facetFieldTermsIndex.LookupTerm(facetEndPrefix); - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_endFacetOrd < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(m_endFacetOrd < 0); m_endFacetOrd = -m_endFacetOrd - 1; // Points to the ord one higher than facetEndPrefix } else @@ -203,7 +203,7 @@ internal SegmentResult(int[] counts, int total, TermsEnum tenum, int startFacetO this.m_mergePos = startFacetOrd == -1 ? 1 : startFacetOrd + 1; if (m_mergePos < m_maxTermPos) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => tenum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(tenum != null); tenum.SeekExact(startFacetOrd == -1 ?
0 : startFacetOrd); m_mergeTerm = tenum.Term; } diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs index ffc4b8e9f6..a26ba1e846 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs @@ -292,7 +292,7 @@ public override int StartOffset { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentStartOffset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(currentStartOffset >= 0); return currentStartOffset; } } @@ -301,7 +301,7 @@ public override int EndOffset { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentEndOffset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(currentEndOffset >= 0); return currentEndOffset; } } diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs index 22805b2553..fa0f236809 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs @@ -43,7 +43,7 @@ public sealed class Passage internal void AddMatch(int startOffset, int endOffset, BytesRef term) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset >= this.startOffset && startOffset <= this.endOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(startOffset >= this.startOffset && startOffset <= this.endOffset); if (numMatches == matchStarts.Length) { int newLength = ArrayUtil.Oversize(numMatches + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF); @@ -57,7 +57,7 @@ internal void AddMatch(int startOffset, int endOffset, BytesRef term) matchEnds = newMatchEnds; matchTerms = newMatchTerms; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => matchStarts.Length == matchEnds.Length && matchEnds.Length == matchTerms.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(matchStarts.Length == matchEnds.Length && matchEnds.Length == matchTerms.Length); matchStarts[numMatches] = startOffset; matchEnds[numMatches] = endOffset; matchTerms[numMatches] = term; diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs index b5b5a8abc5..09786eb01b 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs @@ -542,7 +542,7 @@ private IDictionary HighlightField(string field, string[] contents, AtomicReaderContext subContext = leaves[leaf]; AtomicReader r = subContext.AtomicReader; - if (Debugging.AssertsEnabled) Debugging.Assert(() => leaf >= lastLeaf); // increasing order + if (Debugging.AssertsEnabled) Debugging.Assert(leaf >= lastLeaf); // increasing order // if the segment has changed, we must initialize new enums. if (leaf != lastLeaf) @@ -671,7 +671,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength // LUCENE-5166: this hit would span the content limit... however more valid // hits may exist (they are sorted by start). so we pretend like we never // saw this term, it won't cause a passage to be added to passageQueue or anything. 
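One idiom worth calling out before the PostingsHighlighter hunks below: Debugging.Assert(false) as an unreachable-path marker. Because AssertsEnabled is a runtime flag rather than a compile-time constant, the compiler still sees a reachable path and demands a return statement after it. A sketch of the pattern as it appears in those hunks:

    // Marks a path the author believes is dead. With asserts enabled this
    // throws an AssertionException immediately; with asserts disabled it
    // falls through to the return that exists only to satisfy the compiler.
    if (Debugging.AssertsEnabled) Debugging.Assert(false);
    return null;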
- if (Debugging.AssertsEnabled) Debugging.Assert(() => EMPTY.StartOffset == int.MaxValue); + if (Debugging.AssertsEnabled) Debugging.Assert(EMPTY.StartOffset == int.MaxValue); if (start < contentLength && end > contentLength) { continue; } @@ -714,7 +714,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength return passages; } // advance breakiterator - if (Debugging.AssertsEnabled) Debugging.Assert(() => BreakIterator.Done < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(BreakIterator.Done < 0); current.startOffset = Math.Max(bi.Preceding(start + 1), 0); current.endOffset = Math.Min(bi.Next(), contentLength); } @@ -727,7 +727,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength { // multitermquery match, pull from payload term = off.dp.GetPayload(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(term != null); } current.AddMatch(start, end, term); if (off.pos == dp.Freq) @@ -751,7 +751,7 @@ private Passage[] HighlightDoc(string field, BytesRef[] terms, int contentLength } // Dead code but compiler disagrees: - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(false); return null; } @@ -766,7 +766,7 @@ protected virtual Passage[] GetEmptyHighlight(string fieldName, BreakIterator bi // BreakIterator should be un-next'd: List<Passage> passages = new List<Passage>(); int pos = bi.Current; - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(pos == 0); while (passages.Count < maxPassages) { int next = bi.Next(); @@ -883,7 +883,7 @@ private class LimitedStoredFieldVisitor : StoredFieldVisitor public LimitedStoredFieldVisitor(string[] fields, char[] valueSeparators, int maxLength) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Length == valueSeparators.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(fields.Length == valueSeparators.Length); this.fields = fields; this.valueSeparators = valueSeparators; this.maxLength = maxLength; @@ -896,7 +896,7 @@ public LimitedStoredFieldVisitor(string[] fields, char[] valueSeparators, int ma public override void StringField(Index.FieldInfo fieldInfo, string value) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentField >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(currentField >= 0); StringBuilder builder = builders[currentField]; if (builder.Length > 0 && builder.Length < maxLength) { diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs index 7f656b8f1d..1c328c2156 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs @@ -142,7 +142,7 @@ public IteratorQueue(IEnumerator iter) { this.iter = iter; T removeTop = RemoveTop(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => removeTop == null); + if (Debugging.AssertsEnabled) Debugging.Assert(removeTop == null); } public T Top() diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs index 98185988fc..fda94467b2 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldTermStack.cs @@ -140,7 +140,7 @@ public FieldTermStack(IndexReader reader, int docId, string fieldName, FieldQuer
TermInfo current = termList[i]; if (current.Position == currentPos) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => previous != null); + if (Debugging.AssertsEnabled) Debugging.Assert(previous != null); previous.SetNext(current); previous = current; //iterator.Remove(); diff --git a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs index 09d424a804..f3e0297838 100644 --- a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs @@ -246,7 +246,7 @@ public override int NextDoc() } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => _childDoc < _parentDoc, () => "childDoc=" + _childDoc + " parentDoc=" + _parentDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(_childDoc < _parentDoc, () => "childDoc=" + _childDoc + " parentDoc=" + _parentDoc); _childDoc++; if (_acceptDocs != null && !_acceptDocs.Get(_childDoc)) { @@ -280,7 +280,7 @@ public override float GetScore() public override int Advance(int childTarget) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => childTarget >= _parentBits.Length || !_parentBits.Get(childTarget)); + if (Debugging.AssertsEnabled) Debugging.Assert(childTarget >= _parentBits.Length || !_parentBits.Get(childTarget)); //System.out.println("Q.advance childTarget=" + childTarget); if (childTarget == NO_MORE_DOCS) @@ -289,14 +289,14 @@ public override int Advance(int childTarget) return _childDoc = _parentDoc = NO_MORE_DOCS; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => _childDoc == -1 || childTarget != _parentDoc, () => "childTarget=" + childTarget); + if (Debugging.AssertsEnabled) Debugging.Assert(_childDoc == -1 || childTarget != _parentDoc, () => "childTarget=" + childTarget); if (_childDoc == -1 || childTarget > _parentDoc) { // Advance to new parent: _parentDoc = _parentScorer.Advance(childTarget); ValidateParentDoc(); //System.out.println(" advance to parentDoc=" + parentDoc); - if (Debugging.AssertsEnabled) Debugging.Assert(() => _parentDoc > childTarget); + if (Debugging.AssertsEnabled) Debugging.Assert(_parentDoc > childTarget); if (_parentDoc == NO_MORE_DOCS) { //System.out.println(" END"); @@ -312,7 +312,7 @@ public override int Advance(int childTarget) childTarget = Math.Max(childTarget, firstChild); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => childTarget < _parentDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(childTarget < _parentDoc); // Advance within children of current parent: _childDoc = childTarget; diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs index 71deac5f2f..61550ca69b 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs @@ -284,7 +284,7 @@ private void CopyGroups(OneGroup og) og.counts[scorerIDX] = joinScorer.ChildCount; //System.out.println(" count=" + og.counts[scorerIDX]); og.docs[scorerIDX] = joinScorer.SwapChildDocs(og.docs[scorerIDX]); - if (Debugging.AssertsEnabled) Debugging.Assert(() => og.docs[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.docs[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); + if (Debugging.AssertsEnabled) Debugging.Assert(og.docs[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.docs[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); //System.out.println(" len=" + og.docs[scorerIDX].length); /* for(int idx=0;idx og.scores[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + 
og.scores[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); + if (Debugging.AssertsEnabled) Debugging.Assert(og.scores[scorerIDX].Length >= og.counts[scorerIDX], () => "length=" + og.scores[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]); } } else diff --git a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs index 84f41fdae4..ae816ec7cd 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs @@ -284,7 +284,7 @@ public override int NextDoc() } //System.out.println(" parentDoc=" + parentDoc); - if (Debugging.AssertsEnabled) Debugging.Assert(() => _parentDoc != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(_parentDoc != -1); //System.out.println(" nextChildDoc=" + nextChildDoc); if (_acceptDocs != null && !_acceptDocs.Get(_parentDoc)) @@ -402,7 +402,7 @@ public override int Advance(int parentTarget) _prevParentDoc = _parentBits.PrevSetBit(parentTarget - 1); //System.out.println(" rolled back to prevParentDoc=" + prevParentDoc + " vs parentDoc=" + parentDoc); - if (Debugging.AssertsEnabled) Debugging.Assert(() => _prevParentDoc >= _parentDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(_prevParentDoc >= _parentDoc); if (_prevParentDoc > _nextChildDoc) { _nextChildDoc = _childScorer.Advance(_prevParentDoc); diff --git a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs index c6ef1a5b03..6f6d3fc3f7 100644 --- a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs @@ -250,7 +250,7 @@ internal int BinarySearch(BytesRef b, BytesRef bytesRef, int low, int high, Byte return mid; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => comparer.Compare(bytesRef, b) != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(comparer.Compare(bytesRef, b) != 0); return -(low + 1); } @@ -285,7 +285,7 @@ public override SeekStatus SeekCeil(BytesRef text) public override void SeekExact(long ord) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord < info.terms.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(ord < info.terms.Count); termUpto = (int)ord; } @@ -332,7 +332,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null); + if (Debugging.AssertsEnabled) Debugging.Assert(state != null); this.SeekExact(((OrdTermState)state).Ord); } @@ -452,8 +452,8 @@ public override int NextPosition() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => posUpto++ < freq); - Debugging.Assert(() => !sliceReader.IsEndOfSlice, () => " stores offsets : " + startOffset); + Debugging.Assert(posUpto++ < freq); + Debugging.Assert(!sliceReader.IsEndOfSlice, () => " stores offsets : " + startOffset); } if (outerInstance.outerInstance.storeOffsets) { diff --git a/src/Lucene.Net.Memory/MemoryIndex.cs b/src/Lucene.Net.Memory/MemoryIndex.cs index 18df8c9215..19ea43eba4 100644 --- a/src/Lucene.Net.Memory/MemoryIndex.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.cs @@ -217,7 +217,7 @@ internal MemoryIndex(bool storeOffsets, long maxReusedBytes) this.bytesUsed = Counter.NewCounter(); int maxBufferedByteBlocks = (int)((maxReusedBytes / 2) / ByteBlockPool.BYTE_BLOCK_SIZE); int maxBufferedIntBlocks = (int)((maxReusedBytes - (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE)) / 
(Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32) <= maxReusedBytes); + if (Debugging.AssertsEnabled) Debugging.Assert((maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32) <= maxReusedBytes); byteBlockPool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, maxBufferedByteBlocks, bytesUsed)); intBlockPool = new Int32BlockPool(new RecyclingInt32BlockAllocator(Int32BlockPool.INT32_BLOCK_SIZE, maxBufferedIntBlocks, bytesUsed)); postingsWriter = new Int32BlockPool.SliceWriter(intBlockPool); @@ -741,9 +741,9 @@ public override int[] Init() freq = new int[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_INT32)]; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => start.Length >= ord.Length); - Debugging.Assert(() => end.Length >= ord.Length); - Debugging.Assert(() => freq.Length >= ord.Length); + Debugging.Assert(start.Length >= ord.Length); + Debugging.Assert(end.Length >= ord.Length); + Debugging.Assert(freq.Length >= ord.Length); } return ord; } @@ -759,9 +759,9 @@ public override int[] Grow() } if (Debugging.AssertsEnabled) { - Debugging.Assert(() => start.Length >= ord.Length); - Debugging.Assert(() => end.Length >= ord.Length); - Debugging.Assert(() => freq.Length >= ord.Length); + Debugging.Assert(start.Length >= ord.Length); + Debugging.Assert(end.Length >= ord.Length); + Debugging.Assert(freq.Length >= ord.Length); } return ord; } diff --git a/src/Lucene.Net.Misc/Document/LazyDocument.cs b/src/Lucene.Net.Misc/Document/LazyDocument.cs index bc2aa77852..3e6e7515c7 100644 --- a/src/Lucene.Net.Misc/Document/LazyDocument.cs +++ b/src/Lucene.Net.Misc/Document/LazyDocument.cs @@ -120,7 +120,7 @@ private void FetchRealValues(string name, int fieldNum) fields.TryGetValue(fieldNum, out lazyValues); IIndexableField[] realValues = d.GetFields(name); - if (Debugging.AssertsEnabled) Debugging.Assert(() => realValues.Length <= lazyValues.Count, + if (Debugging.AssertsEnabled) Debugging.Assert(realValues.Length <= lazyValues.Count, () => "More lazy values then real values for field: " + name); for (int i = 0; i < lazyValues.Count; i++) @@ -166,8 +166,8 @@ internal virtual IIndexableField GetRealValue() } if (Debugging.AssertsEnabled) { - Debugging.Assert(() => HasBeenLoaded, () => "field value was not lazy loaded"); - Debugging.Assert(() => realValue.Name.Equals(Name, StringComparison.Ordinal), () => "realvalue name != name: " + realValue.Name + " != " + Name); + Debugging.Assert(HasBeenLoaded, () => "field value was not lazy loaded"); + Debugging.Assert(realValue.Name.Equals(Name, StringComparison.Ordinal), () => "realvalue name != name: " + realValue.Name + " != " + Name); } return realValue; diff --git a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs index 17695f7f54..29c6f005dd 100644 --- a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs @@ -297,7 +297,7 @@ public void UndeleteAll() if (m_input.HasDeletions) { IBits oldLiveDocs = m_input.LiveDocs; - if (Debugging.AssertsEnabled) Debugging.Assert(() => oldLiveDocs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(oldLiveDocs != null); // this loop is a 
little bit ineffective, as Bits has no nextSetBit(): for (int i = 0; i < maxDoc; i++) { diff --git a/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs b/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs index 8f96af9521..b90a7133ac 100644 --- a/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/PKIndexSplitter.cs @@ -156,7 +156,7 @@ public DocumentFilteredAtomicIndexReader(AtomicReaderContext context, Filter pre if (m_input.HasDeletions) { IBits oldLiveDocs = m_input.LiveDocs; - if (Debugging.AssertsEnabled) Debugging.Assert(() => oldLiveDocs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(oldLiveDocs != null); DocIdSetIterator it = bits.GetIterator(); for (int i = it.NextDoc(); i < maxDoc; i = it.NextDoc()) { diff --git a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs index cfff119466..4aacfc42b8 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs @@ -85,8 +85,8 @@ internal static bool IsConsistent(DocMap docMap) int oldID = docMap.NewToOld(newID); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => newID >= 0 && newID < maxDoc, () => "doc IDs must be in [0-" + maxDoc + "[, got " + newID); - Debugging.Assert(() => i == oldID, () => "mapping is inconsistent: " + i + " --oldToNew--> " + newID + " --newToOld--> " + oldID); + Debugging.Assert(newID >= 0 && newID < maxDoc, () => "doc IDs must be in [0-" + maxDoc + "[, got " + newID); + Debugging.Assert(i == oldID, () => "mapping is inconsistent: " + i + " --oldToNew--> " + newID + " --newToOld--> " + oldID); } if (i != oldID || newID < 0 || newID >= maxDoc) { diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs index 63745337eb..bb6eace596 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs @@ -761,7 +761,7 @@ internal static AtomicReader Wrap(AtomicReader reader, Sorter.DocMap docMap) { throw new ArgumentException("reader.MaxDoc should be equal to docMap.Count, got" + reader.MaxDoc + " != " + docMap.Count); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => Sorter.IsConsistent(docMap)); + if (Debugging.AssertsEnabled) Debugging.Assert(Sorter.IsConsistent(docMap)); return new SortingAtomicReader(reader, docMap); } diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs index 70282442eb..c692723a06 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs @@ -134,7 +134,7 @@ public override MergePolicy.DocMap GetDocMap(MergeState mergeState) { return base.GetDocMap(mergeState); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => mergeState.DocMaps.Length == 1); // we returned a singleton reader + if (Debugging.AssertsEnabled) Debugging.Assert(mergeState.DocMaps.Length == 1); // we returned a singleton reader MonotonicAppendingInt64Buffer deletes = GetDeletes(unsortedReaders); return new DocMapAnonymousInnerClassHelper(this, mergeState, deletes); } diff --git a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs index 03da23bd4d..bf03c34e3a 100644 --- a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs +++ b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs @@ -88,7 +88,7 @@ public override object Subtract(object @object, object inc) public override object Add(object prefix, object output) 
{ - if (Debugging.AssertsEnabled) Debugging.Assert(() => !(prefix is IList)); + if (Debugging.AssertsEnabled) Debugging.Assert(!(prefix is IList)); if (!(output is IList)) { return outputs.Add((T)prefix, (T)output); @@ -107,7 +107,7 @@ public override object Add(object prefix, object output) public override void Write(object output, DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !(output is IList)); + if (Debugging.AssertsEnabled) Debugging.Assert(!(output is IList)); outputs.Write((T)output, @out); } diff --git a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs index 40b4361990..b5a3e93c24 100644 --- a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs +++ b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs @@ -72,8 +72,8 @@ public TwoInt64s(long first, long second) this.second = second; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => first >= 0); - Debugging.Assert(() => second >= 0); + Debugging.Assert(first >= 0); + Debugging.Assert(second >= 0); } } @@ -139,8 +139,8 @@ public override object Common(object output1, object output2) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(output1, false)); - Debugging.Assert(() => Valid(output2, false)); + Debugging.Assert(Valid(output1, false)); + Debugging.Assert(Valid(output2, false)); } long? output1_ = (long?)output1; long? output2_ = (long?)output2; @@ -152,8 +152,8 @@ public override object Common(object output1, object output2) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output1_ > 0); - Debugging.Assert(() => output2_ > 0); + Debugging.Assert(output1_ > 0); + Debugging.Assert(output2_ > 0); } return Math.Min(output1_.GetValueOrDefault(), output2_.GetValueOrDefault()); } @@ -171,12 +171,12 @@ public override object Subtract(object output, object inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(output, false)); - Debugging.Assert(() => Valid(inc, false)); + Debugging.Assert(Valid(output, false)); + Debugging.Assert(Valid(inc, false)); } long? output2 = (long?)output; long? inc2 = (long?)inc; - if (Debugging.AssertsEnabled) Debugging.Assert(() => output2 >= inc2); + if (Debugging.AssertsEnabled) Debugging.Assert(output2 >= inc2); if (inc2 == NO_OUTPUT) { @@ -194,8 +194,8 @@ public override object Subtract(object output, object inc) public override object Add(object prefix, object output) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(prefix, false)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output, true)); + if (Debugging.AssertsEnabled) Debugging.Assert(Valid(prefix, false)); + if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output, true)); long? prefix2 = (long?)prefix; if (output is long?) { @@ -223,7 +223,7 @@ public override object Add(object prefix, object output) public override void Write(object output, DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output, true)); + if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output, true)); if (output is long?) { long? output2 = (long?)output; @@ -264,9 +264,9 @@ public override object Read(DataInput @in) private bool Valid(long? 
o) { - Debugging.Assert(() => o != null); - Debugging.Assert(() => o is long?); - Debugging.Assert(() => o == NO_OUTPUT || o > 0); + Debugging.Assert(o != null); + Debugging.Assert(o is long?); + Debugging.Assert(o == NO_OUTPUT || o > 0); return true; } @@ -275,7 +275,7 @@ private bool Valid(object o, bool allowDouble) { if (!allowDouble) { - Debugging.Assert(() => o is long?); + Debugging.Assert(o is long?); return Valid((long?)o); } else if (o is TwoInt64s) @@ -300,8 +300,8 @@ public override object Merge(object first, object second) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(first, false)); - Debugging.Assert(() => Valid(second, false)); + Debugging.Assert(Valid(first, false)); + Debugging.Assert(Valid(second, false)); } return new TwoInt64s(((long?)first).GetValueOrDefault(), ((long?)second).GetValueOrDefault()); } diff --git a/src/Lucene.Net.Queries/BooleanFilter.cs b/src/Lucene.Net.Queries/BooleanFilter.cs index d15cee8c61..164985c926 100644 --- a/src/Lucene.Net.Queries/BooleanFilter.cs +++ b/src/Lucene.Net.Queries/BooleanFilter.cs @@ -78,7 +78,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo { if (res == null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasShouldClauses); + if (Debugging.AssertsEnabled) Debugging.Assert(!hasShouldClauses); res = new FixedBitSet(reader.MaxDoc); res.Set(0, reader.MaxDoc); // NOTE: may set bits on deleted docs } diff --git a/src/Lucene.Net.Queries/CommonTermsQuery.cs b/src/Lucene.Net.Queries/CommonTermsQuery.cs index e78ce71985..053ff1eae0 100644 --- a/src/Lucene.Net.Queries/CommonTermsQuery.cs +++ b/src/Lucene.Net.Queries/CommonTermsQuery.cs @@ -279,7 +279,7 @@ public virtual void CollectTermContext(IndexReader reader, IList termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null); if (termsEnum == TermsEnum.EMPTY) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs index f4d4c5fd74..5a220bf637 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs @@ -182,7 +182,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) { bool hasNext; hasNext = buffer.IncrementToken(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -213,7 +213,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -240,7 +240,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true); term = termAtt.ToString(); } #pragma warning disable 168 @@ -307,7 +307,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true); term 
= termAtt.ToString(); if (posIncrAtt != null) { @@ -379,7 +379,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { bool hasNext = buffer.IncrementToken(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true); + if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true); term = termAtt.ToString(); if (posIncrAtt != null) diff --git a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs index 009ad62094..0cb8bace9d 100644 --- a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs +++ b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs @@ -242,7 +242,7 @@ private void ParseSubQuery(State state) private void ConsumeSubQuery(State state) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => (m_flags & Operator.PRECEDENCE_OPERATORS) != 0); + if (Debugging.AssertsEnabled) Debugging.Assert((m_flags & Operator.PRECEDENCE_OPERATORS) != 0); int start = ++state.Index; int precedence = 1; bool escaped = false; @@ -315,7 +315,7 @@ private void ConsumeSubQuery(State state) private void ConsumePhrase(State state) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => (m_flags & Operator.PHRASE_OPERATOR) != 0); + if (Debugging.AssertsEnabled) Debugging.Assert((m_flags & Operator.PHRASE_OPERATOR) != 0); int start = ++state.Index; int copied = 0; bool escaped = false; diff --git a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs index eddc6f0777..8793c80eac 100644 --- a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs +++ b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs @@ -187,7 +187,7 @@ public virtual int CompareTo(IRevision other) /// public virtual Stream Open(string source, string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => source.Equals(INDEX_SOURCE, StringComparison.Ordinal) || source.Equals(TAXONOMY_SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected=({0} or {1}) got={2}", INDEX_SOURCE, TAXONOMY_SOURCE, source)); + if (Debugging.AssertsEnabled) Debugging.Assert(source.Equals(INDEX_SOURCE, StringComparison.Ordinal) || source.Equals(TAXONOMY_SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected=({0} or {1}) got={2}", INDEX_SOURCE, TAXONOMY_SOURCE, source)); IndexCommit commit = source.Equals(INDEX_SOURCE, StringComparison.Ordinal) ? 
indexCommit : taxonomyCommit; return new IndexInputStream(commit.Directory.OpenInput(fileName, IOContext.READ_ONCE)); } diff --git a/src/Lucene.Net.Replicator/IndexRevision.cs b/src/Lucene.Net.Replicator/IndexRevision.cs index 3a97eadb67..d454d1387e 100644 --- a/src/Lucene.Net.Replicator/IndexRevision.cs +++ b/src/Lucene.Net.Replicator/IndexRevision.cs @@ -134,7 +134,7 @@ public virtual int CompareTo(IRevision other) public virtual Stream Open(string source, string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => source.Equals(SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected={0} got={1}", SOURCE, source)); + if (Debugging.AssertsEnabled) Debugging.Assert(source.Equals(SOURCE, StringComparison.Ordinal), () => string.Format("invalid source; expected={0} got={1}", SOURCE, source)); return new IndexInputStream(commit.Directory.OpenInput(fileName, IOContext.READ_ONCE)); } diff --git a/src/Lucene.Net.Replicator/ReplicationClient.cs b/src/Lucene.Net.Replicator/ReplicationClient.cs index 35251e62d4..a9a784460b 100644 --- a/src/Lucene.Net.Replicator/ReplicationClient.cs +++ b/src/Lucene.Net.Replicator/ReplicationClient.cs @@ -370,7 +370,7 @@ protected virtual IDictionary> RequiredFiles(IDictio // make sure to preserve revisionFiles order List res = new List(); string source = e.Key; - if (Debugging.AssertsEnabled) Debugging.Assert(() => newRevisionFiles.ContainsKey(source), () => string.Format("source not found in newRevisionFiles: {0}", newRevisionFiles)); + if (Debugging.AssertsEnabled) Debugging.Assert(newRevisionFiles.ContainsKey(source), () => string.Format("source not found in newRevisionFiles: {0}", newRevisionFiles)); foreach (RevisionFile file in newRevisionFiles[source]) { if (!handlerFiles.Contains(file.FileName)) @@ -416,7 +416,7 @@ public virtual void StartUpdateThread(long intervalMillis, string threadName) updateThread = new ReplicationThread(intervalMillis, threadName, DoUpdate, HandleUpdateException, updateLock); updateThread.Start(); // we rely on isAlive to return true in isUpdateThreadAlive, assert to be on the safe side - if (Debugging.AssertsEnabled) Debugging.Assert(() => updateThread.IsAlive, () => "updateThread started but not alive?"); + if (Debugging.AssertsEnabled) Debugging.Assert(updateThread.IsAlive, () => "updateThread started but not alive?"); } /// diff --git a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs index 265e06fbd9..34f2783be8 100644 --- a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs +++ b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs @@ -191,7 +191,7 @@ protected override SortedDocValues GetSortedDocValues(AtomicReaderContext contex case Selector.MIDDLE_MAX: return new MiddleMaxValue(randomOrds); case Selector.MIN: default: - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(false); return null; } } diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs index 3e5da90d4e..2a906eb6a5 100644 --- a/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs @@ -113,7 +113,7 @@ public BaseTermsEnumTraverser(AbstractPrefixTreeFilter outerInstance, AtomicRead protected virtual void CollectDocs(FixedBitSet bitSet) { //WARN: keep this specialization in sync - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_termsEnum != null); + if 
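// [Illustrative sketch, not part of the patch] In the Replicator hunks above the
// condition is now evaluated eagerly while the failure message stays behind a
// Func<string>. Assumption: a Debugging.Assert(bool, Func<string>) overload exists
// to match these call sites; it would look roughly like this:

public static void Assert(bool condition, Func<string> messageFactory)
{
    if (AssertsEnabled && !condition)
        throw new InvalidOperationException(messageFactory()); // stand-in exception type
}

// Usage, as in IndexRevision.Open above:
//
//     if (Debugging.AssertsEnabled)
//         Debugging.Assert(source.Equals(SOURCE, StringComparison.Ordinal),
//             () => string.Format("invalid source; expected={0} got={1}", SOURCE, source));
//
// string.Format and string concatenation run only when the assert actually fires.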
(Debugging.AssertsEnabled) Debugging.Assert(m_termsEnum != null); m_docsEnum = m_termsEnum.Docs(m_acceptDocs, m_docsEnum, DocsFlags.NONE); int docid; while ((docid = m_docsEnum.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS) diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs index 033eb93576..9edae90ed5 100644 --- a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs @@ -52,7 +52,7 @@ public AbstractVisitingPrefixTreeFilter(IShape queryShape, string fieldName, Spa : base(queryShape, fieldName, grid, detailLevel) { this.m_prefixGridScanLevel = Math.Max(0, Math.Min(prefixGridScanLevel, grid.MaxLevels - 1)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => detailLevel <= grid.MaxLevels); + if (Debugging.AssertsEnabled) Debugging.Assert(detailLevel <= grid.MaxLevels); } public override bool Equals(object o) @@ -135,7 +135,7 @@ public VisitorTemplate(AbstractVisitingPrefixTreeFilter outerInstance, AtomicRea public virtual DocIdSet GetDocIdSet() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => curVNode == null, () => "Called more than once?"); + if (Debugging.AssertsEnabled) Debugging.Assert(curVNode == null, () => "Called more than once?"); if (m_termsEnum == null) { return null; @@ -169,7 +169,7 @@ public virtual DocIdSet GetDocIdSet() // LUCENENET IMPORTANT: Must not call this inline with Debug.Assert // because the compiler removes Debug.Assert statements in release mode!! bool hasNext = curVNode.children.MoveNext(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext); + if (Debugging.AssertsEnabled) Debugging.Assert(hasNext); curVNode = curVNode.children.Current; } @@ -206,7 +206,7 @@ public virtual DocIdSet GetDocIdSet() if (compare > 0) { // leap frog (termsEnum is beyond where we would otherwise seek) - if (Debugging.AssertsEnabled) Debugging.Assert(() => !m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetIterator(null).SeekExact(curVNodeTerm), () => "should be absent"); + if (Debugging.AssertsEnabled) Debugging.Assert(!m_context.AtomicReader.GetTerms(m_outerInstance.m_fieldName).GetIterator(null).SeekExact(curVNodeTerm), () => "should be absent"); } else { @@ -250,7 +250,7 @@ public virtual DocIdSet GetDocIdSet() /// private void AddIntersectingChildren() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => thisTerm != null); + if (Debugging.AssertsEnabled) Debugging.Assert(thisTerm != null); Cell cell = curVNode.cell; if (cell.Level >= m_outerInstance.m_detailLevel) { @@ -261,7 +261,7 @@ private void AddIntersectingChildren() { //If the next indexed term just adds a leaf marker ('+') to cell, // then add all of those docs - if (Debugging.AssertsEnabled) Debugging.Assert(() => StringHelper.StartsWith(thisTerm, curVNodeTerm));//TODO refactor to use method on curVNode.cell + if (Debugging.AssertsEnabled) Debugging.Assert(StringHelper.StartsWith(thisTerm, curVNodeTerm));//TODO refactor to use method on curVNode.cell scanCell = m_outerInstance.m_grid.GetCell(thisTerm.Bytes, thisTerm.Offset, thisTerm.Length, scanCell); if (scanCell.Level == cell.Level && scanCell.IsLeaf) { @@ -490,9 +490,9 @@ internal VNode(VNode parent) internal virtual void Reset(Cell cell) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => cell != null); + if (Debugging.AssertsEnabled) Debugging.Assert(cell != null); this.cell = cell; - if (Debugging.AssertsEnabled) Debugging.Assert(() => children 
== null); + if (Debugging.AssertsEnabled) Debugging.Assert(children == null); } } diff --git a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs index 80d20390c7..88f2e3a8c2 100644 --- a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs @@ -102,7 +102,7 @@ internal SmallDocSet Visit(Cell cell, IBits acceptContains) if (cell.Level != 0 && ((cell.ShapeRel == SpatialRelation.NOT_SET || cell.ShapeRel == SpatialRelation.WITHIN))) { subCellsFilter = null; - if (Debugging.AssertsEnabled) Debugging.Assert(() => cell.Shape.Relate(outerInstance.m_queryShape) == SpatialRelation.WITHIN); + if (Debugging.AssertsEnabled) Debugging.Assert(cell.Shape.Relate(outerInstance.m_queryShape) == SpatialRelation.WITHIN); } ICollection subCells = cell.GetSubCells(subCellsFilter); foreach (Cell subCell in subCells) @@ -147,7 +147,7 @@ internal SmallDocSet Visit(Cell cell, IBits acceptContains) private bool SeekExact(Cell cell) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => new BytesRef(cell.GetTokenBytes()).CompareTo(termBytes) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(new BytesRef(cell.GetTokenBytes()).CompareTo(termBytes) > 0); this.termBytes.Bytes = cell.GetTokenBytes(); this.termBytes.Length = this.termBytes.Bytes.Length; if (m_termsEnum == null) @@ -157,7 +157,7 @@ private bool SeekExact(Cell cell) private SmallDocSet GetDocs(Cell cell, IBits acceptContains) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => new BytesRef(cell.GetTokenBytes()).Equals(termBytes)); + if (Debugging.AssertsEnabled) Debugging.Assert(new BytesRef(cell.GetTokenBytes()).Equals(termBytes)); return this.CollectDocs(acceptContains); } @@ -167,8 +167,8 @@ private SmallDocSet GetLeafDocs(Cell leafCell, IBits acceptContains) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => new BytesRef(leafCell.GetTokenBytes()).Equals(termBytes)); - Debugging.Assert(() => !leafCell.Equals(lastLeaf));//don't call for same leaf again + Debugging.Assert(new BytesRef(leafCell.GetTokenBytes()).Equals(termBytes)); + Debugging.Assert(!leafCell.Equals(lastLeaf));//don't call for same leaf again } lastLeaf = leafCell; @@ -301,7 +301,7 @@ public override DocIdSetIterator GetIterator() } docs[d++] = v; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => d == intSet.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(d == intSet.Count); int size = d; //sort them Array.Sort(docs, 0, size); diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs index e4fcc52fa2..7ae545e14d 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs @@ -104,7 +104,7 @@ protected internal Cell(SpatialPrefixTree outerInstance, byte[] bytes, int off, public virtual void Reset(byte[] bytes, int off, int len) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Level != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(Level != 0); token = null; m_shapeRel = SpatialRelation.NOT_SET; this.bytes = bytes; @@ -139,7 +139,7 @@ private void B_fixLeaf() /// Note: not supported at level 0. 
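// [Illustrative sketch, not part of the patch] The "LUCENENET IMPORTANT" comment in
// the AbstractVisitingPrefixTreeFilter hunk above explains why side-effecting calls
// are hoisted out of the assert: if MoveNext() sat inside it, builds with asserts
// disabled would never advance the enumerator.

bool hasNext = curVNode.children.MoveNext();             // side effect must always run
if (Debugging.AssertsEnabled) Debugging.Assert(hasNext); // only the check is optional
curVNode = curVNode.children.Current;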
public virtual void SetLeaf() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Level != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(Level != 0); m_leaf = true; } diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs index b6b76f42ad..a24056c36a 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs @@ -171,7 +171,7 @@ private void Build( IShape shape, int maxLevel) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => str.Length == level); + if (Debugging.AssertsEnabled) Debugging.Assert(str.Length == level); double w = levelW[level] / 2; double h = levelH[level] / 2; @@ -197,7 +197,7 @@ private void CheckBattenberg( IShape shape, int maxLevel) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => str.Length == level); + if (Debugging.AssertsEnabled) Debugging.Assert(str.Length == level); double w = levelW[level] / 2; double h = levelH[level] / 2; diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs index 36a45f39d7..3d00c3a8b0 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs @@ -48,7 +48,7 @@ public abstract class SpatialPrefixTree public SpatialPrefixTree(SpatialContext ctx, int maxLevels) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => maxLevels > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(maxLevels > 0); this.m_ctx = ctx; this.m_maxLevels = maxLevels; } @@ -269,7 +269,7 @@ public virtual IList GetCells(IPoint p, int detailLevel, bool inclParents) return new ReadOnlyCollection(new[] { cell }); } string endToken = cell.TokenString; - if (Debugging.AssertsEnabled) Debugging.Assert(() => endToken.Length == detailLevel); + if (Debugging.AssertsEnabled) Debugging.Assert(endToken.Length == detailLevel); IList cells = new List(detailLevel); for (int i = 1; i < detailLevel; i++) { diff --git a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs index 5b33da79dc..ca1f64da04 100644 --- a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs @@ -205,8 +205,8 @@ protected internal override void VisitLeaf(Cell cell) //visitRelation is declared as a field, populated by visit() so we don't recompute it if (Debugging.AssertsEnabled) { - Debugging.Assert(() => m_outerInstance.m_detailLevel != cell.Level); - Debugging.Assert(() => visitRelation == cell.Shape.Relate(m_outerInstance.m_queryShape)); + Debugging.Assert(m_outerInstance.m_detailLevel != cell.Level); + Debugging.Assert(visitRelation == cell.Shape.Relate(m_outerInstance.m_queryShape)); } if (AllCellsIntersectQuery(cell, visitRelation)) { diff --git a/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs b/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs index 391f23edfd..3b2fc96fb3 100644 --- a/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs +++ b/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs @@ -104,7 +104,7 @@ public override double DoubleVal(int doc) // make sure it has minX and area if (validX.Get(doc)) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => validY.Get(doc)); + if (Debugging.AssertsEnabled) Debugging.Assert(validY.Get(doc)); return calculator.Distance(outerInstance.from, ptX.Get(doc), ptY.Get(doc)) * outerInstance.multiplier; } return nullValue; diff --git 
a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs index 0be20a58f0..4cba458197 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs @@ -270,7 +270,7 @@ private void ReplaceSep(Automaton a) IList newTransitions = new List(); foreach (Transition t in state.GetTransitions()) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => t.Min == t.Max); + if (Debugging.AssertsEnabled) Debugging.Assert(t.Min == t.Max); if (t.Min == TokenStreamToAutomaton.POS_SEP) { if (preserveSep) @@ -361,8 +361,8 @@ public int Compare(BytesRef a, BytesRef b) long bCost = readerB.ReadInt32(); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => DecodeWeight(aCost) >= 0); - Debugging.Assert(() => DecodeWeight(bCost) >= 0); + Debugging.Assert(DecodeWeight(aCost) >= 0); + Debugging.Assert(DecodeWeight(bCost) >= 0); } if (aCost < bCost) { @@ -490,7 +490,7 @@ public override void Build(IInputIterator iterator) output.WriteBytes(surfaceForm.Bytes, surfaceForm.Offset, surfaceForm.Length); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => output.Position == requiredLength, () => output.Position + " vs " + requiredLength); + if (Debugging.AssertsEnabled) Debugging.Assert(output.Position == requiredLength, () => output.Position + " vs " + requiredLength); writer.Write(buffer, 0, output.Position); } @@ -664,7 +664,7 @@ private LookupResult GetLookupResult(long? output1, BytesRef output2, CharsRef s break; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => sepIndex != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(sepIndex != -1); spare.Grow(sepIndex); int payloadLen = output2.Length - sepIndex - 1; @@ -710,7 +710,7 @@ private bool SameSurfaceForm(BytesRef key, BytesRef output2) public override IList DoLookup(string key, IEnumerable contexts, bool onlyMorePopular, int num) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => num > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(num > 0); if (onlyMorePopular) { @@ -802,7 +802,7 @@ public override IList DoLookup(string key, IEnumerable c } var completions = searcher.Search(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => completions.IsComplete); + if (Debugging.AssertsEnabled) Debugging.Assert(completions.IsComplete); // NOTE: this is rather inefficient: we enumerate // every matching "exactly the same analyzed form" @@ -845,7 +845,7 @@ public override IList DoLookup(string key, IEnumerable c } var completions2 = searcher2.Search(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => completions2.IsComplete); + if (Debugging.AssertsEnabled) Debugging.Assert(completions2.IsComplete); foreach (Util.Fst.Util.Result.Pair> completion in completions2) { @@ -923,7 +923,7 @@ protected override bool AcceptResult(Int32sRef input, PairOutputs results.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(results.Count == 1); return false; } else @@ -967,7 +967,7 @@ internal ISet ToFiniteStrings(BytesRef surfaceForm, TokenStreamToAuto ReplaceSep(automaton); automaton = ConvertAutomaton(automaton); - if (Debugging.AssertsEnabled) Debugging.Assert(() => SpecialOperations.IsFinite(automaton)); + if (Debugging.AssertsEnabled) Debugging.Assert(SpecialOperations.IsFinite(automaton)); // Get all paths from the automaton (there can be // more than one path, eg if the analyzer created a diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs 
b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs index a0c70e4cb3..3b1c8f57bb 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs @@ -145,7 +145,7 @@ protected override FieldType GetTextFieldType() { BinaryDocValues textDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, TEXT_FIELD_NAME); - if (Debugging.AssertsEnabled) Debugging.Assert(() => textDV != null); + if (Debugging.AssertsEnabled) Debugging.Assert(textDV != null); // This will just be null if app didn't pass payloads to build(): // TODO: maybe just stored fields? they compress... diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs index fbb56ad880..53d613d1ab 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FSTUtil.cs @@ -69,7 +69,7 @@ public Path(State state, FST.Arc fstNode, T output, Int32sRef input) /// public static IList> IntersectPrefixPaths(Automaton a, FST fst) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => a.IsDeterministic); + if (Debugging.AssertsEnabled) Debugging.Assert(a.IsDeterministic); IList> queue = new List>(); List> endNodes = new List>(); queue.Add(new Path(a.GetInitialState(), fst.GetFirstArc(new FST.Arc()), fst.Outputs.NoOutput, new Int32sRef())); @@ -120,8 +120,8 @@ public static IList> IntersectPrefixPaths(Automaton a, FST fst) FST.Arc nextArc = Lucene.Net.Util.Fst.Util.ReadCeilArc(min, fst, path.FstNode, scratchArc, fstReader); while (nextArc != null && nextArc.Label <= max) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextArc.Label <= max); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextArc.Label >= min, () => nextArc.Label + " " + min); + if (Debugging.AssertsEnabled) Debugging.Assert(nextArc.Label <= max); + if (Debugging.AssertsEnabled) Debugging.Assert(nextArc.Label >= min, () => nextArc.Label + " " + min); Int32sRef newInput = new Int32sRef(currentInput.Length + 1); newInput.CopyInt32s(currentInput); newInput.Int32s[currentInput.Length] = nextArc.Label; @@ -130,7 +130,7 @@ public static IList> IntersectPrefixPaths(Automaton a, FST fst) .CopyFrom(nextArc), fst.Outputs.Add(path.Output, nextArc.Output), newInput)); int label = nextArc.Label; // used in assert nextArc = nextArc.IsLast ? null : fst.ReadNextRealArc(nextArc, fstReader); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextArc == null || label < nextArc.Label, () => "last: " + label + " next: " + (nextArc == null ? "" : nextArc.Label.ToString())); + if (Debugging.AssertsEnabled) Debugging.Assert(nextArc == null || label < nextArc.Label, () => "last: " + label + " next: " + (nextArc == null ? "" : nextArc.Label.ToString())); } } } diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs index 2b79029781..040d19a1c8 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs @@ -556,7 +556,7 @@ public virtual IList DoLookup(string key, IEnumerable co // a separate dedicated att for this? 
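// [Illustrative sketch, not part of the patch] Where several asserts are adjacent,
// as in the AnalyzingSuggester comparer above, the patch hoists a single
// AssertsEnabled test over the whole group instead of repeating it per call:

if (Debugging.AssertsEnabled)
{
    Debugging.Assert(DecodeWeight(aCost) >= 0);
    Debugging.Assert(DecodeWeight(bCost) >= 0);
}

// One flag read, and neither DecodeWeight call executes when asserts are off.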
int gramCount = posLenAtt.PositionLength; - if (Debugging.AssertsEnabled) Debugging.Assert(() => gramCount <= grams); + if (Debugging.AssertsEnabled) Debugging.Assert(gramCount <= grams); // Safety: make sure the recalculated count "agrees": if (CountGrams(tokenBytes) != gramCount) @@ -682,7 +682,7 @@ public virtual IList DoLookup(string key, IEnumerable co { BytesRef context = new BytesRef(token.Bytes, token.Offset, i); long? output = Lucene.Net.Util.Fst.Util.Get(fst, Lucene.Net.Util.Fst.Util.ToInt32sRef(context, new Int32sRef())); - if (Debugging.AssertsEnabled) Debugging.Assert(() => output != null); + if (Debugging.AssertsEnabled) Debugging.Assert(output != null); contextCount = DecodeWeight(output); lastTokenFragment = new BytesRef(token.Bytes, token.Offset + i + 1, token.Length - i - 1); break; @@ -699,7 +699,7 @@ public virtual IList DoLookup(string key, IEnumerable co { finalLastToken = BytesRef.DeepCopyOf(lastTokenFragment); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => finalLastToken.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(finalLastToken.Offset == 0); CharsRef spare = new CharsRef(); @@ -726,7 +726,7 @@ public virtual IList DoLookup(string key, IEnumerable co searcher.AddStartPaths(arc, prefixOutput, true, new Int32sRef()); completions = searcher.Search(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => completions.IsComplete); + if (Debugging.AssertsEnabled) Debugging.Assert(completions.IsComplete); } catch (IOException bogus) { @@ -754,7 +754,7 @@ public virtual IList DoLookup(string key, IEnumerable co { if (token.Bytes[token.Offset + i] == separator) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => token.Length - i - 1 > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(token.Length - i - 1 > 0); lastToken = new BytesRef(token.Bytes, token.Offset + i + 1, token.Length - i - 1); break; } @@ -772,7 +772,7 @@ public virtual IList DoLookup(string key, IEnumerable co // return numbers that are greater than long.MaxValue, which results in a negative long number. (long)(long.MaxValue * (decimal)backoff * ((decimal)DecodeWeight(completion.Output)) / contextCount)); results.Add(result); - if (Debugging.AssertsEnabled) Debugging.Assert(() => results.Count == seen.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(results.Count == seen.Count); //System.out.println(" add result=" + result); nextCompletionContinue:; } @@ -874,7 +874,7 @@ private long EncodeWeight(long ngramCount) //private long decodeWeight(Pair output) { private static long DecodeWeight(long? output) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => output != null); + if (Debugging.AssertsEnabled) Debugging.Assert(output != null); return (int)(long.MaxValue - output); // LUCENENET TODO: Perhaps a Java Lucene bug? Why cast to int when returning long? 
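// [Illustrative note, not part of the patch] On the LUCENENET TODO above:
// (int)(long.MaxValue - output) truncates to 32 bits before widening back to long.
// For output = 0, long.MaxValue - output is 9223372036854775807 (0x7FFFFFFFFFFFFFFF);
// keeping only the low 32 bits (0xFFFFFFFF) yields -1 as an int, even though
// DecodeWeight is declared to return long. The port keeps the cast to match Java
// Lucene's behavior while flagging it as a possible upstream bug.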
} diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs index 8f909db40f..424420d272 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/SuggestStopFilter.cs @@ -115,7 +115,7 @@ public override bool IncrementToken() m_input.End(); endState = CaptureState(); int finalEndOffset = offsetAtt.EndOffset; - if (Debugging.AssertsEnabled) Debugging.Assert(() => finalEndOffset >= endOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(finalEndOffset >= endOffset); if (finalEndOffset > endOffset) { // OK there was a token separator after the diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs index afdfc8d498..0aa067e4de 100644 --- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs +++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs @@ -407,7 +407,7 @@ private bool Collect(IList res, int num, int bucket, BytesRef output { output.Bytes = ArrayUtil.Grow(output.Bytes); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => output.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(output.Offset == 0); output.Bytes[output.Length++] = (byte) arc.Label; FST.BytesReader fstReader = automaton.GetBytesReader(); automaton.ReadFirstTargetArc(arc, arc, fstReader); diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs index 8efcd39998..e793931c6d 100644 --- a/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs +++ b/src/Lucene.Net.Suggest/Suggest/Fst/WFSTCompletionLookup.cs @@ -140,7 +140,7 @@ public override IList DoLookup(string key, IEnumerable c { throw new ArgumentException("this suggester doesn't support contexts"); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => num > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(num > 0); if (onlyMorePopular) { @@ -190,7 +190,7 @@ public override IList DoLookup(string key, IEnumerable c try { completions = Lucene.Net.Util.Fst.Util.ShortestPaths(fst, arc, prefixOutput, weightComparer, num, !exactFirst); - if (Debugging.AssertsEnabled) Debugging.Assert(() => completions.IsComplete); + if (Debugging.AssertsEnabled) Debugging.Assert(completions.IsComplete); } catch (IOException bogus) { @@ -213,7 +213,7 @@ public override IList DoLookup(string key, IEnumerable c private long? 
LookupPrefix(BytesRef scratch, FST.Arc arc) //Bogus { - if (Debugging.AssertsEnabled) Debugging.Assert(() => 0 == (long)fst.Outputs.NoOutput); + if (Debugging.AssertsEnabled) Debugging.Assert(0 == (long)fst.Outputs.NoOutput); long output = 0; var bytesReader = fst.GetBytesReader(); @@ -294,7 +294,7 @@ internal WFSTInputIterator(WFSTCompletionLookup outerInstance, IInputIterator so : base(source) { this.outerInstance = outerInstance; - if (Debugging.AssertsEnabled) Debugging.Assert(() => source.HasPayloads == false); + if (Debugging.AssertsEnabled) Debugging.Assert(source.HasPayloads == false); } protected internal override void Encode(OfflineSorter.ByteSequencesWriter writer, ByteArrayDataOutput output, byte[] buffer, BytesRef spare, BytesRef payload, ICollection contexts, long weight) diff --git a/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs b/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs index 4b8791f6fd..d7891b7f57 100644 --- a/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs +++ b/src/Lucene.Net.Suggest/Suggest/UnsortedInputIterator.cs @@ -62,7 +62,7 @@ public override long Weight { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentOrd == ords[m_curPos]); + if (Debugging.AssertsEnabled) Debugging.Assert(currentOrd == ords[m_curPos]); return m_freqs[currentOrd]; } } @@ -83,7 +83,7 @@ public override BytesRef Payload { if (HasPayloads && m_curPos < m_payloads.Length) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentOrd == ords[m_curPos]); + if (Debugging.AssertsEnabled) Debugging.Assert(currentOrd == ords[m_curPos]); return m_payloads.Get(payloadSpare, currentOrd); } return null; @@ -96,7 +96,7 @@ public override ICollection Contexts { if (HasContexts && m_curPos < m_contextSets.Count) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentOrd == ords[m_curPos]); + if (Debugging.AssertsEnabled) Debugging.Assert(currentOrd == ords[m_curPos]); return m_contextSets[currentOrd]; } return null; diff --git a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs index f20029b576..ba0518bd82 100644 --- a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs @@ -78,7 +78,7 @@ public virtual void Add(AttributeSource.State state) public virtual AttributeSource.State NextState() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => NextRead < InputTokens.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(NextRead < InputTokens.Count); return InputTokens[NextRead++]; } } @@ -141,7 +141,7 @@ protected virtual void InsertToken() m_positions.Get(m_inputPos).Add(CaptureState()); tokenPending = false; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => !insertPending); + if (Debugging.AssertsEnabled) Debugging.Assert(!insertPending); insertPending = true; } @@ -184,8 +184,8 @@ protected virtual bool PeekToken() { Console.WriteLine("LTF.peekToken inputPos=" + m_inputPos + " outputPos=" + m_outputPos + " tokenPending=" + tokenPending); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => !m_end); - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_inputPos == -1 || m_outputPos <= m_inputPos); + if (Debugging.AssertsEnabled) Debugging.Assert(!m_end); + if (Debugging.AssertsEnabled) Debugging.Assert(m_inputPos == -1 || m_outputPos <= m_inputPos); if (tokenPending) { m_positions.Get(m_inputPos).Add(CaptureState()); @@ -199,7 +199,7 @@ protected virtual bool PeekToken() if 
(gotToken) { m_inputPos += m_posIncAtt.PositionIncrement; - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_inputPos >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(m_inputPos >= 0); if (DEBUG) { Console.WriteLine(" now inputPos=" + m_inputPos); @@ -216,7 +216,7 @@ protected virtual bool PeekToken() else { // Make sure our input isn't messing up offsets: - if (Debugging.AssertsEnabled) Debugging.Assert(() => startPosData.StartOffset == startOffset, () => "prev startOffset=" + startPosData.StartOffset + " vs new startOffset=" + startOffset + " inputPos=" + m_inputPos); + if (Debugging.AssertsEnabled) Debugging.Assert(startPosData.StartOffset == startOffset, () => "prev startOffset=" + startPosData.StartOffset + " vs new startOffset=" + startOffset + " inputPos=" + m_inputPos); } int endOffset = m_offsetAtt.EndOffset; @@ -227,7 +227,7 @@ protected virtual bool PeekToken() else { // Make sure our input isn't messing up offsets: - if (Debugging.AssertsEnabled) Debugging.Assert(() => endPosData.EndOffset == endOffset, () => "prev endOffset=" + endPosData.EndOffset + " vs new endOffset=" + endOffset + " inputPos=" + m_inputPos); + if (Debugging.AssertsEnabled) Debugging.Assert(endPosData.EndOffset == endOffset, () => "prev endOffset=" + endPosData.EndOffset + " vs new endOffset=" + endOffset + " inputPos=" + m_inputPos); } tokenPending = true; @@ -314,7 +314,7 @@ protected virtual bool NextToken() { Console.WriteLine(" return inserted token"); } - if (Debugging.AssertsEnabled) Debugging.Assert(InsertedTokenConsistent); + if (Debugging.AssertsEnabled) Debugging.Assert(InsertedTokenConsistent()); insertPending = false; return true; } @@ -340,7 +340,7 @@ protected virtual bool NextToken() { Console.WriteLine(" return inserted token"); } - if (Debugging.AssertsEnabled) Debugging.Assert(InsertedTokenConsistent); + if (Debugging.AssertsEnabled) Debugging.Assert(InsertedTokenConsistent()); insertPending = false; return true; } @@ -364,8 +364,8 @@ private bool InsertedTokenConsistent() { int posLen = m_posLenAtt.PositionLength; Position endPosData = m_positions.Get(m_outputPos + posLen); - if (Debugging.AssertsEnabled) Debugging.Assert(() => endPosData.EndOffset != -1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_offsetAtt.EndOffset == endPosData.EndOffset, () => "offsetAtt.endOffset=" + m_offsetAtt.EndOffset + " vs expected=" + endPosData.EndOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(endPosData.EndOffset != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(m_offsetAtt.EndOffset == endPosData.EndOffset, () => "offsetAtt.endOffset=" + m_offsetAtt.EndOffset + " vs expected=" + endPosData.EndOffset); return true; } diff --git a/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs b/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs index 0b377d1cea..d7ef75e9b0 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockCharFilter.cs @@ -112,7 +112,7 @@ protected override int Correct(int currentOff) ret = currentOff; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => ret >= 0, () => "currentOff=" + currentOff + ",diff=" + (ret - currentOff)); + if (Debugging.AssertsEnabled) Debugging.Assert(ret >= 0, () => "currentOff=" + currentOff + ",diff=" + (ret - currentOff)); return ret; } diff --git a/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs b/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs index 49bbebfb4f..f7527ca8be 100644 --- 
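// [Illustrative note, not part of the patch] In LookaheadTokenFilter above,
// Debugging.Assert(InsertedTokenConsistent) becomes Debugging.Assert(InsertedTokenConsistent()).
// With the delegate-based overload, a method group converted implicitly to Func<bool>;
// the bool overload requires an explicit invocation. The surrounding guard therefore
// now also decides whether the helper runs at all. Since InsertedTokenConsistent()
// exists purely for validation (it asserts internally and returns true), skipping
// the call entirely when asserts are off is the intended behavior.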
a/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockReaderWrapper.cs @@ -47,7 +47,7 @@ public virtual void ThrowExcAfterChar(int charUpto) { excAtChar = charUpto; // You should only call this on init!: - if (Debugging.AssertsEnabled) Debugging.Assert(() => 0 == readSoFar); + if (Debugging.AssertsEnabled) Debugging.Assert(0 == readSoFar); } public virtual void ThrowExcNext() @@ -91,10 +91,10 @@ public override int Read(char[] cbuf, int off, int len) if (excAtChar != -1) { int left = excAtChar - readSoFar; - if (Debugging.AssertsEnabled) Debugging.Assert(() => left != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(left != 0); read = input.Read(cbuf, off, Math.Min(realLen, left)); //Characters are left - if (Debugging.AssertsEnabled) Debugging.Assert(() => read != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(read != 0); readSoFar += read; } else diff --git a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs index 50152f6224..d6da069f6f 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs @@ -140,7 +140,7 @@ public MockTokenizer(AttributeFactory factory, TextReader input) public sealed override bool IncrementToken() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || (streamState == State.RESET || streamState == State.INCREMENT), () => "IncrementToken() called while in wrong state: " + streamState); + if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || (streamState == State.RESET || streamState == State.INCREMENT), () => "IncrementToken() called while in wrong state: " + streamState); ClearAttributes(); for (; ; ) { @@ -219,7 +219,7 @@ protected virtual int ReadCodePoint() } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !char.IsLowSurrogate((char)ch), () => "unpaired low surrogate: " + ch.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(!char.IsLowSurrogate((char)ch), () => "unpaired low surrogate: " + ch.ToString("x")); off++; if (char.IsHighSurrogate((char)ch)) { @@ -227,12 +227,12 @@ protected virtual int ReadCodePoint() if (ch2 >= 0) { off++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => char.IsLowSurrogate((char)ch2), () => "unpaired high surrogate: " + ch.ToString("x") + ", followed by: " + ch2.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(char.IsLowSurrogate((char)ch2), () => "unpaired high surrogate: " + ch.ToString("x") + ", followed by: " + ch2.ToString("x")); return Character.ToCodePoint((char)ch, (char)ch2); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "stream ends with unpaired high surrogate: " + ch.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "stream ends with unpaired high surrogate: " + ch.ToString("x")); } } return ch; @@ -300,7 +300,7 @@ public override void Reset() state = runAutomaton.InitialState; lastOffset = off = 0; bufferedCodePoint = -1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || streamState != State.RESET, () => "Double Reset()"); + if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || streamState != State.RESET, () => "Double Reset()"); streamState = State.RESET; } @@ -312,14 +312,14 @@ protected override void Dispose(bool disposing) // in some exceptional cases (e.g. 
TestIndexWriterExceptions) a test can prematurely close() // these tests should disable this check, by default we check the normal workflow. // TODO: investigate the CachingTokenFilter "double-close"... for now we ignore this - if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || streamState == State.END || streamState == State.CLOSE, () => "Dispose() called in wrong state: " + streamState); + if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || streamState == State.END || streamState == State.CLOSE, () => "Dispose() called in wrong state: " + streamState); streamState = State.CLOSE; } } internal override bool SetReaderTestPoint() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || streamState == State.CLOSE, () => "SetReader() called in wrong state: " + streamState); + if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || streamState == State.CLOSE, () => "SetReader() called in wrong state: " + streamState); streamState = State.SETREADER; return true; } @@ -333,7 +333,7 @@ public override void End() // these tests should disable this check (in general you should consume the entire stream) try { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !enableChecks || streamState == State.INCREMENT_FALSE, () => "End() called before IncrementToken() returned false!"); + if (Debugging.AssertsEnabled) Debugging.Assert(!enableChecks || streamState == State.INCREMENT_FALSE, () => "End() called before IncrementToken() returned false!"); } finally { diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs index 486a980b3a..e2da7b6941 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs @@ -40,15 +40,15 @@ public AssertingDocValuesFormat() public override DocValuesConsumer FieldsConsumer(SegmentWriteState state) { DocValuesConsumer consumer = @in.FieldsConsumer(state); - if (Debugging.AssertsEnabled) Debugging.Assert(() => consumer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(consumer != null); return new AssertingDocValuesConsumer(consumer, state.SegmentInfo.DocCount); } public override DocValuesProducer FieldsProducer(SegmentReadState state) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state.FieldInfos.HasDocValues); + if (Debugging.AssertsEnabled) Debugging.Assert(state.FieldInfos.HasDocValues); DocValuesProducer producer = @in.FieldsProducer(state); - if (Debugging.AssertsEnabled) Debugging.Assert(() => producer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(producer != null); return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount); } @@ -70,7 +70,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable values) { count++; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, true); @in.AddNumericField(field, values); } @@ -80,10 +80,10 @@ public override void AddBinaryField(FieldInfo field, IEnumerable value int count = 0; foreach (BytesRef b in values) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => b == null || b.IsValid()); + if (Debugging.AssertsEnabled) Debugging.Assert(b == null || b.IsValid()); count++; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); + if 
(Debugging.AssertsEnabled) Debugging.Assert(count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, true); @in.AddBinaryField(field, values); } @@ -94,25 +94,25 @@ public override void AddSortedField(FieldInfo field, IEnumerable value BytesRef lastValue = null; foreach (BytesRef b in values) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => b != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => b.IsValid()); + if (Debugging.AssertsEnabled) Debugging.Assert(b != null); + if (Debugging.AssertsEnabled) Debugging.Assert(b.IsValid()); if (valueCount > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => b.CompareTo(lastValue) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(b.CompareTo(lastValue) > 0); } lastValue = BytesRef.DeepCopyOf(b); valueCount++; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount <= maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(valueCount <= maxDoc); FixedBitSet seenOrds = new FixedBitSet(valueCount); int count = 0; foreach (long? v in docToOrd) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => v != null); + if (Debugging.AssertsEnabled) Debugging.Assert(v != null); int ord = (int)v.Value; - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= -1 && ord < valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(ord >= -1 && ord < valueCount); if (ord >= 0) { seenOrds.Set(ord); @@ -120,8 +120,8 @@ public override void AddSortedField(FieldInfo field, IEnumerable value count++; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); - if (Debugging.AssertsEnabled) Debugging.Assert(() => seenOrds.Cardinality() == valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(seenOrds.Cardinality() == valueCount); CheckIterator(values.GetEnumerator(), valueCount, false); CheckIterator(docToOrd.GetEnumerator(), maxDoc, false); @in.AddSortedField(field, values, docToOrd); @@ -133,11 +133,11 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va BytesRef lastValue = null; foreach (BytesRef b in values) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => b != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => b.IsValid()); + if (Debugging.AssertsEnabled) Debugging.Assert(b != null); + if (Debugging.AssertsEnabled) Debugging.Assert(b.IsValid()); if (valueCount > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => b.CompareTo(lastValue) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(b.CompareTo(lastValue) > 0); } lastValue = BytesRef.DeepCopyOf(b); valueCount++; @@ -150,9 +150,9 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va { foreach (long? v in docToOrdCount) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => v != null); + if (Debugging.AssertsEnabled) Debugging.Assert(v != null); int count = (int)v.Value; - if (Debugging.AssertsEnabled) Debugging.Assert(() => count >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(count >= 0); docCount++; ordCount += count; @@ -161,18 +161,18 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va { ordIterator.MoveNext(); long? 
o = ordIterator.Current; - if (Debugging.AssertsEnabled) Debugging.Assert(() => o != null); + if (Debugging.AssertsEnabled) Debugging.Assert(o != null); long ord = o.Value; - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < valueCount); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord > lastOrd, () => "ord=" + ord + ",lastOrd=" + lastOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0 && ord < valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(ord > lastOrd, () => "ord=" + ord + ",lastOrd=" + lastOrd); seenOrds.Set(ord); lastOrd = ord; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => ordIterator.MoveNext() == false); + if (Debugging.AssertsEnabled) Debugging.Assert(ordIterator.MoveNext() == false); - if (Debugging.AssertsEnabled) Debugging.Assert(() => docCount == maxDoc); - if (Debugging.AssertsEnabled) Debugging.Assert(() => seenOrds.Cardinality() == valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(docCount == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(seenOrds.Cardinality() == valueCount); CheckIterator(values.GetEnumerator(), valueCount, false); CheckIterator(docToOrdCount.GetEnumerator(), maxDoc, false); CheckIterator(ords.GetEnumerator(), ordCount, false); @@ -203,10 +203,10 @@ public override void AddNumericField(FieldInfo field, IEnumerable values) int count = 0; foreach (long? v in values) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => v != null); + if (Debugging.AssertsEnabled) Debugging.Assert(v != null); count++; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(count == maxDoc); CheckIterator(values.GetEnumerator(), maxDoc, false); @in.AddNumericField(field, values); } @@ -240,9 +240,9 @@ private static void CheckIterator(IEnumerator iterator, long expectedSize, for (long i = 0; i < expectedSize; i++) { bool hasNext = iterator.MoveNext(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext); + if (Debugging.AssertsEnabled) Debugging.Assert(hasNext); T v = iterator.Current; - if (Debugging.AssertsEnabled) Debugging.Assert(() => allowNull || v != null); + if (Debugging.AssertsEnabled) Debugging.Assert(allowNull || v != null); // LUCENE.NET specific. removed call to Reset(). 
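// [Illustrative sketch, not part of the patch] The CheckIterator hunk here applies
// the same hoisting rule to enumeration: MoveNext() both advances the iterator and
// feeds the assert, so the call stays outside the guard and only the check is
// conditional.

for (long i = 0; i < expectedSize; i++)
{
    bool hasNext = iterator.MoveNext();   // must run unconditionally: it drives the loop
    if (Debugging.AssertsEnabled) Debugging.Assert(hasNext);
    T v = iterator.Current;
    if (Debugging.AssertsEnabled) Debugging.Assert(allowNull || v != null);
}

// The final exhaustion probe (just below) is itself assert-only, so there MoveNext()
// may legitimately stay behind the guard.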
//try @@ -255,7 +255,7 @@ private static void CheckIterator(IEnumerator iterator, long expectedSize, // // ok //} } - if (Debugging.AssertsEnabled) Debugging.Assert(() => !iterator.MoveNext()); + if (Debugging.AssertsEnabled) Debugging.Assert(!iterator.MoveNext()); /*try { //iterator.next(); @@ -285,42 +285,42 @@ internal AssertingDocValuesProducer(DocValuesProducer @in, int maxDoc) public override NumericDocValues GetNumeric(FieldInfo field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC); + if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC); NumericDocValues values = @in.GetNumeric(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => values != null); + if (Debugging.AssertsEnabled) Debugging.Assert(values != null); return new AssertingNumericDocValues(values, maxDoc); } public override BinaryDocValues GetBinary(FieldInfo field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType == DocValuesType.BINARY); + if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesType == DocValuesType.BINARY); BinaryDocValues values = @in.GetBinary(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => values != null); + if (Debugging.AssertsEnabled) Debugging.Assert(values != null); return new AssertingBinaryDocValues(values, maxDoc); } public override SortedDocValues GetSorted(FieldInfo field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED); + if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesType == DocValuesType.SORTED); SortedDocValues values = @in.GetSorted(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => values != null); + if (Debugging.AssertsEnabled) Debugging.Assert(values != null); return new AssertingSortedDocValues(values, maxDoc); } public override SortedSetDocValues GetSortedSet(FieldInfo field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType == DocValuesType.SORTED_SET); + if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesType == DocValuesType.SORTED_SET); SortedSetDocValues values = @in.GetSortedSet(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => values != null); + if (Debugging.AssertsEnabled) Debugging.Assert(values != null); return new AssertingSortedSetDocValues(values, maxDoc); } public override IBits GetDocsWithField(FieldInfo field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesType != DocValuesType.NONE); + if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesType != DocValuesType.NONE); IBits bits = @in.GetDocsWithField(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => bits != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => bits.Length == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(bits != null); + if (Debugging.AssertsEnabled) Debugging.Assert(bits.Length == maxDoc); return new AssertingBits(bits); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs index ea40b54df6..1e87622b3b 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs @@ -32,15 +32,15 @@ public class AssertingNormsFormat : NormsFormat public override DocValuesConsumer 
NormsConsumer(SegmentWriteState state) { DocValuesConsumer consumer = @in.NormsConsumer(state); - if (Debugging.AssertsEnabled) Debugging.Assert(() => consumer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(consumer != null); return new AssertingNormsConsumer(consumer, state.SegmentInfo.DocCount); } public override DocValuesProducer NormsProducer(SegmentReadState state) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state.FieldInfos.HasNorms); + if (Debugging.AssertsEnabled) Debugging.Assert(state.FieldInfos.HasNorms); DocValuesProducer producer = @in.NormsProducer(state); - if (Debugging.AssertsEnabled) Debugging.Assert(() => producer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(producer != null); return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount); } } diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs index 69e981f8b0..f0e3fc243e 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs @@ -67,7 +67,7 @@ protected override void Dispose(bool disposing) public override IEnumerator GetEnumerator() { IEnumerator iterator = @in.GetEnumerator(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => iterator != null); + if (Debugging.AssertsEnabled) Debugging.Assert(iterator != null); return iterator; } @@ -105,7 +105,7 @@ internal AssertingFieldsConsumer(FieldsConsumer @in) public override TermsConsumer AddField(FieldInfo field) { TermsConsumer consumer = @in.AddField(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => consumer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(consumer != null); return new AssertingTermsConsumer(consumer, field); } @@ -144,28 +144,28 @@ internal AssertingTermsConsumer(TermsConsumer @in, FieldInfo fieldInfo) public override PostingsConsumer StartTerm(BytesRef text) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); state = TermsConsumerState.START; - if (Debugging.AssertsEnabled) Debugging.Assert(() => lastTerm == null || @in.Comparer.Compare(text, lastTerm) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(lastTerm == null || @in.Comparer.Compare(text, lastTerm) > 0); lastTerm = BytesRef.DeepCopyOf(text); return lastPostingsConsumer = new AssertingPostingsConsumer(@in.StartTerm(text), fieldInfo, visitedDocs); } public override void FinishTerm(BytesRef text, TermStats stats) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state == TermsConsumerState.START); + if (Debugging.AssertsEnabled) Debugging.Assert(state == TermsConsumerState.START); state = TermsConsumerState.INITIAL; - if (Debugging.AssertsEnabled) Debugging.Assert(() => text.Equals(lastTerm)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq > 0); // otherwise, this method should not be called. - if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq == lastPostingsConsumer.docFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(text.Equals(lastTerm)); + if (Debugging.AssertsEnabled) Debugging.Assert(stats.DocFreq > 0); // otherwise, this method should not be called. 
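// [Illustrative note, not part of the patch] The AssertingTermsConsumer hunks here
// encode the codec write protocol as a small state machine and assert every transition:
//
//     internal enum TermsConsumerState { INITIAL, START, FINISHED }
//     // StartTerm:  INITIAL -> START (also legal from START when the previous term
//     //             produced no documents), recording the term for ordering checks.
//     // FinishTerm: START -> INITIAL, cross-checking TermStats against what the
//     //             wrapped PostingsConsumer actually observed.
//     // Finish:     -> FINISHED, validating the aggregate docCount/sumDocFreq sums.
//
// For DOCS_ONLY fields term frequencies are not tracked, which is why TotalTermFreq
// is asserted to be -1 there rather than a running sum.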
+ if (Debugging.AssertsEnabled) Debugging.Assert(stats.DocFreq == lastPostingsConsumer.docFreq); sumDocFreq += stats.DocFreq; if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.TotalTermFreq == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(stats.TotalTermFreq == -1); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.TotalTermFreq == lastPostingsConsumer.totalTermFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(stats.TotalTermFreq == lastPostingsConsumer.totalTermFreq); sumTotalTermFreq += stats.TotalTermFreq; } @in.FinishTerm(text, stats); @@ -173,20 +173,20 @@ public override void FinishTerm(BytesRef text, TermStats stats) public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(state == TermsConsumerState.INITIAL || state == TermsConsumerState.START && lastPostingsConsumer.docFreq == 0); state = TermsConsumerState.FINISHED; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docCount >= 0); - if (Debugging.AssertsEnabled) Debugging.Assert(() => docCount == visitedDocs.Cardinality()); - if (Debugging.AssertsEnabled) Debugging.Assert(() => sumDocFreq >= docCount); - if (Debugging.AssertsEnabled) Debugging.Assert(() => sumDocFreq == this.sumDocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(docCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(docCount == visitedDocs.Cardinality()); + if (Debugging.AssertsEnabled) Debugging.Assert(sumDocFreq >= docCount); + if (Debugging.AssertsEnabled) Debugging.Assert(sumDocFreq == this.sumDocFreq); if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => sumTotalTermFreq == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(sumTotalTermFreq == -1); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => sumTotalTermFreq >= sumDocFreq); - if (Debugging.AssertsEnabled) Debugging.Assert(() => sumTotalTermFreq == this.sumTotalTermFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(sumTotalTermFreq >= sumDocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(sumTotalTermFreq == this.sumTotalTermFreq); } @in.Finish(sumTotalTermFreq, sumDocFreq, docCount); } @@ -222,17 +222,17 @@ internal AssertingPostingsConsumer(PostingsConsumer @in, FieldInfo fieldInfo, Op public override void StartDoc(int docID, int freq) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state == PostingsConsumerState.INITIAL); + if (Debugging.AssertsEnabled) Debugging.Assert(state == PostingsConsumerState.INITIAL); state = PostingsConsumerState.START; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0); if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => freq == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(freq == -1); this.freq = 0; // we don't expect any positions here } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0); this.freq = freq; totalTermFreq += freq; } @@ -246,41 +246,41 @@ public override void StartDoc(int docID, int freq) public override void AddPosition(int position, BytesRef payload, int startOffset, int 
endOffset) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state == PostingsConsumerState.START); - if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount < freq); + if (Debugging.AssertsEnabled) Debugging.Assert(state == PostingsConsumerState.START); + if (Debugging.AssertsEnabled) Debugging.Assert(positionCount < freq); positionCount++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => position >= lastPosition || position == -1); // we still allow -1 from old 3.x indexes + if (Debugging.AssertsEnabled) Debugging.Assert(position >= lastPosition || position == -1); // we still allow -1 from old 3.x indexes lastPosition = position; if (fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset >= 0); - if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset >= lastStartOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(startOffset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(startOffset >= lastStartOffset); lastStartOffset = startOffset; - if (Debugging.AssertsEnabled) Debugging.Assert(() => endOffset >= startOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(endOffset >= startOffset); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset == -1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => endOffset == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(startOffset == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(endOffset == -1); } if (payload != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo.HasPayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfo.HasPayloads); } @in.AddPosition(position, payload, startOffset, endOffset); } public override void FinishDoc() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state == PostingsConsumerState.START); + if (Debugging.AssertsEnabled) Debugging.Assert(state == PostingsConsumerState.START); state = PostingsConsumerState.INITIAL; if (fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount == 0); // we should not have fed any positions! + if (Debugging.AssertsEnabled) Debugging.Assert(positionCount == 0); // we should not have fed any positions! 
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount == freq);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(positionCount == freq);
                 }
                 @in.FinishDoc();
             }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs
index d4d9d763b2..fa3514e5a0 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs
@@ -58,7 +58,7 @@ protected override void Dispose(bool disposing)

         public override void VisitDocument(int n, StoredFieldVisitor visitor)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => n >= 0 && n < maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(n >= 0 && n < maxDoc);
             @in.VisitDocument(n, visitor);
         }
@@ -100,9 +100,9 @@ internal AssertingStoredFieldsWriter(StoredFieldsWriter @in)

             public override void StartDocument(int numStoredFields)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus != Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus != Status.STARTED);
                 @in.StartDocument(numStoredFields);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount == 0);
                 fieldCount = numStoredFields;
                 numWritten++;
                 docStatus = Status.STARTED;
@@ -110,17 +110,17 @@ public override void StartDocument(int numStoredFields)

             public override void FinishDocument()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount == 0);
                 @in.FinishDocument();
                 docStatus = Status.FINISHED;
             }

             public override void WriteField(FieldInfo info, IIndexableField field)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == Status.STARTED);
                 @in.WriteField(info, field);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount > 0);
                 fieldCount--;
             }
@@ -131,10 +131,10 @@ public override void Abort()

             public override void Finish(FieldInfos fis, int numDocs)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED));
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED));
                 @in.Finish(fis, numDocs);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs == numWritten);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(numDocs == numWritten);
             }

             protected override void Dispose(bool disposing)
diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs
index b7ce2300f0..dd538b96aa 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs
@@ -103,8 +103,8 @@ internal AssertingTermVectorsWriter(TermVectorsWriter @in)

             public override void StartDocument(int numVectorFields)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus != Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus != Status.STARTED);
                 @in.StartDocument(numVectorFields);
                 docStatus = Status.STARTED;
                 fieldCount = numVectorFields;
@@ -113,17 +113,17 @@ public override void StartDocument(int numVectorFields)

             public override void FinishDocument()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == 0);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == Status.STARTED);
                 @in.FinishDocument();
                 docStatus = Status.FINISHED;
             }

             public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => termCount == 0);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus != Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(termCount == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldStatus != Status.STARTED);
                 @in.StartField(info, numTerms, positions, offsets, payloads);
                 fieldStatus = Status.STARTED;
                 termCount = numTerms;
@@ -132,8 +132,8 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo

             public override void FinishField()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => termCount == 0);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(termCount == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldStatus == Status.STARTED);
                 @in.FinishField();
                 fieldStatus = Status.FINISHED;
                 --fieldCount;
@@ -141,9 +141,9 @@ public override void FinishField()

             public override void StartTerm(BytesRef term, int freq)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus == Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => termStatus != Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(termStatus != Status.STARTED);
                 @in.StartTerm(term, freq);
                 termStatus = Status.STARTED;
                 positionCount = hasPositions ? freq : 0;
@@ -151,10 +151,10 @@ public override void StartTerm(BytesRef term, int freq)

             public override void FinishTerm()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount == 0);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus == Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => termStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(positionCount == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(termStatus == Status.STARTED);
                 @in.FinishTerm();
                 termStatus = Status.FINISHED;
                 --termCount;
@@ -162,9 +162,9 @@ public override void FinishTerm()

             public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus == Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => termStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldStatus == Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(termStatus == Status.STARTED);
                 @in.AddPosition(position, startOffset, endOffset, payload);
                 --positionCount;
             }
@@ -176,10 +176,10 @@ public override void Abort()

             public override void Finish(FieldInfos fis, int numDocs)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docCount == numDocs);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED));
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldStatus != Status.STARTED);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => termStatus != Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docCount == numDocs);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED));
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldStatus != Status.STARTED);
+                if (Debugging.AssertsEnabled) Debugging.Assert(termStatus != Status.STARTED);
                 @in.Finish(fis, numDocs);
             }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
index ae218dd460..f1eef1bcb4 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
@@ -57,7 +57,7 @@ private class DecompressorAnonymousInnerClassHelper : Decompressor
         {
             public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => offset + length <= originalLength);
+                if (Debugging.AssertsEnabled) Debugging.Assert(offset + length <= originalLength);
                 if (bytes.Bytes.Length < originalLength)
                 {
                     bytes.Bytes = new byte[ArrayUtil.Oversize(originalLength, 1)];
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
index 8a461be015..0538bdc5b0 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
@@ -79,7 +79,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     if (fi.IsIndexed)
                     {
                         bits |= IS_INDEXED;
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
                         if (fi.IndexOptions == IndexOptions.DOCS_ONLY)
                         {
                             bits |= OMIT_TERM_FREQ_AND_POSITIONS;
@@ -103,7 +103,7 @@ public override void Write(Directory directory, string segmentName, string segme
                         // only in RW case
                        output.WriteByte((byte)(sbyte)(fi.NormType == Index.DocValuesType.NONE ? 0 : 1));
                     }
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.Attributes == null); // not used or supported
+                    if (Debugging.AssertsEnabled) Debugging.Assert(fi.Attributes == null); // not used or supported
                 }
                 success = true;
             }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs
index a849aeb7fc..25bdea9feb 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs
@@ -81,7 +81,7 @@ public PreFlexRWFieldsWriter(SegmentWriteState state)

         public override TermsConsumer AddField(FieldInfo field)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Number != -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(field.Number != -1);
             if (field.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0)
             {
                 throw new NotSupportedException("this codec cannot index offsets");
@@ -164,7 +164,7 @@ public override void StartDoc(int docID, int termDocFreq)

                 lastDocID = docID;

-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docID < outerInstance.outerInstance.totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + outerInstance.outerInstance.totalNumDocs);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docID < outerInstance.outerInstance.totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + outerInstance.outerInstance.totalNumDocs);

                 if (outerInstance.omitTF)
                 {
@@ -188,9 +188,9 @@ public override void StartDoc(int docID, int termDocFreq)

             public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.outerInstance.proxOut != null);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset == -1);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => endOffset == -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.outerInstance.proxOut != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(startOffset == -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(endOffset == -1);
                 //System.out.println(" w pos=" + position + " payl=" + payload);
                 int delta = position - lastPosition;
                 lastPosition = position;
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
index 9cd2cdeec7..0ff6703e24 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
@@ -75,7 +75,7 @@ public PreFlexRWNormsConsumer(Directory directory, string segment, IOContext con

         public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Number > lastFieldNumber, () => "writing norms fields out of order" + lastFieldNumber + " -> " + field.Number);
+            if (Debugging.AssertsEnabled) Debugging.Assert(field.Number > lastFieldNumber, () => "writing norms fields out of order" + lastFieldNumber + " -> " + field.Number);
             foreach (var n in values)
             {
                 if (((sbyte)(byte)(long)n) < sbyte.MinValue || ((sbyte)(byte)(long)n) > sbyte.MaxValue)
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
index 807b279967..03f3a3d27f 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
@@ -36,7 +36,7 @@ internal sealed class PreFlexRWStoredFieldsWriter : StoredFieldsWriter

         public PreFlexRWStoredFieldsWriter(Directory directory, string segment, IOContext context)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => directory != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(directory != null);
             this.directory = directory;
             this.segment = segment;
@@ -188,7 +188,7 @@ public override void WriteField(FieldInfo info, IIndexableField field)
                         fieldsStream.WriteInt64(J2N.BitConversion.DoubleToInt64Bits(field.GetDoubleValue().Value));
                         break;
                     default:
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(false);
                         break;
                 }
             }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs
index 1c42cd5f8f..e08d332419 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs
@@ -75,7 +75,7 @@ public override void StartDocument(int numVectorFields)

         public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName);
+            if (Debugging.AssertsEnabled) Debugging.Assert(lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName);
             lastFieldName = info.Name;
             if (payloads)
             {
@@ -98,7 +98,7 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo
             }
             tvf.WriteByte((byte)bits);

-            if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount <= numVectorFields);
+            if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount <= numVectorFields);
             if (fieldCount == numVectorFields)
             {
                 // last field of the document
@@ -148,7 +148,7 @@ public override void StartTerm(BytesRef term, int freq)

         public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => payload == null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(payload == null);
             if (positions && offsets)
             {
                 // write position delta
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
index 61d3b9e277..8e33659624 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
@@ -135,7 +135,7 @@ private void Initialize(Directory directory, string segment, FieldInfos fis, int
                 output.WriteInt32(indexInterval); // write indexInterval
                 output.WriteInt32(skipInterval); // write skipInterval
                 output.WriteInt32(maxSkipLevels); // write maxSkipLevels
-                if (Debugging.AssertsEnabled) Debugging.Assert(InitUTF16Results);
+                if (Debugging.AssertsEnabled) Debugging.Assert(InitUTF16Results());
                 success = true;
             }
             finally
@@ -202,10 +202,10 @@ private int CompareToLastTerm(int fieldNumber, BytesRef term)
             }

             scratchBytes.CopyBytes(term);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => lastTerm.Offset == 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(lastTerm.Offset == 0);
             UnicodeUtil.UTF8toUTF16(lastTerm.Bytes, 0, lastTerm.Length, utf16Result1);

-            if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchBytes.Offset == 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(scratchBytes.Offset == 0);
             UnicodeUtil.UTF8toUTF16(scratchBytes.Bytes, 0, scratchBytes.Length, utf16Result2);

             int len;
@@ -243,10 +243,10 @@ private int CompareToLastTerm(int fieldNumber, BytesRef term)
         ///
         public void Add(int fieldNumber, BytesRef term, TermInfo ti)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => CompareToLastTerm(fieldNumber, term) < 0 || (isIndex && term.Length == 0 && lastTerm.Length == 0), () => "Terms are out of order: field=" + FieldName(fieldInfos, fieldNumber) + " (number " + fieldNumber + ")" + " lastField=" + FieldName(fieldInfos, lastFieldNumber) + " (number " + lastFieldNumber + ")" + " text=" + term.Utf8ToString() + " lastText=" + lastTerm.Utf8ToString());
+            if (Debugging.AssertsEnabled) Debugging.Assert(CompareToLastTerm(fieldNumber, term) < 0 || (isIndex && term.Length == 0 && lastTerm.Length == 0), () => "Terms are out of order: field=" + FieldName(fieldInfos, fieldNumber) + " (number " + fieldNumber + ")" + " lastField=" + FieldName(fieldInfos, lastFieldNumber) + " (number " + lastFieldNumber + ")" + " text=" + term.Utf8ToString() + " lastText=" + lastTerm.Utf8ToString());

-            if (Debugging.AssertsEnabled) Debugging.Assert(() => ti.FreqPointer >= lastTi.FreqPointer, () => "freqPointer out of order (" + ti.FreqPointer + " < " + lastTi.FreqPointer + ")");
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => ti.ProxPointer >= lastTi.ProxPointer, () => "proxPointer out of order (" + ti.ProxPointer + " < " + lastTi.ProxPointer + ")");
+            if (Debugging.AssertsEnabled) Debugging.Assert(ti.FreqPointer >= lastTi.FreqPointer, () => "freqPointer out of order (" + ti.FreqPointer + " < " + lastTi.FreqPointer + ")");
+            if (Debugging.AssertsEnabled) Debugging.Assert(ti.ProxPointer >= lastTi.ProxPointer, () => "proxPointer out of order (" + ti.ProxPointer + " < " + lastTi.ProxPointer + ")");

             if (!isIndex && size % indexInterval == 0)
             {
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
index 62d6dc1714..ed4709de34 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
@@ -333,7 +333,7 @@ private void AddVarStraightBytesField(FieldInfo field, IndexOutput data, IndexOu
             index.WriteVInt64(maxAddress);

             int maxDoc = state.SegmentInfo.DocCount;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => maxDoc != int.MaxValue); // unsupported by the 4.0 impl
+            if (Debugging.AssertsEnabled) Debugging.Assert(maxDoc != int.MaxValue); // unsupported by the 4.0 impl

             PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT);
             long currentPosition = 0;
@@ -346,7 +346,7 @@ private void AddVarStraightBytesField(FieldInfo field, IndexOutput data, IndexOu
                 }
             }
             // write sentinel
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => currentPosition == maxAddress);
+            if (Debugging.AssertsEnabled) Debugging.Assert(currentPosition == maxAddress);
             w.Add(currentPosition);
             w.Finish();
         }
@@ -375,7 +375,7 @@ private void AddFixedDerefBytesField(FieldInfo field, IndexOutput data, IndexOut

             /* ordinals */
             int valueCount = dictionary.Count;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(valueCount > 0);
             index.WriteInt32(valueCount);
             int maxDoc = state.SegmentInfo.DocCount;
             PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
@@ -439,7 +439,7 @@ private void AddVarDerefBytesField(FieldInfo field, IndexOutput data, IndexOutpu
         // the little vint encoding used for var-deref
         private static void WriteVInt16(IndexOutput o, int i)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => i >= 0 && i <= short.MaxValue);
+            if (Debugging.AssertsEnabled) Debugging.Assert(i >= 0 && i <= short.MaxValue);
             if (i < 128)
             {
                 o.WriteByte((byte)(sbyte)i);
@@ -545,7 +545,7 @@ private void AddFixedSortedBytesField(FieldInfo field, IndexOutput data, IndexOu
             index.WriteInt32(valueCount);

             int maxDoc = state.SegmentInfo.DocCount;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(valueCount > 0);
             PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
             foreach (long n in docToOrd)
             {
@@ -578,7 +578,7 @@ private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutp

             long maxAddress = data.GetFilePointer() - startPos;
             index.WriteInt64(maxAddress);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount != int.MaxValue); // unsupported by the 4.0 impl
+            if (Debugging.AssertsEnabled) Debugging.Assert(valueCount != int.MaxValue); // unsupported by the 4.0 impl

             PackedInt32s.Writer w = PackedInt32s.GetWriter(index, valueCount + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT);
             long currentPosition = 0;
@@ -588,14 +588,14 @@ private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutp
                 currentPosition += v.Length;
             }
             // write sentinel
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => currentPosition == maxAddress);
+            if (Debugging.AssertsEnabled) Debugging.Assert(currentPosition == maxAddress);
             w.Add(currentPosition);
             w.Finish();

             /* ordinals */
             int maxDoc = state.SegmentInfo.DocCount;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(valueCount > 0);
             PackedInt32s.Writer ords = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
             foreach (long n in docToOrd)
             {
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs
index d4eb855cbd..3a13facb1e 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs
@@ -66,7 +66,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     if (fi.IsIndexed)
                     {
                         bits |= Lucene40FieldInfosFormat.IS_INDEXED;
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                         if (indexOptions == IndexOptions.DOCS_ONLY)
                         {
                             bits |= Lucene40FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
@@ -87,7 +87,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     // pack the DV types in one byte
                     byte dv = DocValuesByte(fi.DocValuesType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY));
                     byte nrm = DocValuesByte(fi.NormType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY));
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
                     var val = (byte)(0xff & ((nrm << 4) | (byte)dv));
                     output.WriteByte(val);
                     output.WriteStringStringMap(fi.Attributes);
@@ -113,12 +113,12 @@ public virtual byte DocValuesByte(DocValuesType type, string legacyTypeAtt)
         {
             if (type == DocValuesType.NONE)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => legacyTypeAtt == null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(legacyTypeAtt == null);
                 return 0;
             }
             else
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => legacyTypeAtt != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(legacyTypeAtt != null);
                 //return (sbyte)LegacyDocValuesType.ordinalLookup[legacyTypeAtt];
                 return (byte)legacyTypeAtt.ToLegacyDocValuesType();
             }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
index d5b8e24f84..8124e65e8c 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
@@ -208,7 +208,7 @@ public override void StartDoc(int docID, int termDocFreq)
                 skipListWriter.BufferSkip(df);
             }

-            if (Debugging.AssertsEnabled) Debugging.Assert(() => docID < totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + totalNumDocs);
+            if (Debugging.AssertsEnabled) Debugging.Assert(docID < totalNumDocs, () => "docID=" + docID + " totalNumDocs=" + totalNumDocs);

             lastDocID = docID;
             if (indexOptions == IndexOptions.DOCS_ONLY)
@@ -234,12 +234,12 @@ public override void StartDoc(int docID, int termDocFreq)
         public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
         {
             //if (DEBUG) System.out.println("SPW: addPos pos=" + position + " payload=" + (payload == null ? "null" : (payload.Length + " bytes")) + " proxFP=" + proxOut.getFilePointer());
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0, () => "invalid indexOptions: " + indexOptions);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => proxOut != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0, () => "invalid indexOptions: " + indexOptions);
+            if (Debugging.AssertsEnabled) Debugging.Assert(proxOut != null);

             int delta = position - lastPosition;

-            if (Debugging.AssertsEnabled) Debugging.Assert(() => delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it)
+            if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0, () => "position=" + position + " lastPosition=" + lastPosition); // not quite right (if pos=0 is repeated twice we don't catch it)

             lastPosition = position;
@@ -271,7 +271,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset
                 // and the numbers aren't that much smaller anyways.
                 int offsetDelta = startOffset - lastOffset;
                 int offsetLength = endOffset - startOffset;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => offsetDelta >= 0 && offsetLength >= 0, () => "startOffset=" + startOffset + ",lastOffset=" + lastOffset + ",endOffset=" + endOffset);
+                if (Debugging.AssertsEnabled) Debugging.Assert(offsetDelta >= 0 && offsetLength >= 0, () => "startOffset=" + startOffset + ",lastOffset=" + lastOffset + ",endOffset=" + endOffset);
                 if (offsetLength != lastOffsetLength)
                 {
                     proxOut.WriteVInt32(offsetDelta << 1 | 1);
@@ -308,11 +308,11 @@ public override void FinishTerm(BlockTermState state)
         {
             StandardTermState state_ = (StandardTermState)state;
             // if (DEBUG) System.out.println("SPW: finishTerm seg=" + segment + " freqStart=" + freqStart);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => state_.DocFreq > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(state_.DocFreq > 0);

             // TODO: wasteful we are counting this (counting # docs
             // for this term) in two places?
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => state_.DocFreq == df);
+            if (Debugging.AssertsEnabled) Debugging.Assert(state_.DocFreq == df);
             state_.FreqStart = freqStart;
             state_.ProxStart = proxStart;
             if (df >= skipMinimum)
@@ -337,7 +337,7 @@ public override void EncodeTerm(long[] empty, DataOutput @out, FieldInfo fieldIn
             @out.WriteVInt64(state_.FreqStart - lastState.FreqStart);
             if (state_.SkipOffset != -1)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state_.SkipOffset > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(state_.SkipOffset > 0);
                 @out.WriteVInt64(state_.SkipOffset);
             }
             if (indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0)
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
index d6ffba97aa..5b902a20c5 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
@@ -67,8 +67,8 @@ public Lucene40SkipListWriter(int skipInterval, int numberOfSkipLevels, int docC
        ///
        public virtual void SetSkipData(int doc, bool storePayloads, int payloadLength, bool storeOffsets, int offsetLength)
        {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => storePayloads || payloadLength == -1);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => storeOffsets || offsetLength == -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(storePayloads || payloadLength == -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(storeOffsets || offsetLength == -1);
             this.curDoc = doc;
             this.curStorePayloads = storePayloads;
             this.curPayloadLength = payloadLength;
@@ -120,8 +120,8 @@ protected override void WriteSkipData(int level, IndexOutput skipBuffer)

             if (curStorePayloads || curStoreOffsets)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => curStorePayloads || curPayloadLength == lastSkipPayloadLength[level]);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => curStoreOffsets || curOffsetLength == lastSkipOffsetLength[level]);
+                if (Debugging.AssertsEnabled) Debugging.Assert(curStorePayloads || curPayloadLength == lastSkipPayloadLength[level]);
+                if (Debugging.AssertsEnabled) Debugging.Assert(curStoreOffsets || curOffsetLength == lastSkipOffsetLength[level]);

                 if (curPayloadLength == lastSkipPayloadLength[level] && curOffsetLength == lastSkipOffsetLength[level])
                 {
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
index 6c5c200472..fde6cbfa9d 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
@@ -125,7 +125,7 @@ internal virtual void AddNumericField(FieldInfo field, IEnumerable<long?> values
                     ++count;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(count == maxDoc);
             }

             if (uniqueValues != null)
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
index 11287be2d2..f47d6c52bb 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
@@ -66,7 +66,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     if (fi.IsIndexed)
                     {
                         bits |= Lucene42FieldInfosFormat.IS_INDEXED;
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                         if (indexOptions == IndexOptions.DOCS_ONLY)
                         {
                             bits |= Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
@@ -87,7 +87,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     // pack the DV types in one byte
                     var dv = DocValuesByte(fi.DocValuesType);
                     var nrm = DocValuesByte(fi.NormType);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
                     var val = (byte)(0xff & ((nrm << 4) | (byte)dv));
                     output.WriteByte(val);
                     output.WriteStringStringMap(fi.Attributes);
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
index 87344892c6..e484e59743 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
@@ -107,7 +107,7 @@ public virtual int ReadBlock()
                 {
                     buffer[0] = input.ReadVInt32();
                     int count = buffer[0] <= 3 ? baseBlockSize - 1 : 2 * baseBlockSize - 1;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => buffer.Length >= count, () => "buffer.length=" + buffer.Length + " count=" + count);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(buffer.Length >= count, () => "buffer.length=" + buffer.Length + " count=" + count);
                     for (int i = 0; i < count; i++)
                     {
                         buffer[i + 1] = input.ReadVInt32();
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
index 8481e0dd44..b4f7811cc9 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
@@ -88,7 +88,7 @@ public MockInt32StreamFactory(Random random)
         private static string GetExtension(string fileName)
         {
             int idx = fileName.IndexOf('.');
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => idx != -1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(idx != -1);
             return fileName.Substring(idx);
         }
diff --git a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
index ba4029ee48..f39a807a5d 100644
--- a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
@@ -290,8 +290,8 @@ public override IComparer<BytesRef> Comparer

             public override void FinishTerm(BytesRef text, TermStats stats)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq > 0);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq == current.docs.Count);
+                if (Debugging.AssertsEnabled) Debugging.Assert(stats.DocFreq > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(stats.DocFreq == current.docs.Count);
                 current.totalTermFreq = stats.TotalTermFreq;
                 field.termToDocs[current.term] = current;
             }
@@ -324,8 +324,8 @@ public override void StartDoc(int docID, int freq)

             public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset == -1);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => endOffset == -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(startOffset == -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(endOffset == -1);
                 current.positions[posUpto] = position;
                 if (payload != null && payload.Length > 0)
                 {
@@ -341,7 +341,7 @@ public override void AddPosition(int position, BytesRef payload, int startOffset

             public override void FinishDoc()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => posUpto == current.positions.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(posUpto == current.positions.Length);
             }
         }
diff --git a/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs b/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs
index c545d6013e..9342369b63 100644
--- a/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/AllDeletedFilterReader.cs
@@ -31,7 +31,7 @@ public AllDeletedFilterReader(AtomicReader @in)
             : base(@in)
         {
             liveDocs = new Bits.MatchNoBits(@in.MaxDoc);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxDoc == 0 || HasDeletions);
+            if (Debugging.AssertsEnabled) Debugging.Assert(MaxDoc == 0 || HasDeletions);
        }

        public override IBits LiveDocs => liveDocs;
diff --git a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
index 077b27083d..f36ddb87e2 100644
--- a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs
@@ -36,7 +36,7 @@ public AssertingFields(Fields input)
         public override IEnumerator<string> GetEnumerator()
         {
             IEnumerator<string> iterator = base.GetEnumerator();
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => iterator != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(iterator != null);
             return iterator;
         }
@@ -59,8 +59,8 @@ public AssertingTerms(Terms input)
         public override TermsEnum Intersect(CompiledAutomaton automaton, BytesRef bytes)
         {
             TermsEnum termsEnum = m_input.Intersect(automaton, bytes);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes == null || bytes.IsValid());
+            if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(bytes == null || bytes.IsValid());
             return new AssertingAtomicReader.AssertingTermsEnum(termsEnum);
         }
@@ -73,7 +73,7 @@ public override TermsEnum GetIterator(TermsEnum reuse)
                 reuse = ((AssertingAtomicReader.AssertingTermsEnum)reuse).m_input;
             }
             TermsEnum termsEnum = base.GetIterator(reuse);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null);
             return new AssertingAtomicReader.AssertingTermsEnum(termsEnum);
         }
     }
@@ -102,7 +102,7 @@ public AssertingDocsEnum(DocsEnum @in, bool failOnUnsupportedDocID)
                 try
                 {
                     int docid = @in.DocID;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => docid == -1, () => @in.GetType() + ": invalid initial doc id: " + docid);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(docid == -1, () => @in.GetType() + ": invalid initial doc id: " + docid);
                 }
                 catch (NotSupportedException /*e*/)
                 {
@@ -116,9 +116,9 @@ public AssertingDocsEnum(DocsEnum @in, bool failOnUnsupportedDocID)

             public override int NextDoc()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
                 int nextDoc = base.NextDoc();
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => nextDoc > doc, () => "backwards NextDoc from " + doc + " to " + nextDoc + " " + m_input);
+                if (Debugging.AssertsEnabled) Debugging.Assert(nextDoc > doc, () => "backwards NextDoc from " + doc + " to " + nextDoc + " " + m_input);
                 if (nextDoc == DocIdSetIterator.NO_MORE_DOCS)
                 {
                     state = DocsEnumState.FINISHED;
@@ -127,16 +127,16 @@ public override int NextDoc()
                 {
                     state = DocsEnumState.ITERATING;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => base.DocID == nextDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(base.DocID == nextDoc);
                 return doc = nextDoc;
             }

             public override int Advance(int target)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
                 int advanced = base.Advance(target);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
+                if (Debugging.AssertsEnabled) Debugging.Assert(advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
                 if (advanced == DocIdSetIterator.NO_MORE_DOCS)
                 {
                     state = DocsEnumState.FINISHED;
@@ -145,7 +145,7 @@ public override int Advance(int target)
                 {
                     state = DocsEnumState.ITERATING;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => base.DocID == advanced);
+                if (Debugging.AssertsEnabled) Debugging.Assert(base.DocID == advanced);
                 return doc = advanced;
             }
@@ -153,7 +153,7 @@ public override int DocID
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc);
                     return doc;
                 }
             }
@@ -162,10 +162,10 @@ public override int Freq
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
                     int freq = base.Freq;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0);
                     return freq;
                 }
             }
@@ -186,7 +186,7 @@ public AssertingNumericDocValues(NumericDocValues @in, int maxDoc)

             public override long Get(int docID)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0 && docID < maxDoc);
                 return @in.Get(docID);
             }
         }
@@ -206,10 +206,10 @@ public AssertingBinaryDocValues(BinaryDocValues @in, int maxDoc)

             public override void Get(int docID, BytesRef result)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc);
-                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0 && docID < maxDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
                 @in.Get(docID, result);
-                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
             }
         }
@@ -226,23 +226,23 @@ public AssertingSortedDocValues(SortedDocValues @in, int maxDoc)
                 this.@in = @in;
                 this.maxDoc = maxDoc;
                 this.valueCount = @in.ValueCount;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0 && valueCount <= maxDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(valueCount >= 0 && valueCount <= maxDoc);
             }

             public override int GetOrd(int docID)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0 && docID < maxDoc);
                 int ord = @in.GetOrd(docID);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= -1 && ord < valueCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(ord >= -1 && ord < valueCount);
                 return ord;
             }

             public override void LookupOrd(int ord, BytesRef result)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < valueCount);
-                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0 && ord < valueCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
                 @in.LookupOrd(ord, result);
-                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
             }

             public override int ValueCount
@@ -250,25 +250,25 @@ public override int ValueCount
                 get
                 {
                     int valueCount = @in.ValueCount;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount == this.valueCount); // should not change
+                    if (Debugging.AssertsEnabled) Debugging.Assert(valueCount == this.valueCount); // should not change
                     return valueCount;
                 }
             }

             public override void Get(int docID, BytesRef result)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc);
-                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0 && docID < maxDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
                 @in.Get(docID, result);
-                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
             }

             public override int LookupTerm(BytesRef key)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid());
                 int result = @in.LookupTerm(key);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => result < valueCount);
-                if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result < valueCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid());
                 return result;
             }
         }
@@ -287,32 +287,32 @@ public AssertingSortedSetDocValues(SortedSetDocValues @in, int maxDoc)
                 this.@in = @in;
                 this.maxDoc = maxDoc;
                 this.valueCount = @in.ValueCount;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(valueCount >= 0);
             }

             public override long NextOrd()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => lastOrd != NO_MORE_ORDS);
+                if (Debugging.AssertsEnabled) Debugging.Assert(lastOrd != NO_MORE_ORDS);
                 long ord = @in.NextOrd();
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => ord < valueCount);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => ord == NO_MORE_ORDS || ord > lastOrd);
+                if (Debugging.AssertsEnabled) Debugging.Assert(ord < valueCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(ord == NO_MORE_ORDS || ord > lastOrd);
                 lastOrd = ord;
                 return ord;
             }

             public override void SetDocument(int docID)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0 && docID < maxDoc, () => "docid=" + docID + ",maxDoc=" + maxDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0 && docID < maxDoc, () => "docid=" + docID + ",maxDoc=" + maxDoc);
                 @in.SetDocument(docID);
                 lastOrd = -2;
             }

             public override void LookupOrd(long ord, BytesRef result)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < valueCount);
-                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0 && ord < valueCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
                 @in.LookupOrd(ord, result);
-                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
             }

             public override long ValueCount
@@ -320,17 +320,17 @@ public override long ValueCount
                 get
                 {
                     long valueCount = @in.ValueCount;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount == this.valueCount); // should not change
+                    if (Debugging.AssertsEnabled) Debugging.Assert(valueCount == this.valueCount); // should not change
                     return valueCount;
                 }
             }

             public override long LookupTerm(BytesRef key)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid());
                 long result = @in.LookupTerm(key);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => result < valueCount);
-                if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(result < valueCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(key.IsValid());
                 return result;
             }
         }
@@ -348,7 +348,7 @@ public AssertingBits(IBits @in)

             public virtual bool Get(int index)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < Length);
                 return @in.Get(index);
             }
@@ -365,10 +365,10 @@ public AssertingAtomicReader(AtomicReader @in)
             : base(@in)
         {
             // check some basic reader sanity
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.MaxDoc >= 0);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.NumDocs <= @in.MaxDoc);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.NumDeletedDocs + @in.NumDocs == @in.MaxDoc);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => !@in.HasDeletions || @in.NumDeletedDocs > 0 && @in.NumDocs < @in.MaxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(@in.MaxDoc >= 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(@in.NumDocs <= @in.MaxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(@in.NumDeletedDocs + @in.NumDocs == @in.MaxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(!@in.HasDeletions || @in.NumDeletedDocs > 0 && @in.NumDocs < @in.MaxDoc);
         }

         public override Fields Fields
@@ -409,7 +409,7 @@ public AssertingTermsEnum(TermsEnum @in)

             public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "Docs(...) called on unpositioned TermsEnum");
+                if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "Docs(...) called on unpositioned TermsEnum");

                 // TODO: should we give this thing a random to be super-evil,
                 // and randomly *not* unwrap?
@@ -423,7 +423,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags)

             public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "DocsAndPositions(...) called on unpositioned TermsEnum");
+                if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "DocsAndPositions(...) called on unpositioned TermsEnum");

                 // TODO: should we give this thing a random to be super-evil,
                 // and randomly *not* unwrap?
@@ -439,7 +439,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos
             // someone should not call next() after it returns null!!!!
             public override BytesRef Next()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.INITIAL || state == State.POSITIONED, () => "Next() called on unpositioned TermsEnum");
+                if (Debugging.AssertsEnabled) Debugging.Assert(state == State.INITIAL || state == State.POSITIONED, () => "Next() called on unpositioned TermsEnum");
                 BytesRef result = base.Next();
                 if (result == null)
                 {
@@ -447,7 +447,7 @@ public override BytesRef Next()
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(result.IsValid());
                     state = State.POSITIONED;
                 }
                 return result;
@@ -457,7 +457,7 @@ public override long Ord
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "Ord called on unpositioned TermsEnum");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "Ord called on unpositioned TermsEnum");
                     return base.Ord;
                 }
             }
@@ -466,7 +466,7 @@ public override int DocFreq
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "DocFreq called on unpositioned TermsEnum");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "DocFreq called on unpositioned TermsEnum");
                     return base.DocFreq;
                 }
             }
@@ -475,7 +475,7 @@ public override long TotalTermFreq
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "TotalTermFreq called on unpositioned TermsEnum");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "TotalTermFreq called on unpositioned TermsEnum");
                     return base.TotalTermFreq;
                 }
             }
@@ -484,9 +484,9 @@ public override BytesRef Term
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "Term called on unpositioned TermsEnum");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "Term called on unpositioned TermsEnum");
                     BytesRef ret = base.Term;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => ret == null || ret.IsValid());
+                    if (Debugging.AssertsEnabled) Debugging.Assert(ret == null || ret.IsValid());
                     return ret;
                 }
             }
@@ -499,7 +499,7 @@ public override void SeekExact(long ord)

             public override SeekStatus SeekCeil(BytesRef term)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid());
                 SeekStatus result = base.SeekCeil(term);
                 if (result == SeekStatus.END)
                 {
@@ -514,7 +514,7 @@ public override SeekStatus SeekCeil(BytesRef term)

             public override bool SeekExact(BytesRef text)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(text.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(text.IsValid());
                 if (base.SeekExact(text))
                 {
                     state = State.POSITIONED;
@@ -529,13 +529,13 @@ public override bool SeekExact(BytesRef text)

             public override TermState GetTermState()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state == State.POSITIONED, () => "GetTermState() called on unpositioned TermsEnum");
+                if (Debugging.AssertsEnabled) Debugging.Assert(state == State.POSITIONED, () => "GetTermState() called on unpositioned TermsEnum");
                 return base.GetTermState();
             }

             public override void SeekExact(BytesRef term, TermState state)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid());
                 base.SeekExact(term, state);
                 this.state = State.POSITIONED;
             }
@@ -556,15 +556,15 @@ public AssertingDocsAndPositionsEnum(DocsAndPositionsEnum @in)
                 : base(@in)
             {
                 int docid = @in.DocID;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docid == -1, () => "invalid initial doc id: " + docid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docid == -1, () => "invalid initial doc id: " + docid);
                 doc = -1;
             }

             public override int NextDoc()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "NextDoc() called after NO_MORE_DOCS");
                 int nextDoc = base.NextDoc();
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => nextDoc > doc, () => "backwards nextDoc from " + doc + " to " + nextDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(nextDoc > doc, () => "backwards nextDoc from " + doc + " to " + nextDoc);
                 positionCount = 0;
                 if (nextDoc == DocIdSetIterator.NO_MORE_DOCS)
                 {
@@ -576,16 +576,16 @@ public override int NextDoc()
                     state = DocsEnumState.ITERATING;
                     positionMax = base.Freq;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => base.DocID == nextDoc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(base.DocID == nextDoc);
                 return doc = nextDoc;
             }

             public override int Advance(int target)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "Advance() called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(target > doc, () => "target must be > DocID, got " + target + " <= " + doc);
                 int advanced = base.Advance(target);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
+                if (Debugging.AssertsEnabled) Debugging.Assert(advanced >= target, () => "backwards advance from: " + target + " to: " + advanced);
                 positionCount = 0;
                 if (advanced == DocIdSetIterator.NO_MORE_DOCS)
                 {
@@ -597,7 +597,7 @@ public override int Advance(int target)
                     state = DocsEnumState.ITERATING;
                     positionMax = base.Freq;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => base.DocID == advanced);
+                if (Debugging.AssertsEnabled) Debugging.Assert(base.DocID == advanced);
                 return doc = advanced;
             }
@@ -605,7 +605,7 @@ public override int DocID
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(doc == base.DocID, () => " invalid DocID in " + m_input.GetType() + " " + base.DocID + " instead of " + doc);
                     return doc;
                 }
             }
@@ -614,21 +614,21 @@ public override int Freq
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "Freq called before NextDoc()/Advance()");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "Freq called after NO_MORE_DOCS");
                     int freq = base.Freq;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => freq > 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(freq > 0);
                     return freq;
                 }
             }

             public override int NextPosition()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "NextPosition() called before NextDoc()/Advance()");
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "NextPosition() called after NO_MORE_DOCS");
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount < positionMax, () => "NextPosition() called more than Freq times!");
+                if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "NextPosition() called before NextDoc()/Advance()");
+                if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "NextPosition() called after NO_MORE_DOCS");
+                if (Debugging.AssertsEnabled) Debugging.Assert(positionCount < positionMax, () => "NextPosition() called more than Freq times!");
                 int position = base.NextPosition();
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => position >= 0 || position == -1, () => "invalid position: " + position);
+                if (Debugging.AssertsEnabled) Debugging.Assert(position >= 0 || position == -1, () => "invalid position: " + position);
                 positionCount++;
                 return position;
             }
@@ -637,9 +637,9 @@ public override int StartOffset
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "StartOffset called before NextDoc()/Advance()");
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "StartOffset called after NO_MORE_DOCS");
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount > 0, () => "StartOffset called before NextPosition()!");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "StartOffset called before NextDoc()/Advance()");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "StartOffset called after NO_MORE_DOCS");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, () => "StartOffset called before NextPosition()!");
                     return base.StartOffset;
                 }
             }
@@ -648,20 +648,20 @@ public override int EndOffset
             {
                 get
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "EndOffset called before NextDoc()/Advance()");
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.FINISHED, () => "EndOffset called after NO_MORE_DOCS");
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => positionCount > 0, () => "EndOffset called before NextPosition()!");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.START, () => "EndOffset called before NextDoc()/Advance()");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "EndOffset called after NO_MORE_DOCS");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, () => "EndOffset called before NextPosition()!");
                     return base.EndOffset;
                 }
             }

             public override BytesRef GetPayload()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state != DocsEnumState.START, () => "GetPayload() called before
NextDoc()/Advance()"); + if (Debugging.AssertsEnabled) Debugging.Assert(state != DocsEnumState.FINISHED, () => "GetPayload() called after NO_MORE_DOCS"); + if (Debugging.AssertsEnabled) Debugging.Assert(positionCount > 0, () => "GetPayload() called before NextPosition()!"); BytesRef payload = base.GetPayload(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => payload == null || payload.IsValid() && payload.Length > 0, () => "GetPayload() returned payload with invalid length!"); + if (Debugging.AssertsEnabled) Debugging.Assert(payload == null || payload.IsValid() && payload.Length > 0, () => "GetPayload() returned payload with invalid length!"); return payload; } } @@ -681,13 +681,13 @@ public override NumericDocValues GetNumericDocValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.DocValuesType == DocValuesType.NUMERIC); + if (Debugging.AssertsEnabled) Debugging.Assert(fi != null); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.DocValuesType == DocValuesType.NUMERIC); return new AssertingNumericDocValues(dv, MaxDoc); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.NUMERIC); + if (Debugging.AssertsEnabled) Debugging.Assert(fi == null || fi.DocValuesType != DocValuesType.NUMERIC); return null; } } @@ -698,13 +698,13 @@ public override BinaryDocValues GetBinaryDocValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.DocValuesType == DocValuesType.BINARY); + if (Debugging.AssertsEnabled) Debugging.Assert(fi != null); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.DocValuesType == DocValuesType.BINARY); return new AssertingBinaryDocValues(dv, MaxDoc); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.BINARY); + if (Debugging.AssertsEnabled) Debugging.Assert(fi == null || fi.DocValuesType != DocValuesType.BINARY); return null; } } @@ -715,13 +715,13 @@ public override SortedDocValues GetSortedDocValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.DocValuesType == DocValuesType.SORTED); + if (Debugging.AssertsEnabled) Debugging.Assert(fi != null); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.DocValuesType == DocValuesType.SORTED); return new AssertingSortedDocValues(dv, MaxDoc); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.SORTED); + if (Debugging.AssertsEnabled) Debugging.Assert(fi == null || fi.DocValuesType != DocValuesType.SORTED); return null; } } @@ -732,13 +732,13 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.DocValuesType == DocValuesType.SORTED_SET); + if (Debugging.AssertsEnabled) Debugging.Assert(fi != null); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.DocValuesType == DocValuesType.SORTED_SET); return new AssertingSortedSetDocValues(dv, MaxDoc); } else { - if (Debugging.AssertsEnabled) 
Debugging.Assert(() => fi == null || fi.DocValuesType != DocValuesType.SORTED_SET); + if (Debugging.AssertsEnabled) Debugging.Assert(fi == null || fi.DocValuesType != DocValuesType.SORTED_SET); return null; } } @@ -749,13 +749,13 @@ public override NumericDocValues GetNormValues(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (dv != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.HasNorms); + if (Debugging.AssertsEnabled) Debugging.Assert(fi != null); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.HasNorms); return new AssertingNumericDocValues(dv, MaxDoc); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.HasNorms == false); + if (Debugging.AssertsEnabled) Debugging.Assert(fi == null || fi.HasNorms == false); return null; } } @@ -769,13 +769,13 @@ public override IBits LiveDocs IBits liveDocs = base.LiveDocs; if (liveDocs != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxDoc == liveDocs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(MaxDoc == liveDocs.Length); liveDocs = new AssertingBits(liveDocs); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxDoc == NumDocs); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !HasDeletions); + if (Debugging.AssertsEnabled) Debugging.Assert(MaxDoc == NumDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(!HasDeletions); } return liveDocs; } @@ -787,14 +787,14 @@ public override IBits GetDocsWithField(string field) FieldInfo fi = FieldInfos.FieldInfo(field); if (docsWithField != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.HasDocValues); - if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxDoc == docsWithField.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(fi != null); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.HasDocValues); + if (Debugging.AssertsEnabled) Debugging.Assert(MaxDoc == docsWithField.Length); docsWithField = new AssertingBits(docsWithField); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == null || fi.HasDocValues == false); + if (Debugging.AssertsEnabled) Debugging.Assert(fi == null || fi.HasDocValues == false); } return docsWithField; } diff --git a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs index e0ab0ac206..4e65680715 100644 --- a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs @@ -116,7 +116,7 @@ public virtual void TestOneNumber() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader))).GetNumericDocValues("dv"); Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) } @@ -159,7 +159,7 @@ public virtual void TestOneSingle() // LUCENENET specific - renamed from TestOne { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if 
(Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv"); Assert.AreEqual((long)J2N.BitConversion.SingleToInt32Bits(5.7f), dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - cast required because types don't match (xUnit checks this) } @@ -201,7 +201,7 @@ public virtual void TestTwoNumbers() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1"); Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv2"); @@ -248,7 +248,7 @@ public virtual void TestTwoBinaryValues() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv1"); dv.Get(hits.ScoreDocs[i].Doc, scratch); Assert.AreEqual(new BytesRef(longTerm), scratch); @@ -297,7 +297,7 @@ public virtual void TestTwoFieldsMixed() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1"); Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) BinaryDocValues dv2 = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv2"); @@ -346,7 +346,7 @@ public virtual void TestThreeFieldsMixed() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv1"); int ord = dv.GetOrd(0); dv.LookupOrd(ord, scratch); @@ -399,7 +399,7 @@ public virtual void TestThreeFieldsMixed2() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv2"); int ord = dv.GetOrd(0); dv.LookupOrd(ord, scratch); @@ -438,7 +438,7 @@ public virtual void TestTwoDocumentsNumeric() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); NumericDocValues dv = 
((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); Assert.AreEqual(1L, dv.Get(0)); // LUCENENET specific - 1L required because types don't match (xUnit checks this) Assert.AreEqual(2L, dv.Get(1)); // LUCENENET specific - 2L required because types don't match (xUnit checks this) @@ -473,7 +473,7 @@ public virtual void TestTwoDocumentsMerged() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); for (int i = 0; i < 2; i++) { @@ -517,7 +517,7 @@ public virtual void TestBigNumericRange() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); Assert.AreEqual(long.MinValue, dv.Get(0)); Assert.AreEqual(long.MaxValue, dv.Get(1)); @@ -549,7 +549,7 @@ public virtual void TestBigNumericRange2() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); Assert.AreEqual(-8841491950446638677L, dv.Get(0)); Assert.AreEqual(9062230939892376225L, dv.Get(1)); @@ -591,7 +591,7 @@ public virtual void TestBytes() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); dv.Get(hits.ScoreDocs[i].Doc, scratch); Assert.AreEqual(new BytesRef("hello world"), scratch); @@ -627,7 +627,7 @@ public virtual void TestBytesTwoDocumentsMerged() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); for (int i = 0; i < 2; i++) @@ -684,7 +684,7 @@ public virtual void TestSortedBytes() { Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); dv.LookupOrd(dv.GetOrd(hits.ScoreDocs[i].Doc), scratch); Assert.AreEqual(new BytesRef("hello world"), scratch); @@ -717,7 +717,7 @@ public virtual void TestSortedBytesTwoDocuments() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if 
(Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); dv.LookupOrd(dv.GetOrd(0), scratch); @@ -755,7 +755,7 @@ public virtual void TestSortedBytesThreeDocuments() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); Assert.AreEqual(2, dv.ValueCount); BytesRef scratch = new BytesRef(); @@ -797,7 +797,7 @@ public virtual void TestSortedBytesTwoDocumentsMerged() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); Assert.AreEqual(2, dv.ValueCount); // 2 ords BytesRef scratch = new BytesRef(); @@ -894,7 +894,7 @@ public virtual void TestBytesWithNewline() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -925,7 +925,7 @@ public virtual void TestMissingSortedBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); dv.LookupOrd(dv.GetOrd(0), scratch); @@ -1045,7 +1045,7 @@ public virtual void TestEmptySortedBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); Assert.AreEqual(0, dv.GetOrd(0)); @@ -1080,7 +1080,7 @@ public virtual void TestEmptyBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1114,7 +1114,7 @@ public virtual void TestVeryLargeButLegalBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = 
((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1146,7 +1146,7 @@ public virtual void TestVeryLargeButLegalSortedBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1174,7 +1174,7 @@ public virtual void TestCodecUsesOwnBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); var mybytes = new byte[20]; BytesRef scratch = new BytesRef(mybytes); @@ -1205,7 +1205,7 @@ public virtual void TestCodecUsesOwnSortedBytes() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); var mybytes = new byte[20]; BytesRef scratch = new BytesRef(mybytes); @@ -1239,7 +1239,7 @@ public virtual void TestCodecUsesOwnBytesEachTime() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1277,7 +1277,7 @@ public virtual void TestCodecUsesOwnSortedBytesEachTime() // Now search the index: using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); BytesRef scratch = new BytesRef(); dv.Get(0, scratch); @@ -1494,7 +1494,7 @@ private void DoTestNumericsVsStoredFields(Int64Producer longs) int numDocs = AtLeast(300); // numDocs should be always > 256 so that in case of a codec that optimizes // for numbers of values <= 256, all storage layouts are tested - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 256); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 256); for (int i = 0; i < numDocs; i++) { idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); @@ -1580,7 +1580,7 @@ private void DoTestMissingVsFieldCache(Int64Producer longs) int numDocs = AtLeast(300); // numDocs should be always > 256 so that in case of a codec that optimizes // for numbers of values <= 256, all storage layouts are tested - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 256); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 256); for (int i = 0; i < numDocs; i++) { idField.SetStringValue(Convert.ToString(i, 
CultureInfo.InvariantCulture)); @@ -2649,13 +2649,13 @@ private void DoTestSortedSetVsStoredFields(int minLength, int maxLength, int max } for (int j = 0; j < stringValues.Length; j++) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docValues != null); + if (Debugging.AssertsEnabled) Debugging.Assert(docValues != null); long ord = docValues.NextOrd(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord != SortedSetDocValues.NO_MORE_ORDS); + if (Debugging.AssertsEnabled) Debugging.Assert(ord != SortedSetDocValues.NO_MORE_ORDS); docValues.LookupOrd(ord, scratch); Assert.AreEqual(stringValues[j], scratch.Utf8ToString()); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => docValues == null || docValues.NextOrd() == SortedSetDocValues.NO_MORE_ORDS); + if (Debugging.AssertsEnabled) Debugging.Assert(docValues == null || docValues.NextOrd() == SortedSetDocValues.NO_MORE_ORDS); } } } // ir.Dispose(); diff --git a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs index 7498cfef8c..45e2392106 100644 --- a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs @@ -239,7 +239,7 @@ public override int NextPosition() posUpto = freq; return 0; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => posUpto < freq); + if (Debugging.AssertsEnabled) Debugging.Assert(posUpto < freq); if (posUpto == 0 && random.NextBoolean()) { diff --git a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs index 528d0c976e..f4e06752a9 100644 --- a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs +++ b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs @@ -106,7 +106,7 @@ public override MergeSpecification FindForcedMerges(SegmentInfos segmentInfos, i { foreach (SegmentCommitInfo info in merge.Segments) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => segmentsToMerge.ContainsKey(info)); + if (Debugging.AssertsEnabled) Debugging.Assert(segmentsToMerge.ContainsKey(info)); } } } diff --git a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs index 2b0436f995..74da42eec9 100644 --- a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs +++ b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs @@ -92,7 +92,7 @@ public override PostingsFormat GetPostingsFormatForField(string name) } previousMappings[name] = codec; // Safety: - if (Debugging.AssertsEnabled) Debugging.Assert(() => previousMappings.Count < 10000, () => "test went insane"); + if (Debugging.AssertsEnabled) Debugging.Assert(previousMappings.Count < 10000, () => "test went insane"); } //if (LuceneTestCase.VERBOSE) @@ -115,7 +115,7 @@ public override DocValuesFormat GetDocValuesFormatForField(string name) } previousDVMappings[name] = codec; // Safety: - if (Debugging.AssertsEnabled) Debugging.Assert(() => previousDVMappings.Count < 10000, () => "test went insane"); + if (Debugging.AssertsEnabled) Debugging.Assert(previousDVMappings.Count < 10000, () => "test went insane"); } //if (LuceneTestCase.VERBOSE) diff --git a/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs b/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs index 52faf55f51..fc82ddb195 100644 --- a/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs +++ 
b/src/Lucene.Net.TestFramework/Index/RandomDocumentsWriterPerThreadPool.cs @@ -36,7 +36,7 @@ internal class RandomDocumentsWriterPerThreadPool : DocumentsWriterPerThreadPool public RandomDocumentsWriterPerThreadPool(int maxNumPerThreads, Random random) : base(maxNumPerThreads) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxThreadStates >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(MaxThreadStates >= 1); states = new ThreadState[maxNumPerThreads]; this.random = new Random(random.Next()); this.maxRetry = 1 + random.Next(10); @@ -56,14 +56,14 @@ public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter } } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => NumThreadStatesActive > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(NumThreadStatesActive > 0); for (int i = 0; i < maxRetry; i++) { int ord = random.Next(NumThreadStatesActive); lock (this) { threadState = states[ord]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState != null); + if (Debugging.AssertsEnabled) Debugging.Assert(threadState != null); } if (threadState.TryLock()) @@ -94,7 +94,7 @@ public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter } // if no new state is available lock the random one } - if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState != null); + if (Debugging.AssertsEnabled) Debugging.Assert(threadState != null); threadState.@Lock(); return threadState; } diff --git a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs index 91a368e000..1506033dd6 100644 --- a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs +++ b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs @@ -440,7 +440,7 @@ private void _DoRandomForceMerge() // LUCENENET specific - added leading undersc Console.WriteLine("RIW: doRandomForceMerge(" + limit + ")"); } IndexWriter.ForceMerge(limit); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !doRandomForceMergeAssert || IndexWriter.SegmentCount <= limit, () => "limit=" + limit + " actual=" + IndexWriter.SegmentCount); + if (Debugging.AssertsEnabled) Debugging.Assert(!doRandomForceMergeAssert || IndexWriter.SegmentCount <= limit, () => "limit=" + limit + " actual=" + IndexWriter.SegmentCount); } } } diff --git a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs index 176882cce3..f872bc5ad5 100644 --- a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs @@ -227,7 +227,7 @@ public override void Run() if (toDeleteSubDocs.Count > 0 && Random.NextBoolean()) { delSubDocs = toDeleteSubDocs[Random.Next(toDeleteSubDocs.Count)]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !delSubDocs.Deleted); + if (Debugging.AssertsEnabled) Debugging.Assert(!delSubDocs.Deleted); toDeleteSubDocs.Remove(delSubDocs); // Update doc block, replacing prior packID packID = delSubDocs.PackID; @@ -364,7 +364,7 @@ public override void Run() foreach (SubDocs subDocs in toDeleteSubDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !subDocs.Deleted); + if (Debugging.AssertsEnabled) Debugging.Assert(!subDocs.Deleted); delPackIDs.Add(subDocs.PackID); outerInstance.DeleteDocuments(new Term("packID", subDocs.PackID)); subDocs.Deleted = true; diff --git a/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs 
b/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs index c3de50616a..4f7057c7cb 100644 --- a/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs +++ b/src/Lucene.Net.TestFramework/Search/AssertingBulkScorer.cs @@ -61,7 +61,7 @@ public override void Score(ICollector collector) try { bool remaining = @in.Score(collector, DocsEnum.NO_MORE_DOCS); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !remaining); + if (Debugging.AssertsEnabled) Debugging.Assert(!remaining); } #pragma warning disable 168 catch (NotSupportedException e) diff --git a/src/Lucene.Net.TestFramework/Search/AssertingCollector.cs b/src/Lucene.Net.TestFramework/Search/AssertingCollector.cs index f183d867c2..c57819c298 100644 --- a/src/Lucene.Net.TestFramework/Search/AssertingCollector.cs +++ b/src/Lucene.Net.TestFramework/Search/AssertingCollector.cs @@ -52,7 +52,7 @@ public virtual void Collect(int doc) { if (inOrder || !AcceptsDocsOutOfOrder) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => doc > lastCollected, () => "Out of order : " + lastCollected + " " + doc); + if (Debugging.AssertsEnabled) Debugging.Assert(doc > lastCollected, () => "Out of order : " + lastCollected + " " + doc); } @in.Collect(doc); lastCollected = doc; diff --git a/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs b/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs index ed7ae45142..3dc951a2a3 100644 --- a/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs +++ b/src/Lucene.Net.TestFramework/Search/AssertingScorer.cs @@ -105,10 +105,10 @@ internal virtual bool Iterating() public override float GetScore() { - if (Debugging.AssertsEnabled) Debugging.Assert(Iterating); + if (Debugging.AssertsEnabled) Debugging.Assert(Iterating()); float score = @in.GetScore(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(!float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(!float.IsNaN(score)); return score; } @@ -125,7 +125,7 @@ public override int Freq { get { - if (Debugging.AssertsEnabled) Debugging.Assert(Iterating); + if (Debugging.AssertsEnabled) Debugging.Assert(Iterating()); return @in.Freq; } } diff --git a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs index 8f046f89bb..7331687534 100644 --- a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs +++ b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs @@ -294,7 +294,7 @@ private static IndexReader[] LoadEmptyReaders() // LUCENENET: Avoid static const private static IndexReader MakeEmptyIndex(Random random, int numDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 0); Directory d = new MockDirectoryWrapper(random, new RAMDirectory()); IndexWriter w = new IndexWriter(d, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random))); for (int i = 0; i < numDocs; i++) @@ -487,7 +487,7 @@ public virtual void SetNextReader(AtomicReaderContext context) leafPtr++; } lastReader[0] = (AtomicReader)context.Reader; - if (Debugging.AssertsEnabled) Debugging.Assert(() => readerContextArray[leafPtr].Reader == context.Reader); + if (Debugging.AssertsEnabled) Debugging.Assert(readerContextArray[leafPtr].Reader == context.Reader); this.scorer = null; lastDoc[0] = -1; } diff --git a/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs 
b/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs index 811bcb7045..e7f0b9e6a2 100644 --- a/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs +++ b/src/Lucene.Net.TestFramework/Search/RandomSimilarityProvider.cs @@ -71,7 +71,7 @@ public override Similarity Get(string field) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(field != null); if (!previousMappings.TryGetValue(field, out Similarity sim) || sim == null) { sim = knownSims[Math.Max(0, Math.Abs(perFieldSeed ^ field.GetHashCode())) % knownSims.Count]; diff --git a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs index 78f47d997c..67d887b5d9 100644 --- a/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs +++ b/src/Lucene.Net.TestFramework/Search/ShardSearchingTestBase.cs @@ -220,7 +220,7 @@ internal virtual TopDocs SearchNode(int nodeID, long[] nodeVersions, Query q, So } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => searchAfter == null); // not supported yet + if (Debugging.AssertsEnabled) Debugging.Assert(searchAfter == null); // not supported yet return s.LocalSearch(q, numHits, sort); } } @@ -306,7 +306,7 @@ public ShardIndexSearcher(ShardSearchingTestBase.NodeState nodeState, long[] nod this.outerInstance = nodeState; this.nodeVersions = nodeVersions; MyNodeID = nodeID; - if (Debugging.AssertsEnabled) Debugging.Assert(() => MyNodeID == nodeState.MyNodeID, () => "myNodeID=" + nodeID + " NodeState.this.myNodeID=" + nodeState.MyNodeID); + if (Debugging.AssertsEnabled) Debugging.Assert(MyNodeID == nodeState.MyNodeID, () => "myNodeID=" + nodeID + " NodeState.this.myNodeID=" + nodeState.MyNodeID); } public override Query Rewrite(Query original) @@ -348,7 +348,7 @@ public override Query Rewrite(Query original) public override TermStatistics TermStatistics(Term term, TermContext context) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(term != null); long docFreq = 0; long totalTermFreq = 0; for (int nodeID = 0; nodeID < nodeVersions.Length; nodeID++) @@ -364,7 +364,7 @@ public override TermStatistics TermStatistics(Term term, TermContext context) subStats = outerInstance.termStatsCache[key]; // We pre-cached during rewrite so all terms // better be here... 
- if (Debugging.AssertsEnabled) Debugging.Assert(() => subStats != null); + if (Debugging.AssertsEnabled) Debugging.Assert(subStats != null); } long nodeDocFreq = subStats.DocFreq; @@ -419,7 +419,7 @@ public override CollectionStatistics CollectionStatistics(string field) } // Collection stats are pre-shared on reopen, so, // we better not have a cache miss: - if (Debugging.AssertsEnabled) Debugging.Assert(() => nodeStats != null, () => "myNodeID=" + MyNodeID + " nodeID=" + nodeID + " version=" + nodeVersions[nodeID] + " field=" + field); + if (Debugging.AssertsEnabled) Debugging.Assert(nodeStats != null, () => "myNodeID=" + MyNodeID + " nodeID=" + nodeID + " version=" + nodeVersions[nodeID] + " field=" + field); long nodeDocCount = nodeStats.DocCount; if (docCount >= 0 && nodeDocCount >= 0) @@ -451,7 +451,7 @@ public override CollectionStatistics CollectionStatistics(string field) sumDocFreq = -1; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => nodeStats.MaxDoc >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(nodeStats.MaxDoc >= 0); maxDoc += nodeStats.MaxDoc; } @@ -551,7 +551,7 @@ public virtual TopDocs LocalSearchAfter(ScoreDoc after, Query query, int numHits public override TopFieldDocs Search(Query query, int numHits, Sort sort) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => sort != null); + if (Debugging.AssertsEnabled) Debugging.Assert(sort != null); TopDocs[] shardHits = new TopDocs[nodeVersions.Length]; for (int nodeID = 0; nodeID < nodeVersions.Length; nodeID++) { @@ -604,7 +604,7 @@ public NodeState(ShardSearchingTestBase shardSearchingTestBase, Random random, i public void InitSearcher(long[] nodeVersions) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentShardSearcher == null); + if (Debugging.AssertsEnabled) Debugging.Assert(currentShardSearcher == null); Array.Copy(nodeVersions, 0, currentNodeVersions, 0, currentNodeVersions.Length); currentShardSearcher = new ShardIndexSearcher(this, GetCurrentNodeVersions(), Mgr.Acquire().IndexReader, MyNodeID); } @@ -781,8 +781,8 @@ protected virtual void Start(int numNodes, double runTimeSec, int maxSearcherAge for (int nodeID = 0; nodeID < numNodes; nodeID++) { IndexSearcher s = m_nodes[nodeID].Mgr.Acquire(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nodeVersions[nodeID] == m_nodes[nodeID].Searchers.Record(s)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => s != null); + if (Debugging.AssertsEnabled) Debugging.Assert(nodeVersions[nodeID] == m_nodes[nodeID].Searchers.Record(s)); + if (Debugging.AssertsEnabled) Debugging.Assert(s != null); try { BroadcastNodeReopen(nodeID, nodeVersions[nodeID], s); diff --git a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs index 60aa77d7f1..7428971c5f 100644 --- a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs +++ b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs @@ -963,7 +963,7 @@ protected override void Dispose(bool disposing) { if (endSet.Contains(s) && !startSet.Contains(s)) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingDeletions.Contains(s)); + if (Debugging.AssertsEnabled) Debugging.Assert(pendingDeletions.Contains(s)); if (LuceneTestCase.Verbose) { Console.WriteLine("MDW: Unreferenced check: Ignoring referenced file: " + s + " " + @@ -1024,7 +1024,7 @@ protected override void Dispose(bool disposing) extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions; } - if 
(Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras); } DirectoryReader ir1 = DirectoryReader.Open(this); @@ -1034,7 +1034,7 @@ protected override void Dispose(bool disposing) DirectoryReader ir2 = DirectoryReader.Open(this); int numDocs2 = ir2.NumDocs; ir2.Dispose(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs1 == numDocs2, () => "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs1 == numDocs2, () => "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2); } } } diff --git a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs index 3d9ea80958..d2c7955eee 100644 --- a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs +++ b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs @@ -259,7 +259,7 @@ internal int randomInt(int max) [ExceptionToNetNumericConvention] // LUCENENET: This is for making test porting easier, keeping as-is internal int randomIntBetween(int min, int max) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => max >= min, () => "max must be >= min: " + min + ", " + max); + if (Debugging.AssertsEnabled) Debugging.Assert(max >= min, () => "max must be >= min: " + min + ", " + max); long range = (long)max - (long)min; if (range < int.MaxValue) { diff --git a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs index 52e690435d..fa40f165dd 100644 --- a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs +++ b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs @@ -191,7 +191,7 @@ internal static int GetRandomCodePoint(Random r, Transition t) // LUCENENET spec } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => code >= t.Min && code <= t.Max && (code < UnicodeUtil.UNI_SUR_HIGH_START || code > UnicodeUtil.UNI_SUR_LOW_END), () => "code=" + code + " min=" + t.Min + " max=" + t.Max); + if (Debugging.AssertsEnabled) Debugging.Assert(code >= t.Min && code <= t.Max && (code < UnicodeUtil.UNI_SUR_HIGH_START || code > UnicodeUtil.UNI_SUR_LOW_END), () => "code=" + code + " min=" + t.Min + " max=" + t.Max); return code; } @@ -399,7 +399,7 @@ public static void AssertNoDetachedStates(Automaton a) { int numStates = a.GetNumberOfStates(); a.ClearNumberedStates(); // force recomputation of cached numbered states - if (Debugging.AssertsEnabled) Debugging.Assert(() => numStates == a.GetNumberOfStates(), () => "automaton has " + (numStates - a.GetNumberOfStates()) + " detached states"); + if (Debugging.AssertsEnabled) Debugging.Assert(numStates == a.GetNumberOfStates(), () => "automaton has " + (numStates - a.GetNumberOfStates()) + " detached states"); } } diff --git a/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs b/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs index 1c74e96aa9..430cce733a 100644 --- a/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs +++ b/src/Lucene.Net.TestFramework/Util/BaseDocIdSetTestCase.cs @@ -56,7 +56,7 @@ public 
BaseDocIdSetTestCase(BeforeAfterClass beforeAfter) /// Create a random set which has numBitsSet of its bits set. protected static BitSet RandomSet(int numBits, int numBitsSet) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numBitsSet <= numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(numBitsSet <= numBits); BitSet set = new BitSet(numBits); Random random = Random; if (numBitsSet == numBits) diff --git a/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs b/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs index e573e12bed..7fdfb9087f 100644 --- a/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs +++ b/src/Lucene.Net.TestFramework/Util/FailOnNonBulkMergesInfoStream.cs @@ -35,7 +35,7 @@ public override bool IsEnabled(string component) public override void Message(string component, string message) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !message.Contains("non-bulk merges")); + if (Debugging.AssertsEnabled) Debugging.Assert(!message.Contains("non-bulk merges")); } } } \ No newline at end of file diff --git a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs index 875dc698c3..94898c208b 100644 --- a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs +++ b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs @@ -103,7 +103,7 @@ private static BytesRef ToBytesRef(Int32sRef ir) for (int i = 0; i < ir.Length; i++) { int x = ir.Int32s[ir.Offset + i]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => x >= 0 && x <= 255); + if (Debugging.AssertsEnabled) Debugging.Assert(x >= 0 && x <= 255); br.Bytes[i] = (byte)x; } br.Length = ir.Length; @@ -219,7 +219,7 @@ public virtual void DoTest(bool testPruning) // of the term prefix that matches private T Run(FST<T> fst, Int32sRef term, int[] prefixLength) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => prefixLength == null || prefixLength.Length == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(prefixLength == null || prefixLength.Length == 1); FST.Arc<T> arc = fst.GetFirstArc(new FST.Arc<T>()); T NO_OUTPUT = fst.Outputs.NoOutput; T output = NO_OUTPUT; @@ -690,7 +690,7 @@ private void VerifyUnPruned(int inputMode, FST<T> fst) if (!termsMap.ContainsKey(term) && term.CompareTo(pairs[upto].Input) > 0) { int pos = pairs.BinarySearch(new InputOutput<T>(term, default(T))); - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(pos < 0); upto = -(pos + 1); if (random.NextBoolean()) @@ -887,7 +887,7 @@ private void VerifyPruned(int inputMode, FST<T> fst, int prune1, int prune2) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => prune2 > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(prune2 > 0); if (prune2 > 1 && cmo.Count >= prune2) { keep = true; diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs index bf79a356cf..6460cbdcda 100644 --- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs +++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs @@ -2678,7 +2678,7 @@ public virtual void AssertTermsEquals(string info, IndexReader leftReader, Terms /// public virtual void AssertTermsStatisticsEquals(string info, Terms leftTerms, Terms rightTerms) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => leftTerms.Comparer == rightTerms.Comparer); + if (Debugging.AssertsEnabled) Debugging.Assert(leftTerms.Comparer == rightTerms.Comparer); if (leftTerms.DocCount != -1 && rightTerms.DocCount != -1) {
Assert.AreEqual(leftTerms.DocCount, rightTerms.DocCount, info); @@ -3053,7 +3053,7 @@ public virtual void AssertNormsEquals(string info, IndexReader leftReader, Index /// public virtual void AssertStoredFieldsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(leftReader.MaxDoc == rightReader.MaxDoc); for (int i = 0; i < leftReader.MaxDoc; i++) { Document leftDoc = leftReader.Document(i); @@ -3100,7 +3100,7 @@ public virtual void AssertStoredFieldEquals(string info, IIndexableField leftFie /// public virtual void AssertTermVectorsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(leftReader.MaxDoc == rightReader.MaxDoc); for (int i = 0; i < leftReader.MaxDoc; i++) { Fields leftFields = leftReader.GetTermVectors(i); @@ -3270,7 +3270,7 @@ public virtual void AssertDocValuesEquals(string info, int num, NumericDocValues // TODO: this is kinda stupid, we don't delete documents in the test. public virtual void AssertDeletedDocsEquals(string info, IndexReader leftReader, IndexReader rightReader) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => leftReader.NumDeletedDocs == rightReader.NumDeletedDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(leftReader.NumDeletedDocs == rightReader.NumDeletedDocs); IBits leftBits = MultiFields.GetLiveDocs(leftReader); IBits rightBits = MultiFields.GetLiveDocs(rightReader); @@ -3281,7 +3281,7 @@ public virtual void AssertDeletedDocsEquals(string info, IndexReader leftReader, return; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => leftReader.MaxDoc == rightReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(leftReader.MaxDoc == rightReader.MaxDoc); Assert.AreEqual(leftBits.Length, rightBits.Length, info); for (int i = 0; i < leftReader.MaxDoc; i++) { @@ -3365,7 +3365,7 @@ public static bool SlowFileExists(Directory dir, string fileName) //// if (TempDirBase == null) //// { //// DirectoryInfo directory = new DirectoryInfo(System.IO.Path.GetTempPath()); - //// //if (Debugging.AssertsEnabled) Debugging.Assert(() => directory.Exists && directory.Directory != null && directory.CanWrite()); + //// //if (Debugging.AssertsEnabled) Debugging.Assert(directory.Exists && directory.Directory != null && directory.CanWrite()); //// RandomizedContext ctx = RandomizedContext.Current; //// Type clazz = ctx.GetTargetType; @@ -3504,7 +3504,7 @@ public static FileInfo CreateTempFile() /// private static void RegisterToRemoveAfterSuite(FileSystemInfo f) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => f != null); + if (Debugging.AssertsEnabled) Debugging.Assert(f != null); if (LuceneTestCase.LeaveTemporary) { diff --git a/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs b/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs index eafb5f2cf0..315bfb4383 100644 --- a/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs +++ b/src/Lucene.Net.TestFramework/Util/NullInfoStream.cs @@ -29,13 +29,13 @@ public class NullInfoStream : InfoStream { public override void Message(string component, string message) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => component != null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => message != null); + if (Debugging.AssertsEnabled) Debugging.Assert(component != null); + if 
(Debugging.AssertsEnabled) Debugging.Assert(message != null);
         }

         public override bool IsEnabled(string component)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => component != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(component != null);
             return true; // to actually enable logging, we just ignore on message()
         }
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
index f32b343422..a0ca6d2f9c 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
@@ -169,7 +169,7 @@ public override void Before(LuceneTestCase testInstance)
                 !ShouldAvoidCodec("Lucene3x"))) // preflex-only setup
             {
                 codec = Codec.ForName("Lucene3x");
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is PreFlexRWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert((codec is PreFlexRWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
                 LuceneTestCase.OldFormatImpersonationIsActive = true;
             }
             else if ("Lucene40".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
@@ -179,8 +179,8 @@ public override void Before(LuceneTestCase testInstance)
             {
                 codec = Codec.ForName("Lucene40");
                 LuceneTestCase.OldFormatImpersonationIsActive = true;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is Lucene40RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => (PostingsFormat.ForName("Lucene40") is Lucene40RWPostingsFormat), () => "fix your IPostingsFormatFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene40RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert((PostingsFormat.ForName("Lucene40") is Lucene40RWPostingsFormat), () => "fix your IPostingsFormatFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
             }
             else if ("Lucene41".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
                 "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) &&
@@ -190,7 +190,7 @@ public override void Before(LuceneTestCase testInstance)
             {
                 codec = Codec.ForName("Lucene41");
                 LuceneTestCase.OldFormatImpersonationIsActive = true;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is Lucene41RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene41RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
             }
             else if ("Lucene42".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
                 "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) &&
@@ -200,7 +200,7 @@ public override void Before(LuceneTestCase testInstance)
             {
                 codec = Codec.ForName("Lucene42");
                 LuceneTestCase.OldFormatImpersonationIsActive = true;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is Lucene42RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene42RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
             }
             else if ("Lucene45".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) &&
                 "random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) &&
@@ -210,7 +210,7 @@ public override void Before(LuceneTestCase testInstance)
             {
                 codec = Codec.ForName("Lucene45");
                 LuceneTestCase.OldFormatImpersonationIsActive = true;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => (codec is Lucene45RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
+                if (Debugging.AssertsEnabled) Debugging.Assert((codec is Lucene45RWCodec), () => "fix your ICodecFactory to scan Lucene.Net.Tests before Lucene.Net.TestFramework");
             }
             else if (("random".Equals(LuceneTestCase.TestPostingsFormat, StringComparison.Ordinal) == false) || ("random".Equals(LuceneTestCase.TestDocValuesFormat, StringComparison.Ordinal) == false))
@@ -275,7 +275,7 @@ public override void Before(LuceneTestCase testInstance)
             }
             else
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
+                if (Debugging.AssertsEnabled) Debugging.Assert(false);
             }
             Codec.Default = codec;
diff --git a/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs b/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs
index 9730e3f052..738c4442da 100644
--- a/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs
+++ b/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs
@@ -61,7 +61,7 @@ public static int MBitsToBytes(int mbits)
         public ThrottledIndexOutput(int bytesPerSecond, long flushDelayMillis, long closeDelayMillis, long seekDelayMillis, long minBytesWritten, IndexOutput @delegate)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesPerSecond > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(bytesPerSecond > 0);
             this.@delegate = @delegate;
             this.bytesPerSecond = bytesPerSecond;
             this.flushDelayMillis = flushDelayMillis;
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
index f0b56237a4..2daf82a044 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/CharFilters/TestMappingCharFilter.cs
@@ -422,7 +422,7 @@ public virtual void TestRandomMaps2()
                             // Same length: no change to offset
                         }
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => inputOffsets.Count == output.Length, () => "inputOffsets.size()=" + inputOffsets.Count + " vs output.length()=" + output.Length);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(inputOffsets.Count == output.Length, () => "inputOffsets.size()=" + inputOffsets.Count + " vs output.length()=" + output.Length);
                     }
                     else
                     {
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
index 6333c35fb3..13b2cf322b 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
@@ -211,7 +211,7 @@ private sealed class FactoryAnalyzer : Analyzer
         internal FactoryAnalyzer(TokenizerFactory tokenizer, TokenFilterFactory tokenfilter, CharFilterFactory charFilter)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => tokenizer != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(tokenizer != null);
             this.tokenizer = tokenizer;
             this.charFilter = charFilter;
             this.tokenfilter = tokenfilter;
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
index 20ea9cefb5..0dce8c7fe1 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
@@ -172,7 +172,7 @@ public PredicateAnonymousInnerClassHelper2()
             public virtual bool Apply(object[] args)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => args.Length == 3);
+                if (Debugging.AssertsEnabled) Debugging.Assert(args.Length == 3);
                 return !((bool)args[2]); // args are broken if consumeAllTokens is false
             }
         }
@@ -185,7 +185,7 @@ public PredicateAnonymousInnerClassHelper3()
             public virtual bool Apply(object[] args)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => args.Length == 3);
+                if (Debugging.AssertsEnabled) Debugging.Assert(args.Length == 3);
                 return !((bool)args[2]); // args are broken if consumeAllTokens is false
             }
         }
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs
index 48f9ea82a3..c10147ab75 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs
@@ -168,16 +168,16 @@ public virtual void Test()
             for (int i = 0; i < tests.Length; i += 3)
             {
                 FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i]));
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => f.Exists);
+                if (Debugging.AssertsEnabled) Debugging.Assert(f.Exists);
                 using (Stream fileStream = f.OpenRead())
                 {
                     using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8))
                     {
                         ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => dicEntry != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null);
                         ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => affEntry != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null);
                         using (Stream dictionary = dicEntry.Open())
                         {
@@ -208,16 +208,16 @@ public virtual void TestOneDictionary()
                 if (tests[i].Equals(toTest, StringComparison.Ordinal))
                 {
                     FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i]));
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => f.Exists);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(f.Exists);
                     using (Stream fileStream = f.OpenRead())
                     {
                         using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8))
                         {
                             ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]);
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => dicEntry != null);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null);
                             ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]);
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => affEntry != null);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null);
                             using (Stream dictionary = dicEntry.Open())
                             {
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs
index cce39a6f31..a1ac6b5abc 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs
@@ -184,16 +184,16 @@ public virtual void Test()
             for (int i = 0; i < tests.Length; i += 3)
             {
                 FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i]));
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => f.Exists);
+                if (Debugging.AssertsEnabled) Debugging.Assert(f.Exists);
                 using (Stream fileStream = f.OpenRead())
                 {
                     using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8))
                     {
                         ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => dicEntry != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null);
                         ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => affEntry != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null);
                         using (Stream dictionary = dicEntry.Open())
                         {
@@ -226,16 +226,16 @@ public virtual void TestOneDictionary()
                 if (tests[i].Equals(toTest, StringComparison.Ordinal))
                 {
                     FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i]));
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => f.Exists);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(f.Exists);
                     using (Stream fileStream = f.OpenRead())
                     {
                         using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8))
                         {
                             ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]);
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => dicEntry != null);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null);
                             ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]);
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => affEntry != null);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null);
                             using (Stream dictionary = dicEntry.Open())
                             {
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
index 44f4ad2dbb..ba8d1dead2 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMapFilter.cs
@@ -268,7 +268,7 @@ public virtual void TestBasic()
         private string GetRandomString(char start, int alphabetSize, int length)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => alphabetSize <= 26);
+            if (Debugging.AssertsEnabled) Debugging.Assert(alphabetSize <= 26);
             char[] s = new char[2 * length];
             for (int charIDX = 0; charIDX < length; charIDX++)
             {
diff --git a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
index 5c1afbfaff..b0d4a31be5 100644
--- a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
+++ b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
@@ -154,7 +154,7 @@ protected internal virtual void SortTies(LabelAndValue[] labelValues)
                 if (numInRow > 1)
                 {
                     Array.Sort(labelValues, i - numInRow, i - (i - numInRow), Comparer<LabelAndValue>.Create((a,b)=> {
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => (double)a.Value == (double)b.Value);
+                        if (Debugging.AssertsEnabled) Debugging.Assert((double)a.Value == (double)b.Value);
                         return (new BytesRef(a.Label)).CompareTo(new BytesRef(b.Label));
                     }));
                 }
diff --git a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
index a9d27ab18d..537dada313 100644
--- a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
@@ -1197,7 +1197,7 @@ public DrillSidewaysAnonymousInnerClassHelper2(TestRangeFacetCounts outerInstanc
             protected override Facets BuildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, string[] drillSidewaysDims)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => drillSideways.Length == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(drillSideways.Length == 1);
                 return new DoubleRangeFacetCounts("field", vs, drillSideways[0], fastMatchFilter, ranges);
             }
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
index 09c2ed7aa0..2e95b2817a 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
@@ -385,7 +385,7 @@ public ValueSourceAnonymousInnerClassHelper(TestTaxonomyFacetSumValueSource oute
             public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
             {
                 Scorer scorer = (Scorer)context["scorer"];
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => scorer != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(scorer != null);
                 return new DoubleDocValuesAnonymousInnerClassHelper(this, scorer);
             }
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
index d89256a5a8..97be3a15ac 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
@@ -866,7 +866,7 @@ public virtual void SetScorer(Scorer scorer)
             public virtual void Collect(int doc)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => doc > lastDocID);
+                if (Debugging.AssertsEnabled) Debugging.Assert(doc > lastDocID);
                 lastDocID = doc;
             }
@@ -948,8 +948,8 @@ public virtual void Inc(int[] dims, int[] dims2)
             public virtual void Inc(int[] dims, int[] dims2, int onlyDim)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => dims.Length == counts.Length);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => dims2.Length == counts.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(dims.Length == counts.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(dims2.Length == counts.Length);
                 for (int dim = 0; dim < dims.Length; dim++)
                 {
                     if (onlyDim == -1 || dim == onlyDim)
diff --git a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
index 3329b699b6..07d085c2ea 100644
--- a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
+++ b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs
@@ -559,7 +559,7 @@ private IndexContext CreateIndexContext(bool multipleFacetValuesPerDocument)
             Field[] facetFields;
             if (useDv)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => !multipleFacetValuesPerDocument);
+                if (Debugging.AssertsEnabled) Debugging.Assert(!multipleFacetValuesPerDocument);
                 facetFields = new Field[2];
                 facetFields[0] = NewStringField("facet", "", Field.Store.NO);
                 doc.Add(facetFields[0]);
@@ -816,7 +816,7 @@ private AbstractGroupFacetCollector CreateRandomCollector(string groupField, str
         {
             BytesRef facetPrefixBR = facetPrefix == null ? null : new BytesRef(facetPrefix);
             // DocValues cannot be multi-valued:
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => !multipleFacetsPerDocument || !groupField.EndsWith("_dv", StringComparison.Ordinal));
+            if (Debugging.AssertsEnabled) Debugging.Assert(!multipleFacetsPerDocument || !groupField.EndsWith("_dv", StringComparison.Ordinal));
             return TermGroupFacetCollector.CreateTermGroupFacetCollector(groupField, facetField, multipleFacetsPerDocument, facetPrefixBR, Random.nextInt(1024));
         }
diff --git a/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs b/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs
index 99189a1563..8228ecc345 100644
--- a/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs
+++ b/src/Lucene.Net.Tests.Highlighter/PostingsHighlight/TestPostingsHighlighter.cs
@@ -775,8 +775,8 @@ public LoadFieldValuesPostingsHighlighter(int maxLength, string text)
             protected override IList<string[]> LoadFieldValues(IndexSearcher searcher, string[] fields, int[] docids, int maxLength)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Length == 1);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docids.Length == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fields.Length == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docids.Length == 1);
                 String[][] contents = RectangularArrays.ReturnRectangularArray<string>(1, 1); //= new String[1][1];
                 contents[0][0] = text;
                 return contents;
@@ -1179,7 +1179,7 @@ internal class GetMultiValuedSeparatorPostingsHighlighter : PostingsHighlighter
         {
             protected override char GetMultiValuedSeparator(string field)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Equals("body", StringComparison.Ordinal));
+                if (Debugging.AssertsEnabled) Debugging.Assert(field.Equals("body", StringComparison.Ordinal));
                 return '\u2029';
             }
         }
diff --git a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
index 09ea77f41c..59de865fbb 100644
--- a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
+++ b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
@@ -1030,7 +1030,7 @@ private FixedBitSet CreateExpectedResult(string queryValue, bool from, IndexRead
                 {
                     DocsEnum docsEnum = MultiFields.GetTermDocsEnum(topLevelReader, MultiFields.GetLiveDocs(topLevelReader), "id", new BytesRef(otherSideDoc.id), 0);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => docsEnum != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(docsEnum != null);
                     int doc = docsEnum.NextDoc();
                     expectedResult.Set(doc);
                 }
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index 32a1bce486..f8417b739d 100644
--- a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -65,8 +65,8 @@ public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
         public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC != null, () => "Parameter must not be null");
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser, () => "Parameter must be instance of QueryParser");
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC != null, () => "Parameter must not be null");
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is QueryParser, () => "Parameter must be instance of QueryParser");
             QueryParser qp = (QueryParser)cqpC;
             return qp.Parse(query);
         }
@@ -83,35 +83,35 @@ public override bool IsQueryParserException(Exception exception)
         public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser);
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is QueryParser);
             QueryParser qp = (QueryParser)cqpC;
             qp.DefaultOperator = Operator.OR;
         }
         public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser);
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is QueryParser);
             QueryParser qp = (QueryParser)cqpC;
             qp.DefaultOperator = Operator.AND;
         }
         public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser);
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is QueryParser);
             QueryParser qp = (QueryParser)cqpC;
             qp.AnalyzeRangeTerms = (value);
         }
         public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser);
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is QueryParser);
             QueryParser qp = (QueryParser)cqpC;
             qp.AutoGeneratePhraseQueries = value;
         }
         public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, string field, DateTools.Resolution value)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is QueryParser);
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is QueryParser);
             QueryParser qp = (QueryParser)cqpC;
             qp.SetDateResolution(field, value);
         }
diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
index f9d285517d..5cdeb984bb 100644
--- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
@@ -59,8 +59,8 @@ public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
         public override Query GetQuery(String query, ICommonQueryParserConfiguration cqpC)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC != null, () => "Parameter must not be null");
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => (cqpC is StandardQueryParser), () => "Parameter must be instance of StandardQueryParser");
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC != null, () => "Parameter must not be null");
+            if (Debugging.AssertsEnabled) Debugging.Assert((cqpC is StandardQueryParser), () => "Parameter must be instance of StandardQueryParser");
             StandardQueryParser qp = (StandardQueryParser)cqpC;
             return Parse(query, qp);
         }
@@ -80,7 +80,7 @@ public override bool IsQueryParserException(Exception exception)
         public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is StandardQueryParser);
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is StandardQueryParser);
             StandardQueryParser qp = (StandardQueryParser)cqpC;
             qp.DefaultOperator = (Operator.OR);
         }
@@ -88,7 +88,7 @@ public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
         public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is StandardQueryParser);
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is StandardQueryParser);
             StandardQueryParser qp = (StandardQueryParser)cqpC;
             qp.DefaultOperator = (Operator.AND);
         }
@@ -111,7 +111,7 @@ public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguratio
         public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, string field, DateTools.Resolution value)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => cqpC is StandardQueryParser);
+            if (Debugging.AssertsEnabled) Debugging.Assert(cqpC is StandardQueryParser);
             StandardQueryParser qp = (StandardQueryParser)cqpC;
             qp.DateResolutionMap.Put(field, value);
         }
diff --git a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
index a6fe124b20..38f8c2f4f2 100644
--- a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
+++ b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
@@ -461,7 +461,7 @@ protected override void HandleUpdateException(Exception exception)
                 {
                     // count-down number of failures
                     failures.DecrementAndGet();
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => failures >= 0, () => "handler failed too many times: " + failures);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(failures >= 0, () => "handler failed too many times: " + failures);
                     if (Verbose)
                     {
                         if (failures == 0)
diff --git a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
index ee05bc1833..f4dfbb1e2a 100644
--- a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
+++ b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
@@ -367,7 +367,7 @@ protected override void HandleUpdateException(Exception exception)
                 {
                     // count-down number of failures
                     failures.DecrementAndGet();
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => failures >= 0, () => "handler failed too many times: " + failures);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(failures >= 0, () => "handler failed too many times: " + failures);
                     if (Verbose)
                     {
                         if (failures == 0)
diff --git a/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs b/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs
index 6f54dd4405..eb6479115b 100644
--- a/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs
+++ b/src/Lucene.Net.Tests.Spatial/SpatialTestCase.cs
@@ -152,7 +152,7 @@ protected virtual IRectangle randomRectangle()
         private double randomGaussianMinMeanMax(double min, double mean, double max)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => mean > min);
+            if (Debugging.AssertsEnabled) Debugging.Assert(mean > min);
             return randomGaussianMeanMax(mean - min, max - min) + min;
         }
@@ -166,7 +166,7 @@ private double randomGaussianMinMeanMax(double min, double mean, double max)
         private double randomGaussianMeanMax(double mean, double max)
         {
             // DWS: I verified the results empirically
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => mean <= max && mean >= 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(mean <= max && mean >= 0);
             double g = randomGaussian();
             double mean2 = mean;
             double flip = 1;
@@ -180,7 +180,7 @@ private double randomGaussianMeanMax(double mean, double max)
             // 1 standard deviation alters the calculation
             double pivotMax = max - mean2;
             double pivot = Math.Min(mean2, pivotMax / 2);//from 0 to max-mean2
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => pivot >= 0 && pivotMax >= pivot && g >= 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(pivot >= 0 && pivotMax >= pivot && g >= 0);
             double pivotResult;
             if (g <= 1)
                 pivotResult = pivot * g;
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
index 4b7301d8a5..bc2ad0f1fa 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
@@ -651,7 +651,7 @@ public int CompareTo(TermFreq2 other)
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(false);
                     return 0;
                 }
             }
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
index 4857d73d4c..d88b6f752e 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
@@ -603,7 +603,7 @@ public int CompareTo(TermFreqPayload2 other)
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(false);
                     return 0;
                 }
             }
@@ -1313,7 +1313,7 @@ public int Compare(Lookup.LookupResult a, Lookup.LookupResult b)
                 else
                 {
                     int c = CHARSEQUENCE_COMPARER.Compare(a.Key, b.Key);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => c != 0, () => "term=" + a.Key);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(c != 0, () => "term=" + a.Key);
                     return c;
                 }
             }
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
index 0780c825b9..e0778eaa85 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
@@ -680,7 +680,7 @@ private static string GetZipfToken(string[] tokens)
                     return tokens[k];
                 }
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => false);
+            if (Debugging.AssertsEnabled) Debugging.Assert(false);
             return null;
         }
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
index 14251e72af..883a11091b 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
@@ -75,7 +75,7 @@ public class LookupBenchmarkTest : LuceneTestCase
         public override void SetUp()
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "disable assertions before running benchmarks!");
+            if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "disable assertions before running benchmarks!");
             IList<Input> input = ReadTop50KWiki();
             input.Shuffle(Random);
             dictionaryInput = input.ToArray();
@@ -93,7 +93,7 @@ public static IList<Input> ReadTop50KWiki()
             List<Input> input = new List<Input>();
             var resource = typeof(LookupBenchmarkTest).FindAndGetManifestResourceStream("Top50KWiki.utf8");
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => resource != null, () => "Resource missing: Top50KWiki.utf8");
+            if (Debugging.AssertsEnabled) Debugging.Assert(resource != null, () => "Resource missing: Top50KWiki.utf8");
             string line = null;
             using (TextReader br = new StreamReader(resource, UTF_8))
diff --git a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
index 1322b74063..f351acb20e 100644
--- a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
+++ b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
@@ -176,7 +176,7 @@ internal virtual void FillTokens()
                     pos += minPosLength;
                    offset = 2 * pos;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => maxPos <= pos, () => "input string mal-formed: posLength>1 tokens hang over the end");
+                if (Debugging.AssertsEnabled) Debugging.Assert(maxPos <= pos, () => "input string mal-formed: posLength>1 tokens hang over the end");
             }
         }
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
index 40b002f3b8..d56e040076 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
@@ -296,7 +296,7 @@ private void AssertTermsSeeking(Terms leftTerms, Terms rightTerms)
         /// </summary>
         public virtual void AssertTermsStatistics(Terms leftTerms, Terms rightTerms)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => leftTerms.Comparer == rightTerms.Comparer);
+            if (Debugging.AssertsEnabled) Debugging.Assert(leftTerms.Comparer == rightTerms.Comparer);
             if (leftTerms.DocCount != -1 && rightTerms.DocCount != -1)
             {
                 Assert.AreEqual(leftTerms.DocCount, rightTerms.DocCount);
diff --git a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
index 0a2e89e037..3379c03770 100644
--- a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
@@ -113,7 +113,7 @@ public virtual void TestTwoFieldsTwoFormats()
             {
                 Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
                 Assert.AreEqual(text, hitDoc.Get("fieldname"));
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => ireader.Leaves.Count == 1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1);
                 NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv1");
                 Assert.AreEqual(5, dv.Get(hits.ScoreDocs[i].Doc));
                 BinaryDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv2");
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
index 2ed15aae65..ba498e78f5 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
@@ -434,7 +434,7 @@ public virtual void SearchIndex(Directory dir, string oldName)
             // true if this is a 4.2+ index
             bool is42Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("dvSortedSet") != null;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => is40Index); // NOTE: currently we can only do this on trunk!
+            if (Debugging.AssertsEnabled) Debugging.Assert(is40Index); // NOTE: currently we can only do this on trunk!
             IBits liveDocs = MultiFields.GetLiveDocs(reader);
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
index 417df9366e..1a1fea9af1 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
@@ -1039,7 +1039,7 @@ public virtual void TestNegativePositions()
             Assert.AreEqual(1, td.TotalHits);
             AtomicReader wrapper = SlowCompositeReaderWrapper.Wrap(ir);
             DocsAndPositionsEnum de = wrapper.GetTermPositionsEnum(new Term("field3", "broken"));
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => de != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(de != null);
             Assert.AreEqual(0, de.NextDoc());
             Assert.AreEqual(0, de.NextPosition());
             ir.Dispose();
diff --git a/src/Lucene.Net.Tests/Index/TestCodecs.cs b/src/Lucene.Net.Tests/Index/TestCodecs.cs
index 2888663d84..a9c8d892bf 100644
--- a/src/Lucene.Net.Tests/Index/TestCodecs.cs
+++ b/src/Lucene.Net.Tests/Index/TestCodecs.cs
@@ -446,7 +446,7 @@ public virtual void TestRandomPostings()
             for (int i = 0; i < NUM_TEST_THREADS - 1; i++)
             {
                 threads[i].Join();
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => !threads[i].failed);
+                if (Debugging.AssertsEnabled) Debugging.Assert(!threads[i].failed);
             }
         }
@@ -759,14 +759,14 @@ public virtual void _run()
                         if (doc == DocIdSetIterator.NO_MORE_DOCS)
                         {
                             // skipped past last doc
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto2 == term2.docs.Length - 1);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(upto2 == term2.docs.Length - 1);
                             ended = true;
                             break;
                         }
                         else
                         {
                             // skipped to next doc
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto2 < term2.docs.Length - 1);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(upto2 < term2.docs.Length - 1);
                             if (doc >= term2.docs[1 + upto2])
                             {
                                 upto2++;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
index a2517f58ec..2a80fdf67a 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
@@ -2140,14 +2140,14 @@ public virtual void TestNRTReaderVersion()
             r = w.GetReader();
             long version2 = r.Version;
             r.Dispose();
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => version2 > version);
+            if (Debugging.AssertsEnabled) Debugging.Assert(version2 > version);
             w.DeleteDocuments(new Term("id", "0"));
             r = w.GetReader();
             w.Dispose();
             long version3 = r.Version;
             r.Dispose();
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => version3 > version2);
+            if (Debugging.AssertsEnabled) Debugging.Assert(version3 > version2);
             d.Dispose();
         }
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
index ffdcfdd778..eee2d87103 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
@@ -315,7 +315,7 @@ public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMer
                 }
                 for (int i = 0; i < merge.Segments.Count; i++)
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.Segments[i].Info.DocCount < 20);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(merge.Segments[i].Info.DocCount < 20);
                 }
                 writer.Merge(merge);
             }
diff --git a/src/Lucene.Net.Tests/Index/TestIndexableField.cs b/src/Lucene.Net.Tests/Index/TestIndexableField.cs
index 4308d1a932..f171897c4e 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexableField.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexableField.cs
@@ -317,7 +317,7 @@ public virtual void TestArbitraryFields()
                     }
                     else
                     {
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => stringValue != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(stringValue != null);
                         Assert.AreEqual(stringValue, f.GetStringValue());
                     }
                 }
@@ -424,7 +424,7 @@ public bool MoveNext()
                     return false;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldUpto < outerInstance.fieldCount);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fieldUpto < outerInstance.fieldCount);
                 if (fieldUpto == 0)
                 {
                     fieldUpto = 1;
diff --git a/src/Lucene.Net.Tests/Index/TestLongPostings.cs b/src/Lucene.Net.Tests/Index/TestLongPostings.cs
index c9859d7de3..4ffecfa943 100644
--- a/src/Lucene.Net.Tests/Index/TestLongPostings.cs
+++ b/src/Lucene.Net.Tests/Index/TestLongPostings.cs
@@ -462,9 +462,9 @@ public virtual void DoTestLongPostingsNoPositions(IndexOptions options)
                 else
                 {
                     docs = postings = TestUtil.Docs(Random, r, "field", new BytesRef(term), null, null, DocsFlags.FREQS);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => postings != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(postings != null);
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docs != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docs != null);
                 int docID = -1;
                 while (docID < DocIdSetIterator.NO_MORE_DOCS)
diff --git a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs
index 5dfadbfe0a..9a7916d94e 100644
--- a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs
+++ b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs
@@ -116,7 +116,7 @@ protected override void DoSearching(TaskScheduler es, long stopTime)
         protected override Directory GetDirectory(Directory @in)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => @in is MockDirectoryWrapper);
+            if (Debugging.AssertsEnabled) Debugging.Assert(@in is MockDirectoryWrapper);
             if (!useNonNrtReaders)
             {
                 ((MockDirectoryWrapper)@in).AssertNoDeleteOpenFile = true;
diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs
index 2129822db1..d536b10e2b 100644
--- a/src/Lucene.Net.Tests/Index/TestPayloads.cs
+++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs
@@ -310,7 +310,7 @@ private void GenerateRandomData(byte[] data)
             // this test needs the random data to be valid unicode
             string s = TestUtil.RandomFixedByteLengthUnicodeString(Random, data.Length);
             var b = s.GetBytes(utf8);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => b.Length == data.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(b.Length == data.Length);
             System.Buffer.BlockCopy(b, 0, data, 0, b.Length);
         }
diff --git a/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
index f38c9411da..d251def1fa 100644
--- a/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
+++ b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
@@ -79,7 +79,7 @@ public virtual void TestMixupDocs()
             DirectoryReader reader = writer.GetReader();
             Terms terms = reader.GetTermVector(1, "field");
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => terms != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(terms != null);
             TermsEnum termsEnum = terms.GetIterator(null);
             Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload")));
             DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
@@ -128,7 +128,7 @@ public virtual void TestMixupMultiValued()
             writer.AddDocument(doc);
             DirectoryReader reader = writer.GetReader();
             Terms terms = reader.GetTermVector(0, "field");
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => terms != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(terms != null);
             TermsEnum termsEnum = terms.GetIterator(null);
             Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload")));
             DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
diff --git a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
index 6e968359da..0f3ac91b5b 100644
--- a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
+++ b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
@@ -177,9 +177,9 @@ public virtual void DoTestNumbers(bool withPayloads)
                 {
                     dp.NextPosition();
                     int start = dp.StartOffset;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => start >= 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(start >= 0);
                    int end = dp.EndOffset;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => end >= 0 && end >= start);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(end >= 0 && end >= start);
                     // check that the offsets correspond to the term in the src text
                     Assert.IsTrue(storedNumbers.Substring(start, end - start).Equals(term, StringComparison.Ordinal));
                     if (withPayloads)
@@ -208,9 +208,9 @@ public virtual void DoTestNumbers(bool withPayloads)
                     string storedNumbers = reader.Document(doc).Get("numbers");
                     dp.NextPosition();
                     int start = dp.StartOffset;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => start >= 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(start >= 0);
                    int end = dp.EndOffset;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => end >= 0 && end >= start);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(end >= 0 && end >= start);
                     // check that the offsets correspond to the term in the src text
                     Assert.IsTrue(storedNumbers.Substring(start, end - start).Equals("hundred", StringComparison.Ordinal));
                     if (withPayloads)
diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
index b7d7a3c602..32097493b4 100644
--- a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
+++ b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
@@ -347,7 +347,7 @@ public virtual void VerifyEquals(DirectoryReader r1, DirectoryReader r2, string
             }
             if (r1.NumDocs != r2.NumDocs)
            {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "r1.NumDocs=" + r1.NumDocs + " vs r2.NumDocs=" + r2.NumDocs);
+                if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "r1.NumDocs=" + r1.NumDocs + " vs r2.NumDocs=" + r2.NumDocs);
             }
             bool hasDeletes = !(r1.MaxDoc == r2.MaxDoc && r1.NumDocs == r1.MaxDoc);
@@ -682,7 +682,7 @@ public static void VerifyEquals(Document d1, Document d2)
                 IIndexableField f2 = ff2[i];
                 if (f1.GetBinaryValue() != null)
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => f2.GetBinaryValue() != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(f2.GetBinaryValue() != null);
                 }
                 else
                 {
diff --git a/src/Lucene.Net.Tests/Index/TestStressNRT.cs b/src/Lucene.Net.Tests/Index/TestStressNRT.cs
index 3154b56a2d..b552cdbd87 100644
--- a/src/Lucene.Net.Tests/Index/TestStressNRT.cs
+++ b/src/Lucene.Net.Tests/Index/TestStressNRT.cs
@@ -263,8 +263,8 @@ public override void Run()
                                 {
                                     // install the new reader if it's newest (and check the current version since another reader may have already been installed)
                                     //System.out.println(Thread.currentThread().getName() + ": newVersion=" + newReader.getVersion());
-                                    if (Debugging.AssertsEnabled) Debugging.Assert(() => newReader.RefCount > 0);
-                                    if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.reader.RefCount > 0);
+                                    if (Debugging.AssertsEnabled) Debugging.Assert(newReader.RefCount > 0);
+                                    if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.reader.RefCount > 0);
                                     if (newReader.Version > outerInstance.reader.Version)
                                     {
                                         if (Verbose)
diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
index d8dada1fd5..5ac0bf5dfd 100644
--- a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
+++ b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
@@ -829,7 +829,7 @@ private void TestRandomSeeks(IndexReader r, params string[] validTermStrings)
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => loc >= -validTerms.Length);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(loc >= -validTerms.Length);
                     Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, result);
                 }
             }
diff --git a/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs b/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs
index bd1216b985..d738765271 100644
--- a/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs
+++ b/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs
@@ -110,7 +110,7 @@ public override bool SkipTo(int target)
             }
             int subIndex = ReaderUtil.SubIndex(target, leaves);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => subIndex >= leafOrd);
+            if (Debugging.AssertsEnabled) Debugging.Assert(subIndex >= leafOrd);
             if (subIndex != leafOrd)
             {
                 AtomicReaderContext ctx = leaves[subIndex];
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
index b870992f16..a257e2bc95 100644
--- a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
@@ -119,7 +119,7 @@ private class BulkScorerAnonymousInnerClassHelper : BulkScorer
             public override bool Score(ICollector c, int maxDoc)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => doc == -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(doc == -1);
                 doc = 3000;
                 FakeScorer fs = new FakeScorer();
                 fs.doc = doc;
diff --git a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
index 8a244a0c9b..80747c331e 100644
--- a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
@@ -234,7 +234,7 @@ public virtual void TestQueryWrapperFilter()
             Filter filter = new QueryWrapperFilter(AssertingQuery.Wrap(Random, new TermQuery(new Term("field", "a"))));
             IndexSearcher s = NewSearcher(r);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => s is AssertingIndexSearcher);
+            if (Debugging.AssertsEnabled) Debugging.Assert(s is AssertingIndexSearcher);
             // this used to fail
             s.Search(new ConstantScoreQuery(filter), new TotalHitCountCollector());
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCache.cs b/src/Lucene.Net.Tests/Search/TestFieldCache.cs
index 7295a56140..4c93135d72 100644
--- a/src/Lucene.Net.Tests/Search/TestFieldCache.cs
+++ b/src/Lucene.Net.Tests/Search/TestFieldCache.cs
@@ -372,7 +372,7 @@ public virtual void Test()
                         break;
                     }
                     long ord = termOrds.NextOrd();
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => ord != SortedSetDocValues.NO_MORE_ORDS);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(ord != SortedSetDocValues.NO_MORE_ORDS);
                     BytesRef scratch = new BytesRef();
                     termOrds.LookupOrd(ord, scratch);
                     Assert.AreEqual(v, scratch);
diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
index de9b7be1d2..bdecf25518 100644
--- a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
+++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
@@ -365,14 +365,14 @@ internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, Ind
             this.sims = new SimScorer[(int)dv.ValueCount];
             foreach (BooleanClause clause in bq.GetClauses())
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => !clause.IsProhibited);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => !clause.IsRequired);
+                if (Debugging.AssertsEnabled) Debugging.Assert(!clause.IsProhibited);
+                if (Debugging.AssertsEnabled) Debugging.Assert(!clause.IsRequired);
                 Term term = ((TermQuery)clause.Query).Term;
                 long ord = dv.LookupTerm(term.Bytes);
                 if (ord >= 0)
                 {
                     bool success = ords.Add(ord);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => success); // no dups
+                    if (Debugging.AssertsEnabled) Debugging.Assert(success); // no dups
                     TermContext context = TermContext.Build(reader.Context, term);
                     SimWeight w = weight.Similarity.ComputeWeight(1f, searcher.CollectionStatistics("field"), searcher.TermStatistics(term, context));
                     var dummy = w.GetValueForNormalization(); // ignored
@@ -384,7 +384,7 @@ internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, Ind
         public override float GetScore()
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => score != 0, currentMatched.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(score != 0, currentMatched.ToString);
             return (float)score * ((BooleanWeight)m_weight).Coord(currentMatched, ((BooleanWeight)m_weight).MaxCoord);
         }
@@ -394,7 +394,7 @@ public override float GetScore()
         public override int NextDoc()
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => currentDoc != NO_MORE_DOCS);
+            if (Debugging.AssertsEnabled) Debugging.Assert(currentDoc != NO_MORE_DOCS);
             for (currentDoc = currentDoc + 1; currentDoc < maxDoc; currentDoc++)
             {
                 currentMatched = 0;
diff --git a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
index d1e61d861a..03e4f658ff 100644
--- a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
+++ b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
@@ -223,7 +223,7 @@ private void VerifyVectors(Fields vectors, int num)
             foreach (string field in vectors)
             {
                 Terms terms = vectors.GetTerms(field);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => terms != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(terms != null);
                 VerifyVector(terms.GetIterator(null), num);
             }
         }
diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
index 050e28c6d2..82e70ea663 100644
--- a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
+++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
@@ -474,13 +474,13 @@ public virtual void TestEmptyEnums()
             int count = 3000;
             int lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3);
             // test empty enum
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => lower < upper);
+            if (Debugging.AssertsEnabled) Debugging.Assert(lower < upper);
             Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, lower, upper, true, true)));
             Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, upper, lower, true, true)));
             // test empty enum outside of bounds
             lower = distance * noDocs + startOffset;
             upper = 2 * lower;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => lower < upper);
+            if (Debugging.AssertsEnabled) Debugging.Assert(lower < upper);
             Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, lower, upper, true, true)));
         }
diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
index 4262d3cbec..db8377f428 100644
--- a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
+++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
@@ -501,13 +501,13 @@ public virtual void TestEmptyEnums()
             int count = 3000;
             long lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3);
             // test empty enum
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => lower < upper);
+            if (Debugging.AssertsEnabled) Debugging.Assert(lower < upper);
             Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, lower, upper, true, true)));
             Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, upper, lower, true, true)));
             // test empty enum outside of bounds
             lower = distance * noDocs + startOffset;
             upper = 2L * lower;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => lower < upper);
+            if (Debugging.AssertsEnabled) Debugging.Assert(lower < upper);
             Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, lower, upper, true, true)));
         }
diff --git a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
index 8118ca6410..8162e52e3b 100644
--- a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
+++ b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
@@ -418,7 +418,7 @@ public virtual void Collect(int doc)
                 //#endif
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => docId >= 0, () => " base=" + docBase + " doc=" + doc);
+                if (Debugging.AssertsEnabled) Debugging.Assert(docId >= 0, () => " base=" + docBase + " doc=" + doc);
                 bits.Set(docId);
                 lastDocCollected = docId;
             }
diff --git a/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs b/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs
index 5addb895ec..8f72234d55 100644
--- a/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs
+++ b/src/Lucene.Net.Tests/Util/Automaton/TestUTF32ToUTF8.cs
@@ -76,7 +76,7 @@ private void TestOne(Random r, ByteRunAutomaton a, int startCode, int endCode, i
                 nonSurrogateCount = endCode - startCode + 1 - (UnicodeUtil.UNI_SUR_LOW_END - UnicodeUtil.UNI_SUR_HIGH_START + 1);
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => nonSurrogateCount > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(nonSurrogateCount > 0);
             for (int iter = 0; iter < iters; iter++)
             {
@@ -95,8 +95,8 @@ private void TestOne(Random r, ByteRunAutomaton a, int startCode, int endCode, i
                     }
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => code >= startCode && code <= endCode, () => "code=" + code + " start=" + startCode + " end=" + endCode);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => !IsSurrogate(code));
+                if (Debugging.AssertsEnabled) Debugging.Assert(code >= startCode && code <= endCode, () => "code=" + code + " start=" + startCode + " end=" + endCode);
+                if (Debugging.AssertsEnabled) Debugging.Assert(!IsSurrogate(code));
                 Assert.IsTrue(Matches(a, code), "DFA for range " + startCode + "-" + endCode + " failed to match code=" + code);
             }
diff --git a/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs b/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs
index b262eda0a0..0720328b6f 100644
--- a/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs
+++ b/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs
@@ -571,7 +571,7 @@ public virtual void Run(int limit, bool verify, bool verifyByOutput)
                 long tMid = Environment.TickCount;
                 Console.WriteLine(((tMid - tStart) / 1000.0) + " sec to add all terms");
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => builder.TermCount == ord);
+                if (Debugging.AssertsEnabled) Debugging.Assert(builder.TermCount == ord);
                 FST<T> fst = builder.Finish();
                 long tEnd = Environment.TickCount;
                 Console.WriteLine(((tEnd - tMid) / 1000.0) + " sec to finish/pack");
diff --git a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs
index 007eee8ed0..5d3d0402cb 100644
--- a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs
+++ b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs
@@ -52,7 +52,7 @@ public override int NextDoc()
                 {
                     doc = NO_MORE_DOCS;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => doc < numBits);
+                if (Debugging.AssertsEnabled) Debugging.Assert(doc < numBits);
                 return doc;
             }
diff --git a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs
index 9111a35aeb..ee29105fa8 100644
--- a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs
+++ b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoSequence.cs
@@ -91,7 +91,7 @@ private static void TstDecodeAllAdvanceToExpected(long[] values, EliasFanoDecode
        private static void TstDecodeAdvanceToMultiples(long[] values, EliasFanoDecoder efd, long m)
         {
             // test advancing to multiples of m
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => m > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(m > 0);
             long previousValue = -1L;
             long index = 0;
             long mm = m;
@@ -120,7 +120,7 @@ private static void TstDecodeAdvanceToMultiples(long[] values, EliasFanoDecoder
        private static void TstDecodeBackToMultiples(long[] values, EliasFanoDecoder efd, long m)
         {
             // test backing to multiples of m
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => m > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(m > 0);
             efd.ToAfterSequence();
             int index = values.Length - 1;
             if (index < 0)
@@ -135,7 +135,7 @@ private static void TstDecodeBackToMultiples(long[] values, EliasFanoDecoder efd
             while (index >= 0)
             {
                 expValue = values[index];
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => mm < previousValue);
+                if (Debugging.AssertsEnabled) Debugging.Assert(mm < previousValue);
                 if (expValue <= mm)
                 {
                     long backValue_ = efd.BackToValue(mm);
diff --git a/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs b/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs
index 8fad1f744e..9c044cca79 100644
--- a/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs
+++ b/src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs
@@ -57,7 +57,7 @@ public virtual void Test()
                 r2.NextBytes(bytes);
                 dataOutput.WriteBytes(bytes, bytes.Length);
                 long fp = dataOutput.GetFilePointer();
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => fp == lastFP + numBytes);
+                if (Debugging.AssertsEnabled) Debugging.Assert(fp == lastFP + numBytes);
                 lastFP = fp;
                 netBytes += numBytes;
             }
diff --git a/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs b/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs
index 3c272c7f71..a71ef90226 100644
--- a/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs
+++ b/src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs
@@ -94,7 +94,7 @@ public virtual void TestUnion()
         /// Create a random set which has <paramref name="numBitsSet"/> of its bits set.
protected static OpenBitSet RandomOpenSet(int numBits, int numBitsSet) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numBitsSet <= numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(numBitsSet <= numBits); OpenBitSet set = new OpenBitSet(numBits); Random random = Random; if (numBitsSet == numBits) diff --git a/src/Lucene.Net/Analysis/NumericTokenStream.cs b/src/Lucene.Net/Analysis/NumericTokenStream.cs index ace2805b0d..67a3671d66 100644 --- a/src/Lucene.Net/Analysis/NumericTokenStream.cs +++ b/src/Lucene.Net/Analysis/NumericTokenStream.cs @@ -178,7 +178,7 @@ public NumericTermAttribute() public void FillBytesRef() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ValueSize == 64 || ValueSize == 32); + if (Debugging.AssertsEnabled) Debugging.Assert(ValueSize == 64 || ValueSize == 32); if (ValueSize == 64) { NumericUtils.Int64ToPrefixCoded(_value, Shift, _bytes); diff --git a/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs b/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs index c70f8fee78..c015135e3d 100644 --- a/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs +++ b/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs @@ -155,7 +155,7 @@ public virtual Automaton ToAutomaton(TokenStream @in) { posInc = 1; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos > -1 || posInc > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(pos > -1 || posInc > 0); if (posInc > 0) { @@ -163,7 +163,7 @@ public virtual Automaton ToAutomaton(TokenStream @in) pos += posInc; posData = positions.Get(pos); - if (Debugging.AssertsEnabled) Debugging.Assert(() => posData.leaving == null); + if (Debugging.AssertsEnabled) Debugging.Assert(posData.leaving == null); if (posData.arriving == null) { diff --git a/src/Lucene.Net/Analysis/Tokenizer.cs b/src/Lucene.Net/Analysis/Tokenizer.cs index 36acdea6a8..249b60f664 100644 --- a/src/Lucene.Net/Analysis/Tokenizer.cs +++ b/src/Lucene.Net/Analysis/Tokenizer.cs @@ -115,7 +115,7 @@ public void SetReader(TextReader input) throw new InvalidOperationException("TokenStream contract violation: Close() call missing"); } this.inputPending = input; - if (Debugging.AssertsEnabled) Debugging.Assert(SetReaderTestPoint); + if (Debugging.AssertsEnabled) Debugging.Assert(SetReaderTestPoint()); } public override void Reset() diff --git a/src/Lucene.Net/Codecs/BlockTermState.cs b/src/Lucene.Net/Codecs/BlockTermState.cs index 13a8bd7a21..3768b6bdc3 100644 --- a/src/Lucene.Net/Codecs/BlockTermState.cs +++ b/src/Lucene.Net/Codecs/BlockTermState.cs @@ -57,7 +57,7 @@ protected internal BlockTermState() public override void CopyFrom(TermState other) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => other is BlockTermState, () => "can not copy from " + other.GetType().Name); + if (Debugging.AssertsEnabled) Debugging.Assert(other is BlockTermState, () => "can not copy from " + other.GetType().Name); BlockTermState other2 = (BlockTermState)other; base.CopyFrom(other); DocFreq = other2.DocFreq; diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs index a3f1591ba5..5d1b55b21c 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs @@ -167,13 +167,13 @@ public BlockTreeTermsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo in { int field = @in.ReadVInt32(); long numTerms = @in.ReadVInt64(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numTerms >= 0); int numBytes = 
@in.ReadVInt32(); BytesRef rootCode = new BytesRef(new byte[numBytes]); @in.ReadBytes(rootCode.Bytes, 0, numBytes); rootCode.Length = numBytes; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo != null, () => "field=" + field); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfo != null, () => "field=" + field); long sumTotalTermFreq = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : @in.ReadVInt64(); long sumDocFreq = @in.ReadVInt64(); int docCount = @in.ReadVInt32(); @@ -291,7 +291,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(field != null); FieldReader ret; fields.TryGetValue(field, out ret); return ret; @@ -478,7 +478,7 @@ internal virtual void EndBlock(FieldReader.SegmentTermsEnum.Frame frame) } endBlockCount++; long otherBytes = frame.fpEnd - frame.fp - frame.suffixesReader.Length - frame.statsReader.Length; - if (Debugging.AssertsEnabled) Debugging.Assert(() => otherBytes > 0, () => "otherBytes=" + otherBytes + " frame.fp=" + frame.fp + " frame.fpEnd=" + frame.fpEnd); + if (Debugging.AssertsEnabled) Debugging.Assert(otherBytes > 0, () => "otherBytes=" + otherBytes + " frame.fp=" + frame.fp + " frame.fpEnd=" + frame.fpEnd); TotalBlockOtherBytes += otherBytes; } @@ -491,9 +491,9 @@ internal virtual void Finish() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => startBlockCount == endBlockCount, () => "startBlockCount=" + startBlockCount + " endBlockCount=" + endBlockCount); - Debugging.Assert(() => TotalBlockCount == FloorSubBlockCount + NonFloorBlockCount, () => "floorSubBlockCount=" + FloorSubBlockCount + " nonFloorBlockCount=" + NonFloorBlockCount + " totalBlockCount=" + TotalBlockCount); - Debugging.Assert(() => TotalBlockCount == MixedBlockCount + TermsOnlyBlockCount + SubBlocksOnlyBlockCount, () => "totalBlockCount=" + TotalBlockCount + " mixedBlockCount=" + MixedBlockCount + " subBlocksOnlyBlockCount=" + SubBlocksOnlyBlockCount + " termsOnlyBlockCount=" + TermsOnlyBlockCount); + Debugging.Assert(startBlockCount == endBlockCount, () => "startBlockCount=" + startBlockCount + " endBlockCount=" + endBlockCount); + Debugging.Assert(TotalBlockCount == FloorSubBlockCount + NonFloorBlockCount, () => "floorSubBlockCount=" + FloorSubBlockCount + " nonFloorBlockCount=" + NonFloorBlockCount + " totalBlockCount=" + TotalBlockCount); + Debugging.Assert(TotalBlockCount == MixedBlockCount + TermsOnlyBlockCount + SubBlocksOnlyBlockCount, () => "totalBlockCount=" + TotalBlockCount + " mixedBlockCount=" + MixedBlockCount + " subBlocksOnlyBlockCount=" + SubBlocksOnlyBlockCount + " termsOnlyBlockCount=" + TermsOnlyBlockCount); } } @@ -532,7 +532,7 @@ public override string ToString() @out.AppendLine(" " + prefix.ToString().PadLeft(2, ' ') + ": " + blockCount); } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => TotalBlockCount == total); + if (Debugging.AssertsEnabled) Debugging.Assert(TotalBlockCount == total); } return @out.ToString(); } @@ -563,7 +563,7 @@ public sealed class FieldReader : Terms internal FieldReader(BlockTreeTermsReader outerInstance, FieldInfo fieldInfo, long numTerms, BytesRef rootCode, long sumTotalTermFreq, long sumDocFreq, int docCount, long indexStartFP, int longsSize, IndexInput indexIn) { this.outerInstance = outerInstance; - if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms > 0); + if 
(Debugging.AssertsEnabled) Debugging.Assert(numTerms > 0); this.fieldInfo = fieldInfo; //DEBUG = BlockTreeTermsReader.DEBUG && fieldInfo.name.Equals("id", StringComparison.Ordinal); this.numTerms = numTerms; @@ -763,7 +763,7 @@ public Frame(BlockTreeTermsReader.FieldReader.IntersectEnum outerInstance, int o internal void LoadNextFloorBlock() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numFollowFloorBlocks > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numFollowFloorBlocks > 0); //if (DEBUG) System.out.println(" loadNextFoorBlock trans=" + transitions[transitionIndex]); do @@ -848,7 +848,7 @@ internal void Load(BytesRef frameIndexData) outerInstance.@in.Seek(fp); int code_ = outerInstance.@in.ReadVInt32(); entCount = (int)((uint)code_ >> 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => entCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(entCount > 0); isLastInFloor = (code_ & 1) != 0; // term suffixes: @@ -909,7 +909,7 @@ public bool Next() public bool NextLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; suffix = suffixesReader.ReadVInt32(); startBytePos = suffixesReader.Position; @@ -920,7 +920,7 @@ public bool NextLeaf() public bool NextNonLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; int code = suffixesReader.ReadVInt32(); suffix = (int)((uint)code >> 1); @@ -947,7 +947,7 @@ public void DecodeMetaData() // lazily catch up on metadata decode: int limit = TermBlockOrd; bool absolute = metaDataUpto == 0; - if (Debugging.AssertsEnabled) Debugging.Assert(() => limit > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(limit > 0); // TODO: better API would be "jump straight to term=N"??? while (metaDataUpto < limit) @@ -1024,7 +1024,7 @@ public IntersectEnum(BlockTreeTermsReader.FieldReader outerInstance, CompiledAut FST.Arc arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! 
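The mechanical rewrite in these hunks is the same everywhere: the condition loses its "() =>" wrapper and is evaluated inline, while the message argument stays a lazy Func<string>. A minimal before/after sketch for illustration only (condition and message are copied from the call sites above; this snippet is not part of the diff):

    // Before: condition and message are both deferred, so a closure and its
    // delegates are allocated on every pass through the call site whenever
    // asserts are enabled, even when the assertion succeeds.
    if (Debugging.AssertsEnabled)
        Debugging.Assert(() => numTerms >= 0, () => "field=" + field);

    // After: the condition is an eagerly evaluated bool (cheap, and only
    // reached at all because of the AssertsEnabled guard); the message
    // factory remains lazy, so the string is built only on failure.
    if (Debugging.AssertsEnabled)
        Debugging.Assert(numTerms >= 0, () => "field=" + field);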
- if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); // Special pushFrame since it's the first one: Frame f = stack[0]; @@ -1036,7 +1036,7 @@ public IntersectEnum(BlockTreeTermsReader.FieldReader outerInstance, CompiledAut f.Load(outerInstance.rootCode); // for assert: - if (Debugging.AssertsEnabled) Debugging.Assert(() => SetSavedStartTerm(startTerm)); + if (Debugging.AssertsEnabled) Debugging.Assert(SetSavedStartTerm(startTerm)); currentFrame = f; if (startTerm != null) @@ -1070,7 +1070,7 @@ private Frame GetFrame(int ord) } stack = next; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => stack[ord].ord == ord); + if (Debugging.AssertsEnabled) Debugging.Assert(stack[ord].ord == ord); return stack[ord]; } @@ -1104,7 +1104,7 @@ private Frame PushFrame(int state) // possible: FST.Arc arc = currentFrame.arc; int idx = currentFrame.prefix; - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFrame.suffix > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(currentFrame.suffix > 0); BytesRef output = currentFrame.outputPrefix; while (idx < f.prefix) { @@ -1113,14 +1113,14 @@ private Frame PushFrame(int state) // case by using current arc as starting point, // passed to findTargetArc arc = outerInstance.index.FindTargetArc(target, arc, GetArc(1 + idx), fstReader); - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(arc != null); output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); idx++; } f.arc = arc; f.outputPrefix = output; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); f.Load(outerInstance.outerInstance.fstOutputs.Add(output, arc.NextFinalOutput)); return f; } @@ -1171,7 +1171,7 @@ private int GetState() for (int idx = 0; idx < currentFrame.suffix; idx++) { state = runAutomaton.Step(state, currentFrame.suffixBytes[currentFrame.startBytePos + idx] & 0xff); - if (Debugging.AssertsEnabled) Debugging.Assert(() => state != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(state != -1); } return state; } @@ -1183,13 +1183,13 @@ private int GetState() private void SeekToStartTerm(BytesRef target) { //if (DEBUG) System.out.println("seek to startTerm=" + target.utf8ToString()); - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFrame.ord == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(currentFrame.ord == 0); if (term.Length < target.Length) { term.Bytes = ArrayUtil.Grow(term.Bytes, target.Length); } FST.Arc arc = arcs[0]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc == currentFrame.arc); + if (Debugging.AssertsEnabled) Debugging.Assert(arc == currentFrame.arc); for (int idx = 0; idx <= target.Length; idx++) { @@ -1267,7 +1267,7 @@ private void SeekToStartTerm(BytesRef target) } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(false); } public override BytesRef Next() @@ -1297,7 +1297,7 @@ public override BytesRef Next() } long lastFP = currentFrame.fpOrig; currentFrame = stack[currentFrame.ord - 1]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFrame.lastSubFP == lastFP); + if (Debugging.AssertsEnabled) Debugging.Assert(currentFrame.lastSubFP == lastFP); //if (DEBUG) System.out.println("\n frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + 
currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix); } } @@ -1351,7 +1351,7 @@ public override BytesRef Next() byte[] commonSuffixBytes = compiledAutomaton.CommonSuffixRef.Bytes; int lenInPrefix = compiledAutomaton.CommonSuffixRef.Length - currentFrame.suffix; - if (Debugging.AssertsEnabled) Debugging.Assert(() => compiledAutomaton.CommonSuffixRef.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(compiledAutomaton.CommonSuffixRef.Offset == 0); int suffixBytesPos; int commonSuffixBytesPos = 0; @@ -1362,7 +1362,7 @@ public override BytesRef Next() // test whether the prefix part matches: byte[] termBytes = term.Bytes; int termBytesPos = currentFrame.prefix - lenInPrefix; - if (Debugging.AssertsEnabled) Debugging.Assert(() => termBytesPos >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(termBytesPos >= 0); int termBytesPosEnd = currentFrame.prefix; while (termBytesPos < termBytesPosEnd) { @@ -1430,7 +1430,7 @@ public override BytesRef Next() { CopyTerm(); //if (DEBUG) System.out.println(" term match to state=" + state + "; return term=" + brToString(term)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, () => "saveStartTerm=" + savedStartTerm.Utf8ToString() + " term=" + term.Utf8ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(savedStartTerm == null || term.CompareTo(savedStartTerm) > 0, () => "saveStartTerm=" + savedStartTerm.Utf8ToString() + " term=" + term.Utf8ToString()); return term; } else @@ -1534,7 +1534,7 @@ public SegmentTermsEnum(BlockTreeTermsReader.FieldReader outerInstance) { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); } else { @@ -1582,7 +1582,7 @@ public Stats ComputeBlockStats() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); } else { @@ -1617,7 +1617,7 @@ public Stats ComputeBlockStats() } long lastFP = currentFrame.fpOrig; currentFrame = stack[currentFrame.ord - 1]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => lastFP == currentFrame.lastSubFP); + if (Debugging.AssertsEnabled) Debugging.Assert(lastFP == currentFrame.lastSubFP); // if (DEBUG) { // System.out.println(" reset validIndexPrefix=" + validIndexPrefix); // } @@ -1657,7 +1657,7 @@ public Stats ComputeBlockStats() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! 
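Note that a few of the conversions in this region are not pure condition checks: SetSavedStartTerm, ClearEOF, and SetEOF are assertion hooks that mutate enum state and always return true, which is why they change from a method-group argument (Debugging.Assert(ClearEOF)) to an explicit invocation (Debugging.Assert(ClearEOF())). A rough sketch of that idiom follows; the method body is assumed from the usual Lucene pattern and is not shown in this patch:

    // Assumed shape of the hook: flip the flag, then return true so the
    // surrounding assert can never actually fire.
    private bool ClearEOF()
    {
        eof = false;
        return true;
    }

    // The hook is now invoked eagerly at the call site; the AssertsEnabled
    // guard is what keeps the side effect out of runs with asserts disabled.
    if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF());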
- if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); } else { @@ -1684,7 +1684,7 @@ private Frame GetFrame(int ord) } stack = next; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => stack[ord].ord == ord); + if (Debugging.AssertsEnabled) Debugging.Assert(stack[ord].ord == ord); return stack[ord]; } @@ -1743,7 +1743,7 @@ internal Frame PushFrame(FST.Arc arc, long fp, int length) // System.out.println(" skip rewind!"); // } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => length == f.prefix); + if (Debugging.AssertsEnabled) Debugging.Assert(length == f.prefix); } else { @@ -1789,7 +1789,7 @@ public override bool SeekExact(BytesRef target) term.Bytes = ArrayUtil.Grow(term.Bytes, 1 + target.Length); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => ClearEOF()); + if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF()); FST.Arc arc; int targetUpto; @@ -1811,12 +1811,12 @@ public override bool SeekExact(BytesRef target) // } arc = arcs[0]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); output = arc.Output; targetUpto = 0; Frame lastFrame = stack[0]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => validIndexPrefix <= term.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(validIndexPrefix <= term.Length); int targetLimit = Math.Min(target.Length, validIndexPrefix); @@ -1840,7 +1840,7 @@ public override bool SeekExact(BytesRef target) //if (arc.label != (target.bytes[target.offset + targetUpto] & 0xFF)) { //System.out.println("FAIL: arc.label=" + (char) arc.label + " targetLabel=" + (char) (target.bytes[target.offset + targetUpto] & 0xFF)); //} - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -1907,7 +1907,7 @@ public override bool SeekExact(BytesRef target) else { // Target is exactly the same as current term - if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Length == target.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(term.Length == target.Length); if (termExists) { // if (DEBUG) { @@ -1934,8 +1934,8 @@ public override bool SeekExact(BytesRef target) // Empty string prefix must have an output (block) in the index! 
if (Debugging.AssertsEnabled) { - Debugging.Assert(() => arc.IsFinal); - Debugging.Assert(() => arc.Output != null); + Debugging.Assert(arc.IsFinal); + Debugging.Assert(arc.Output != null); } // if (DEBUG) { @@ -2008,7 +2008,7 @@ public override bool SeekExact(BytesRef target) arc = nextArc; term.Bytes[targetUpto] = (byte)targetLabel; // Aggregate output as we go: - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Output != null); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Output != null); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -2076,7 +2076,7 @@ public override SeekStatus SeekCeil(BytesRef target) term.Bytes = ArrayUtil.Grow(term.Bytes, 1 + target.Length); } - if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF); + if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF()); //if (DEBUG) { //System.out.println("\nBTTR.seekCeil seg=" + segment + " target=" + fieldInfo.name + ":" + target.utf8ToString() + " " + target + " current=" + brToString(term) + " (exists?=" + termExists + ") validIndexPrefix= " + validIndexPrefix); @@ -2103,12 +2103,12 @@ public override SeekStatus SeekCeil(BytesRef target) //} arc = arcs[0]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); output = arc.Output; targetUpto = 0; Frame lastFrame = stack[0]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => validIndexPrefix <= term.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(validIndexPrefix <= term.Length); int targetLimit = Math.Min(target.Length, validIndexPrefix); @@ -2129,7 +2129,7 @@ public override SeekStatus SeekCeil(BytesRef target) break; } arc = arcs[1 + targetUpto]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label == (target.Bytes[target.Offset + targetUpto] & 0xFF), () => "arc.label=" + (char)arc.Label + " targetLabel=" + (char)(target.Bytes[target.Offset + targetUpto] & 0xFF)); // TOOD: we could save the outputs in local // byte[][] instead of making new objs ever // seek; but, often the FST doesn't have any @@ -2198,7 +2198,7 @@ public override SeekStatus SeekCeil(BytesRef target) else { // Target is exactly the same as current term - if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Length == target.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(term.Length == target.Length); if (termExists) { //if (DEBUG) { @@ -2222,8 +2222,8 @@ public override SeekStatus SeekCeil(BytesRef target) // Empty string prefix must have an output (block) in the index! 
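Where several assertions share one guard, the patch keeps them grouped in a single if (Debugging.AssertsEnabled) { ... } block, as in the hunk that follows, so the flag is checked once and the related invariants read as a unit. Sketched with the two arc checks used at these call sites:

    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(arc.IsFinal);
        Debugging.Assert(arc.Output != null);
    }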
if (Debugging.AssertsEnabled) { - Debugging.Assert(() => arc.IsFinal); - Debugging.Assert(() => arc.Output != null); + Debugging.Assert(arc.IsFinal); + Debugging.Assert(arc.Output != null); } //if (DEBUG) { @@ -2298,7 +2298,7 @@ public override SeekStatus SeekCeil(BytesRef target) term.Bytes[targetUpto] = (byte)targetLabel; arc = nextArc; // Aggregate output as we go: - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Output != null); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Output != null); if (arc.Output != outerInstance.outerInstance.NO_OUTPUT) { output = outerInstance.outerInstance.fstOutputs.Add(output, arc.Output); @@ -2431,7 +2431,7 @@ public override BytesRef Next() { arc = outerInstance.index.GetFirstArc(arcs[0]); // Empty string prefix must have an output in the index! - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); } else { @@ -2443,7 +2443,7 @@ public override BytesRef Next() targetBeforeCurrentLength = currentFrame.ord; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(!eof); //if (DEBUG) { //System.out.println("\nBTTR.next seg=" + segment + " term=" + brToString(term) + " termExists?=" + termExists + " field=" + fieldInfo.name + " termBlockOrd=" + currentFrame.state.termBlockOrd + " validIndexPrefix=" + validIndexPrefix); //printSeekState(); @@ -2459,7 +2459,7 @@ public override BytesRef Next() // works properly: //if (DEBUG) System.out.println(" re-seek to pending term=" + term.utf8ToString() + " " + term); bool result = SeekExact(term); - if (Debugging.AssertsEnabled) Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(result); } // Pop finished blocks @@ -2475,7 +2475,7 @@ public override BytesRef Next() if (currentFrame.ord == 0) { //if (DEBUG) System.out.println(" return null"); - if (Debugging.AssertsEnabled) Debugging.Assert(SetEOF); + if (Debugging.AssertsEnabled) Debugging.Assert(SetEOF()); term.Length = 0; validIndexPrefix = 0; currentFrame.Rewind(); @@ -2529,7 +2529,7 @@ public override BytesRef Term { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(!eof); return term; } } @@ -2538,7 +2538,7 @@ public override int DocFreq { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(!eof); //if (DEBUG) System.out.println("BTR.docFreq"); currentFrame.DecodeMetaData(); //if (DEBUG) System.out.println(" return " + currentFrame.state.docFreq); @@ -2550,7 +2550,7 @@ public override long TotalTermFreq { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(!eof); currentFrame.DecodeMetaData(); return currentFrame.state.TotalTermFreq; } @@ -2558,7 +2558,7 @@ public override long TotalTermFreq public override DocsEnum Docs(IBits skipDocs, DocsEnum reuse, DocsFlags flags) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(!eof); //if (DEBUG) { //System.out.println("BTTR.docs seg=" + segment); //} @@ -2577,7 +2577,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits skipDocs, DocsAndPos return null; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(!eof); currentFrame.DecodeMetaData(); return 
outerInstance.outerInstance.postingsReader.DocsAndPositions(outerInstance.fieldInfo, currentFrame.state, skipDocs, reuse, flags); } @@ -2587,15 +2587,15 @@ public override void SeekExact(BytesRef target, TermState otherState) // if (DEBUG) { // System.out.println("BTTR.seekExact termState seg=" + segment + " target=" + target.utf8ToString() + " " + target + " state=" + otherState); // } - if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF); + if (Debugging.AssertsEnabled) Debugging.Assert(ClearEOF()); if (target.CompareTo(term) != 0 || !termExists) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => otherState != null && otherState is BlockTermState); + if (Debugging.AssertsEnabled) Debugging.Assert(otherState != null && otherState is BlockTermState); currentFrame = staticFrame; currentFrame.state.CopyFrom(otherState); term.CopyBytes(target); currentFrame.metaDataUpto = currentFrame.TermBlockOrd; - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFrame.metaDataUpto > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(currentFrame.metaDataUpto > 0); validIndexPrefix = 0; } else @@ -2608,7 +2608,7 @@ public override void SeekExact(BytesRef target, TermState otherState) public override TermState GetTermState() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !eof); + if (Debugging.AssertsEnabled) Debugging.Assert(!eof); currentFrame.DecodeMetaData(); TermState ts = (TermState)currentFrame.state.Clone(); //if (DEBUG) System.out.println("BTTR.termState seg=" + segment + " state=" + ts); @@ -2740,7 +2740,7 @@ internal void LoadNextFloorBlock() //if (DEBUG) { //System.out.println(" loadNextFloorBlock fp=" + fp + " fpEnd=" + fpEnd); //} - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc == null || isFloor, () => "arc=" + arc + " isFloor=" + isFloor); + if (Debugging.AssertsEnabled) Debugging.Assert(arc == null || isFloor, () => "arc=" + arc + " isFloor=" + isFloor); fp = fpEnd; nextEnt = -1; LoadBlock(); @@ -2774,9 +2774,9 @@ internal void LoadBlock() outerInstance.@in.Seek(fp); int code = outerInstance.@in.ReadVInt32(); entCount = (int)((uint)code >> 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => entCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(entCount > 0); isLastInFloor = (code & 1) != 0; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc == null || (isLastInFloor || isFloor)); + if (Debugging.AssertsEnabled) Debugging.Assert(arc == null || (isLastInFloor || isFloor)); // TODO: if suffixes were stored in random-access // array structure, then we could do binary search @@ -2897,7 +2897,7 @@ public bool Next() public bool NextLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; suffix = suffixesReader.ReadVInt32(); startBytePos = suffixesReader.Position; @@ -2915,7 +2915,7 @@ public bool NextLeaf() public bool NextNonLeaf() { //if (DEBUG) System.out.println(" frame.next ord=" + ord + " nextEnt=" + nextEnt + " entCount=" + entCount); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); + if (Debugging.AssertsEnabled) 
Debugging.Assert(nextEnt != -1 && nextEnt < entCount, () => "nextEnt=" + nextEnt + " entCount=" + entCount + " fp=" + fp); nextEnt++; int code = suffixesReader.ReadVInt32(); suffix = (int)((uint)code >> 1); @@ -2974,7 +2974,7 @@ public void ScanToFloorFrame(BytesRef target) return; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => numFollowFloorBlocks != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numFollowFloorBlocks != 0); long newFP = fpOrig; while (true) @@ -3034,7 +3034,7 @@ public void DecodeMetaData() // lazily catch up on metadata decode: int limit = TermBlockOrd; bool absolute = metaDataUpto == 0; - if (Debugging.AssertsEnabled) Debugging.Assert(() => limit > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(limit > 0); // TODO: better API would be "jump straight to term=N"??? while (metaDataUpto < limit) @@ -3090,7 +3090,7 @@ private bool PrefixMatches(BytesRef target) /// public void ScanToSubBlock(long subFP) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !isLeafBlock); + if (Debugging.AssertsEnabled) Debugging.Assert(!isLeafBlock); //if (DEBUG) System.out.println(" scanToSubBlock fp=" + fp + " subFP=" + subFP + " entCount=" + entCount + " lastSubFP=" + lastSubFP); //assert nextEnt == 0; if (lastSubFP == subFP) @@ -3098,12 +3098,12 @@ public void ScanToSubBlock(long subFP) //if (DEBUG) System.out.println(" already positioned"); return; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => subFP < fp, () => "fp=" + fp + " subFP=" + subFP); + if (Debugging.AssertsEnabled) Debugging.Assert(subFP < fp, () => "fp=" + fp + " subFP=" + subFP); long targetSubCode = fp - subFP; //if (DEBUG) System.out.println(" targetSubCode=" + targetSubCode); while (true) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt < entCount); + if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt < entCount); nextEnt++; int code = suffixesReader.ReadVInt32(); suffixesReader.SkipBytes(isLeafBlock ? 
code : (int)((uint)code >> 1)); @@ -3142,7 +3142,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) { // if (DEBUG) System.out.println(" scanToTermLeaf: block fp=" + fp + " prefix=" + prefix + " nextEnt=" + nextEnt + " (of " + entCount + ") target=" + brToString(target) + " term=" + brToString(term)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1); outerInstance.termExists = true; subCode = 0; @@ -3156,7 +3156,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) return SeekStatus.END; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => PrefixMatches(target)); + if (Debugging.AssertsEnabled) Debugging.Assert(PrefixMatches(target)); // Loop over each entry (term or sub-block) in this block: //nextTerm: while(nextEnt < entCount) { @@ -3195,7 +3195,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetPos == targetLimit); + if (Debugging.AssertsEnabled) Debugging.Assert(targetPos == targetLimit); cmp = termLen - target.Length; stop = true; } @@ -3251,7 +3251,7 @@ public SeekStatus ScanToTermLeaf(BytesRef target, bool exactOnly) // would have followed the index to this // sub-block from the start: - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.termExists); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.termExists); FillTerm(); //if (DEBUG) System.out.println(" found!"); return SeekStatus.FOUND; @@ -3288,7 +3288,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) { //if (DEBUG) System.out.println(" scanToTermNonLeaf: block fp=" + fp + " prefix=" + prefix + " nextEnt=" + nextEnt + " (of " + entCount + ") target=" + brToString(target) + " term=" + brToString(term)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextEnt != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1); if (nextEnt == entCount) { @@ -3300,7 +3300,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) return SeekStatus.END; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => PrefixMatches(target)); + if (Debugging.AssertsEnabled) Debugging.Assert(PrefixMatches(target)); // Loop over each entry (term or sub-block) in this block: //nextTerm: while(nextEnt < entCount) { @@ -3350,7 +3350,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetPos == targetLimit); + if (Debugging.AssertsEnabled) Debugging.Assert(targetPos == targetLimit); cmp = termLen - target.Length; stop = true; } @@ -3407,7 +3407,7 @@ public SeekStatus ScanToTermNonLeaf(BytesRef target, bool exactOnly) // would have followed the index to this // sub-block from the start: - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.termExists); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.termExists); FillTerm(); //if (DEBUG) System.out.println(" found!"); return SeekStatus.FOUND; diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs index 7f5065ac29..43b9a9f4ed 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs @@ -259,9 +259,9 @@ private class FieldMetaData public FieldMetaData(FieldInfo fieldInfo, BytesRef rootCode, long numTerms, long indexStartFP, long sumTotalTermFreq, long sumDocFreq, int docCount, int 
longsSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numTerms > 0); this.FieldInfo = fieldInfo; - if (Debugging.AssertsEnabled) Debugging.Assert(() => rootCode != null, () => "field=" + fieldInfo.Name + " numTerms=" + numTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(rootCode != null, () => "field=" + fieldInfo.Name + " numTerms=" + numTerms); this.RootCode = rootCode; this.IndexStartFP = indexStartFP; this.NumTerms = numTerms; @@ -368,14 +368,14 @@ public override TermsConsumer AddField(FieldInfo field) { //DEBUG = field.name.Equals("id", StringComparison.Ordinal); //if (DEBUG) System.out.println("\nBTTW.addField seg=" + segment + " field=" + field.name); - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(currentField == null || currentField.Name.CompareToOrdinal(field.Name) < 0); currentField = field; return new TermsWriter(this, field); } internal static long EncodeOutput(long fp, bool hasTerms, bool isFloor) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fp < (1L << 62)); + if (Debugging.AssertsEnabled) Debugging.Assert(fp < (1L << 62)); return (fp << 2) | (uint)(hasTerms ? OUTPUT_FLAG_HAS_TERMS : 0) | (uint)(isFloor ? OUTPUT_FLAG_IS_FLOOR : 0); } @@ -481,10 +481,10 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc // LUCENENET specific - we use a custom wrapper function to display floorBlocks, since // it might contain garbage that cannot be converted into text. if (Debugging.AssertsEnabled) Debugging.Assert( - () => (IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null), + (IsFloor && floorBlocks != null && floorBlocks.Count != 0) || (!IsFloor && floorBlocks == null), () => "isFloor=" + IsFloor + " floorBlocks=" + ToString(floorBlocks)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchBytes.GetFilePointer() == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(scratchBytes.GetFilePointer() == 0); // TODO: try writing the leading vLong in MSB order // (opposite of what Lucene does today), for better @@ -495,12 +495,12 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc scratchBytes.WriteVInt32(floorBlocks.Count); foreach (PendingBlock sub in floorBlocks) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => sub.FloorLeadByte != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(sub.FloorLeadByte != -1); //if (DEBUG) { // System.out.println(" write floorLeadByte=" + Integer.toHexString(sub.floorLeadByte&0xff)); //} scratchBytes.WriteByte((byte)(sbyte)sub.FloorLeadByte); - if (Debugging.AssertsEnabled) Debugging.Assert(() => sub.Fp > Fp); + if (Debugging.AssertsEnabled) Debugging.Assert(sub.Fp > Fp); scratchBytes.WriteVInt64((sub.Fp - Fp) << 1 | (uint)(sub.HasTerms ? 
1 : 0)); } } @@ -508,7 +508,7 @@ public void CompileIndex(IList floorBlocks, RAMOutputStream scratc ByteSequenceOutputs outputs = ByteSequenceOutputs.Singleton; Builder indexBuilder = new Builder(FST.INPUT_TYPE.BYTE1, 0, 0, true, false, int.MaxValue, outputs, null, false, PackedInt32s.COMPACT, true, 15); var bytes = new byte[(int)scratchBytes.GetFilePointer()]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(bytes.Length > 0); scratchBytes.WriteTo(bytes, 0); indexBuilder.Add(Util.ToInt32sRef(Prefix, scratchIntsRef), new BytesRef(bytes, 0, bytes.Length)); scratchBytes.Reset(); @@ -729,8 +729,8 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun // in this block if (Debugging.AssertsEnabled) { - Debugging.Assert(() => lastSuffixLeadLabel == -1); - Debugging.Assert(() => numSubs == 0); + Debugging.Assert(lastSuffixLeadLabel == -1); + Debugging.Assert(numSubs == 0); } suffixLeadLabel = -1; } @@ -742,7 +742,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun else { PendingBlock block = (PendingBlock)ent; - if (Debugging.AssertsEnabled) Debugging.Assert(() => block.Prefix.Length > prefixLength); + if (Debugging.AssertsEnabled) Debugging.Assert(block.Prefix.Length > prefixLength); suffixLeadLabel = block.Prefix.Bytes[block.Prefix.Offset + prefixLength] & 0xff; } @@ -863,7 +863,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun //System.out.println(" = " + pendingCount); pendingCount = 0; - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.minItemsInBlock == 1 || subCount > 1, () => "minItemsInBlock=" + outerInstance.minItemsInBlock + " subCount=" + subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.minItemsInBlock == 1 || subCount > 1, () => "minItemsInBlock=" + outerInstance.minItemsInBlock + " subCount=" + subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength); subCount = 0; startLabel = subBytes[sub + 1]; @@ -880,8 +880,8 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun // here if (Debugging.AssertsEnabled) { - Debugging.Assert(() => startLabel != -1); - Debugging.Assert(() => firstBlock != null); + Debugging.Assert(startLabel != -1); + Debugging.Assert(firstBlock != null); } prevTerm.Int32s[prevTerm.Offset + prefixLength] = startLabel; //System.out.println(" final " + (numSubs-sub-1) + " subs"); @@ -902,7 +902,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun prevTerm.Int32s[prevTerm.Offset + prefixLength] = savLabel; - if (Debugging.AssertsEnabled) Debugging.Assert(() => firstBlock != null); + if (Debugging.AssertsEnabled) Debugging.Assert(firstBlock != null); firstBlock.CompileIndex(floorBlocks, outerInstance.scratchBytes); pending.Add(firstBlock); @@ -931,11 +931,11 @@ private string ToString(BytesRef b) // block: private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexPrefixLength, int startBackwards, int length, int futureTermCount, bool isFloor, int floorLeadByte, bool isLastInFloor) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(length > 0); int start = pending.Count - 
startBackwards; - if (Debugging.AssertsEnabled) Debugging.Assert(() => start >= 0, () => "pending.Count=" + pending.Count + " startBackwards=" + startBackwards + " length=" + length); + if (Debugging.AssertsEnabled) Debugging.Assert(start >= 0, () => "pending.Count=" + pending.Count + " startBackwards=" + startBackwards + " length=" + length); IList slice = pending.SubList(start, start + length); @@ -994,7 +994,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP subIndices = null; foreach (PendingEntry ent in slice) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ent.IsTerm); + if (Debugging.AssertsEnabled) Debugging.Assert(ent.IsTerm); PendingTerm term = (PendingTerm)ent; BlockTermState state = term.State; int suffix = term.Term.Length - prefixLength; @@ -1012,7 +1012,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP statsWriter.WriteVInt32(state.DocFreq); if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state.TotalTermFreq >= state.DocFreq, () => state.TotalTermFreq + " vs " + state.DocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(state.TotalTermFreq >= state.DocFreq, () => state.TotalTermFreq + " vs " + state.DocFreq); statsWriter.WriteVInt64(state.TotalTermFreq - state.DocFreq); } @@ -1020,7 +1020,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP outerInstance.postingsWriter.EncodeTerm(longs, bytesWriter, fieldInfo, state, absolute); for (int pos = 0; pos < longsSize; pos++) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => longs[pos] >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(longs[pos] >= 0); metaWriter.WriteVInt64(longs[pos]); } bytesWriter.WriteTo(metaWriter); @@ -1055,7 +1055,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP statsWriter.WriteVInt32(state.DocFreq); if (fieldInfo.IndexOptions != IndexOptions.DOCS_ONLY) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state.TotalTermFreq >= state.DocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(state.TotalTermFreq >= state.DocFreq); statsWriter.WriteVInt64(state.TotalTermFreq - state.DocFreq); } @@ -1071,7 +1071,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP outerInstance.postingsWriter.EncodeTerm(longs, bytesWriter, fieldInfo, state, absolute); for (int pos = 0; pos < longsSize; pos++) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => longs[pos] >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(longs[pos] >= 0); metaWriter.WriteVInt64(longs[pos]); } bytesWriter.WriteTo(metaWriter); @@ -1085,13 +1085,13 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP PendingBlock block = (PendingBlock)ent; int suffix = block.Prefix.Length - prefixLength; - if (Debugging.AssertsEnabled) Debugging.Assert(() => suffix > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(suffix > 0); // For non-leaf block we borrow 1 bit to record // if entry is term or sub-block suffixWriter.WriteVInt32((suffix << 1) | 1); suffixWriter.WriteBytes(block.Prefix.Bytes, prefixLength, suffix); - if (Debugging.AssertsEnabled) Debugging.Assert(() => block.Fp < startFP); + if (Debugging.AssertsEnabled) Debugging.Assert(block.Fp < startFP); // if (DEBUG) { // BytesRef suffixBytes = new BytesRef(suffix); @@ -1105,7 +1105,7 @@ private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexP } } - if 
(Debugging.AssertsEnabled) Debugging.Assert(() => subIndices.Count != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(subIndices.Count != 0); } // TODO: we could block-write the term suffix pointers; @@ -1184,7 +1184,7 @@ public override PostingsConsumer StartTerm(BytesRef text) public override void FinishTerm(BytesRef text, TermStats stats) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => stats.DocFreq > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(stats.DocFreq > 0); //if (DEBUG) System.out.println("BTTW.finishTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment + " df=" + stats.docFreq); blockBuilder.Add(Util.ToInt32sRef(text, scratchIntsRef), noOutputs.NoOutput); @@ -1206,12 +1206,12 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount blockBuilder.Finish(); // We better have one final "root" block: - if (Debugging.AssertsEnabled) Debugging.Assert(() => pending.Count == 1 && !pending[0].IsTerm, () => "pending.size()=" + pending.Count + " pending=" + pending); + if (Debugging.AssertsEnabled) Debugging.Assert(pending.Count == 1 && !pending[0].IsTerm, () => "pending.size()=" + pending.Count + " pending=" + pending); PendingBlock root = (PendingBlock)pending[0]; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => root.Prefix.Length == 0); - Debugging.Assert(() => root.Index.EmptyOutput != null); + Debugging.Assert(root.Prefix.Length == 0); + Debugging.Assert(root.Index.EmptyOutput != null); } this.sumTotalTermFreq = sumTotalTermFreq; @@ -1237,9 +1237,9 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => sumTotalTermFreq == 0 || fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY && sumTotalTermFreq == -1); - Debugging.Assert(() => sumDocFreq == 0); - Debugging.Assert(() => docCount == 0); + Debugging.Assert(sumTotalTermFreq == 0 || fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY && sumTotalTermFreq == -1); + Debugging.Assert(sumDocFreq == 0); + Debugging.Assert(docCount == 0); } } } diff --git a/src/Lucene.Net/Codecs/CodecUtil.cs b/src/Lucene.Net/Codecs/CodecUtil.cs index 66980d925d..f78b5b9d6b 100644 --- a/src/Lucene.Net/Codecs/CodecUtil.cs +++ b/src/Lucene.Net/Codecs/CodecUtil.cs @@ -268,7 +268,7 @@ public static long ChecksumEntireFile(IndexInput input) IndexInput clone = (IndexInput)input.Clone(); clone.Seek(0); ChecksumIndexInput @in = new BufferedChecksumIndexInput(clone); - if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.GetFilePointer() == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(@in.GetFilePointer() == 0); @in.Seek(@in.Length - FooterLength()); return CheckFooter(@in); } diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs index a87b9187d3..21b7f2511a 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs @@ -106,7 +106,7 @@ private void Reset() private void WriteBlock() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => blockChunks > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(blockChunks > 0); fieldsIndexOut.WriteVInt32(blockChunks); // The trick here is that we only store the difference from the average start @@ -144,7 +144,7 @@ private void WriteBlock() for (int i = 0; i < blockChunks; ++i) { long delta = docBase - avgChunkDocs * i; - if (Debugging.AssertsEnabled) 
Debugging.Assert(() => PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue); writer.Add(MoveSignToLowOrderBit(delta)); docBase += docBaseDeltas[i]; } @@ -179,7 +179,7 @@ private void WriteBlock() { startPointer += startPointerDeltas[i]; long delta = startPointer - avgChunkSize * i; - if (Debugging.AssertsEnabled) Debugging.Assert(() => PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(PackedInt32s.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue); writer.Add(MoveSignToLowOrderBit(delta)); } writer.Finish(); @@ -197,7 +197,7 @@ internal void WriteIndex(int numDocs, long startPointer) { firstStartPointer = maxStartPointer = startPointer; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => firstStartPointer > 0 && startPointer >= firstStartPointer); + if (Debugging.AssertsEnabled) Debugging.Assert(firstStartPointer > 0 && startPointer >= firstStartPointer); docBaseDeltas[blockChunks] = numDocs; startPointerDeltas[blockChunks] = startPointer - maxStartPointer; diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs index b741c66f18..e657a6f1ad 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs @@ -101,7 +101,7 @@ public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segment indexStream = d.OpenChecksumInput(indexStreamFN, context); string codecNameIdx = formatName + CompressingStoredFieldsWriter.CODEC_SFX_IDX; version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT); - if (Debugging.AssertsEnabled) Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + if (Debugging.AssertsEnabled) Debugging.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); indexReader = new CompressingStoredFieldsIndexReader(indexStream, si); long maxPointer = -1; @@ -140,7 +140,7 @@ public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segment { throw new CorruptIndexException("Version mismatch between stored fields index and data: " + version + " != " + fieldsVersion); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); + if (Debugging.AssertsEnabled) Debugging.Assert(CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS) { @@ -335,8 +335,8 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => chunkSize > 0); - Debugging.Assert(() => offset < chunkSize); + Debugging.Assert(chunkSize > 0); + Debugging.Assert(offset < chunkSize); } decompressor.Decompress(fieldsStream, chunkSize, offset, Math.Min(length, chunkSize - offset), bytes); @@ -346,7 +346,7 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor) { BytesRef bytes = totalLength <= BUFFER_REUSE_THRESHOLD ? 
this.bytes : new BytesRef(); decompressor.Decompress(fieldsStream, totalLength, offset, length, bytes); - if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes.Length == length); + if (Debugging.AssertsEnabled) Debugging.Assert(bytes.Length == length); documentInput = new ByteArrayDataInput(bytes.Bytes, bytes.Offset, bytes.Length); } @@ -357,7 +357,7 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor) FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber); int bits = (int)(infoAndBits & CompressingStoredFieldsWriter.TYPE_MASK); - if (Debugging.AssertsEnabled) Debugging.Assert(() => bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE, () => "bits=" + bits.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE, () => "bits=" + bits.ToString("x")); switch (visitor.NeedsField(fieldInfo)) { @@ -394,7 +394,7 @@ public DataInputAnonymousInnerClassHelper(CompressingStoredFieldsReader outerIns internal virtual void FillBuffer() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => decompressed <= length); + if (Debugging.AssertsEnabled) Debugging.Assert(decompressed <= length); if (decompressed == length) { throw new Exception(); @@ -492,7 +492,7 @@ internal int ChunkSize() /// internal void Next(int doc) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => doc >= this.docBase + this.chunkDocs, () => doc + " " + this.docBase + " " + this.chunkDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(doc >= this.docBase + this.chunkDocs, () => doc + " " + this.docBase + " " + this.chunkDocs); fieldsStream.Seek(outerInstance.indexReader.GetStartPointer(doc)); int docBase = fieldsStream.ReadVInt32(); @@ -591,7 +591,7 @@ internal void Decompress() /// internal void CopyCompressedData(DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.Version == CompressingStoredFieldsWriter.VERSION_CURRENT); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.Version == CompressingStoredFieldsWriter.VERSION_CURRENT); long chunkEnd = docBase + chunkDocs == outerInstance.numDocs ? outerInstance.maxPointer : outerInstance.indexReader.GetStartPointer(docBase + chunkDocs); @out.CopyBytes(fieldsStream, chunkEnd - fieldsStream.GetFilePointer()); } diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs index 025efa393f..3791f741f7 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs @@ -91,7 +91,7 @@ public sealed class CompressingStoredFieldsWriter : StoredFieldsWriter /// Sole constructor. 
public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => directory != null); + if (Debugging.AssertsEnabled) Debugging.Assert(directory != null); this.directory = directory; this.segment = si.Name; this.segmentSuffix = segmentSuffix; @@ -116,8 +116,8 @@ public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string CodecUtil.WriteHeader(fieldsStream, codecNameDat, VERSION_CURRENT); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + Debugging.Assert(CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer()); + Debugging.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); } indexWriter = new CompressingStoredFieldsIndexWriter(indexStream); @@ -181,7 +181,7 @@ public override void FinishDocument() /// private static void SaveInt32s(int[] values, int length, DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(length > 0); if (length == 1) { @out.WriteVInt32(values[0]); @@ -249,7 +249,7 @@ private void Flush() for (int i = numBufferedDocs - 1; i > 0; --i) { lengths[i] = endOffsets[i] - endOffsets[i - 1]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => lengths[i] >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(lengths[i] >= 0); } WriteHeader(docBase, numBufferedDocs, numStoredFields, lengths); @@ -377,7 +377,7 @@ public override void Finish(FieldInfos fis, int numDocs) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferedDocs.Length == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(bufferedDocs.Length == 0); } if (docBase != numDocs) { @@ -385,7 +385,7 @@ public override void Finish(FieldInfos fis, int numDocs) } indexWriter.Finish(numDocs, fieldsStream.GetFilePointer()); CodecUtil.WriteFooter(fieldsStream); - if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferedDocs.Length == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(bufferedDocs.Length == 0); } [MethodImpl(MethodImplOptions.NoInlining)] @@ -446,7 +446,7 @@ public override int Merge(MergeState mergeState) if (numBufferedDocs == 0 && startOffsets[it.chunkDocs - 1] < chunkSize && startOffsets[it.chunkDocs - 1] + it.lengths[it.chunkDocs - 1] >= chunkSize && NextDeletedDoc(it.docBase, liveDocs, it.docBase + it.chunkDocs) == it.docBase + it.chunkDocs) // no deletion in the chunk - chunk is large enough - chunk is small enough - starting a new chunk { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docID == it.docBase); + if (Debugging.AssertsEnabled) Debugging.Assert(docID == it.docBase); // no need to decompress, just copy data indexWriter.WriteIndex(it.chunkDocs, fieldsStream.GetFilePointer()); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs index b1dd11a1d4..bce8eae515 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs @@ -78,7 +78,7 @@ public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentS indexStream = d.OpenChecksumInput(indexStreamFN, context); string codecNameIdx = 
formatName + CompressingTermVectorsWriter.CODEC_SFX_IDX; version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingTermVectorsWriter.VERSION_START, CompressingTermVectorsWriter.VERSION_CURRENT); - if (Debugging.AssertsEnabled) Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + if (Debugging.AssertsEnabled) Debugging.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); indexReader = new CompressingStoredFieldsIndexReader(indexStream, si); if (version >= CompressingTermVectorsWriter.VERSION_CHECKSUM) @@ -104,7 +104,7 @@ public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentS { throw new Exception("Version mismatch between stored fields index and data: " + version + " != " + version2); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); + if (Debugging.AssertsEnabled) Debugging.Assert(CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); packedIntsVersion = vectorsStream.ReadVInt32(); chunkSize = vectorsStream.ReadVInt32(); @@ -216,7 +216,7 @@ public override Fields Get(int doc) int[] fieldNums; { int token = vectorsStream.ReadByte() & 0xFF; - if (Debugging.AssertsEnabled) Debugging.Assert(() => token != 0); // means no term vectors, cannot happen since we checked for numFields == 0 + if (Debugging.AssertsEnabled) Debugging.Assert(token != 0); // means no term vectors, cannot happen since we checked for numFields == 0 int bitsPerFieldNum = token & 0x1F; int totalDistinctFields = (int)((uint)token >> 5); if (totalDistinctFields == 0x07) @@ -246,7 +246,7 @@ public override Fields Get(int doc) for (int i = 0; i < totalFields; ++i) { int fieldNumOff = (int)allFieldNumOffs.Get(i); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldNumOff >= 0 && fieldNumOff < fieldNums.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldNumOff >= 0 && fieldNumOff < fieldNums.Length); int fgs = (int)fieldFlags.Get(fieldNumOff); f.Set(i, fgs); } @@ -383,7 +383,7 @@ public override Fields Get(int doc) totalPayloads += freq; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => i != totalFields - 1 || termIndex == totalTerms, () => termIndex + " " + totalTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(i != totalFields - 1 || termIndex == totalTerms, () => termIndex + " " + totalTerms); } int[][] positionIndex = PositionIndex(skip, numFields, numTerms, termFreqs); @@ -516,7 +516,7 @@ public override Fields Get(int doc) ++posIdx; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => posIdx == totalFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(posIdx == totalFreq); } termIndex += termCount; } @@ -538,7 +538,7 @@ public override Fields Get(int doc) } termIndex += termCount; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => termIndex == totalTerms, () => termIndex + " " + totalTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(termIndex == totalTerms, () => termIndex + " " + totalTerms); } // decompress data @@ -577,7 +577,7 @@ public override Fields Get(int doc) } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => Sum(fieldLengths) == docLen, () => Sum(fieldLengths) + " != " + docLen); + if (Debugging.AssertsEnabled) Debugging.Assert(Sum(fieldLengths) == docLen, () => Sum(fieldLengths) + " != " + docLen); return new TVFields(this, fieldNums, FieldFlags, fieldNumOffs, fieldNumTerms, fieldLengths, prefixLengths, suffixLengths, fieldTermFreqs, positionIndex, 
positions, startOffsets, lengths, payloadBytes, payloadIndex, suffixBytes); } @@ -732,7 +732,7 @@ public override Terms GetTerms(string field) break; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldLen >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldLen >= 0); return new TVTerms(outerInstance, numTerms[idx], fieldFlags[idx], prefixLengths[idx], suffixLengths[idx], termFreqs[idx], positionIndex[idx], positions[idx], startOffsets[idx], lengths[idx], payloadIndex[idx], payloadBytes, new BytesRef(suffixBytes.Bytes, suffixBytes.Offset + fieldOff, fieldLen)); } @@ -843,7 +843,7 @@ public override BytesRef Next() } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord < numTerms); + if (Debugging.AssertsEnabled) Debugging.Assert(ord < numTerms); ++ord; } diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs index 04ea6cf52c..43d8acb46e 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs @@ -245,7 +245,7 @@ internal virtual void AddPosition(int position, int startOffset, int length, int /// Sole constructor. public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => directory != null); + if (Debugging.AssertsEnabled) Debugging.Assert(directory != null); this.directory = directory; this.segment = si.Name; this.segmentSuffix = segmentSuffix; @@ -271,8 +271,8 @@ public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string CodecUtil.WriteHeader(vectorsStream, codecNameDat, VERSION_CURRENT); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); - Debugging.Assert(() => CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); + Debugging.Assert(CodecUtil.HeaderLength(codecNameDat) == vectorsStream.GetFilePointer()); + Debugging.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer()); } indexWriter = new CompressingStoredFieldsIndexWriter(indexStream); @@ -354,7 +354,7 @@ public override void FinishField() public override void StartTerm(BytesRef term, int freq) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => freq >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(freq >= 1); int prefix = StringHelper.BytesDifference(lastTerm, term); curField.AddTerm(freq, prefix, term.Length - prefix); termSuffixes.WriteBytes(term.Bytes, term.Offset + prefix, term.Length - prefix); @@ -370,7 +370,7 @@ public override void StartTerm(BytesRef term, int freq) public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => curField.flags != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(curField.flags != 0); curField.AddPosition(position, startOffset, endOffset - startOffset, payload == null ? 
0 : payload.Length); if (curField.hasPayloads && payload != null) { @@ -387,7 +387,7 @@ private bool TriggerFlush() private void Flush() { int chunkDocs = pendingDocs.Count; - if (Debugging.AssertsEnabled) Debugging.Assert(() => chunkDocs > 0, chunkDocs.ToString); + if (Debugging.AssertsEnabled) Debugging.Assert(chunkDocs > 0, chunkDocs.ToString); // write the index file indexWriter.WriteIndex(chunkDocs, vectorsStream.GetFilePointer()); @@ -467,7 +467,7 @@ private int[] FlushFieldNums() } int numDistinctFields = fieldNums.Count; - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDistinctFields > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numDistinctFields > 0); int bitsRequired = PackedInt32s.BitsRequired(fieldNums.Max); int token = (Math.Min(numDistinctFields - 1, 0x07) << 5) | bitsRequired; vectorsStream.WriteByte((byte)(sbyte)token); @@ -499,7 +499,7 @@ private void FlushFields(int totalFields, int[] fieldNums) foreach (FieldData fd in dd.fields) { int fieldNumIndex = Array.BinarySearch(fieldNums, fd.fieldNum); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldNumIndex >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldNumIndex >= 0); writer.Add(fieldNumIndex); } } @@ -519,7 +519,7 @@ private void FlushFlags(int totalFields, int[] fieldNums) foreach (FieldData fd in dd.fields) { int fieldNumOff = Array.BinarySearch(fieldNums, fd.fieldNum); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldNumOff >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldNumOff >= 0); if (fieldFlags[fieldNumOff] == -1) { fieldFlags[fieldNumOff] = fd.flags; @@ -541,10 +541,10 @@ private void FlushFlags(int totalFields, int[] fieldNums) PackedInt32s.Writer writer = PackedInt32s.GetWriterNoHeader(vectorsStream, PackedInt32s.Format.PACKED, fieldFlags.Length, FLAGS_BITS, 1); foreach (int flags in fieldFlags) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => flags >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(flags >= 0); writer.Add(flags); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => writer.Ord == fieldFlags.Length - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(writer.Ord == fieldFlags.Length - 1); writer.Finish(); } else @@ -559,7 +559,7 @@ private void FlushFlags(int totalFields, int[] fieldNums) writer.Add(fd.flags); } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => writer.Ord == totalFields - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(writer.Ord == totalFields - 1); writer.Finish(); } } @@ -584,7 +584,7 @@ private void FlushNumTerms(int totalFields) writer.Add(fd.numTerms); } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => writer.Ord == totalFields - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(writer.Ord == totalFields - 1); writer.Finish(); } @@ -652,7 +652,7 @@ private void FlushPositions() previousPosition = position; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos == fd.totalPositions); + if (Debugging.AssertsEnabled) Debugging.Assert(pos == fd.totalPositions); } } } @@ -688,7 +688,7 @@ private void FlushOffsets(int[] fieldNums) ++pos; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos == fd.totalPositions); + if (Debugging.AssertsEnabled) Debugging.Assert(pos == fd.totalPositions); } } } @@ -756,7 +756,7 @@ private void FlushOffsets(int[] fieldNums) writer.Add(lengthsBuf[fd.offStart + pos++] - fd.prefixLengths[i] - fd.suffixLengths[i]); } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos == fd.totalPositions); + if (Debugging.AssertsEnabled) 
Debugging.Assert(pos == fd.totalPositions); } } } @@ -802,8 +802,8 @@ public override void AddProx(int numProx, DataInput positions, DataInput offsets { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => (curField.hasPositions) == (positions != null)); - Debugging.Assert(() => (curField.hasOffsets) == (offsets != null)); + Debugging.Assert((curField.hasPositions) == (positions != null)); + Debugging.Assert((curField.hasOffsets) == (offsets != null)); } if (curField.hasPositions) @@ -926,7 +926,7 @@ public override int Merge(MergeState mergeState) { int docBase = vectorsStream.ReadVInt32(); int chunkDocs = vectorsStream.ReadVInt32(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => docBase + chunkDocs <= matchingSegmentReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(docBase + chunkDocs <= matchingSegmentReader.MaxDoc); if (docBase + chunkDocs < matchingSegmentReader.MaxDoc && NextDeletedDoc(docBase, liveDocs, docBase + chunkDocs) == docBase + chunkDocs) { long chunkEnd = index.GetStartPointer(docBase + chunkDocs); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs index 0801ce3100..e216d9acc7 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs @@ -152,7 +152,7 @@ public DecompressorAnonymousInnerClassHelper() public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => offset + length <= originalLength); + if (Debugging.AssertsEnabled) Debugging.Assert(offset + length <= originalLength); // add 7 padding bytes, this is not necessary but can help decompression run faster if (bytes.Bytes.Length < originalLength + 7) { @@ -212,7 +212,7 @@ internal DeflateDecompressor() public override void Decompress(DataInput input, int originalLength, int offset, int length, BytesRef bytes) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => offset + length <= originalLength); + if (Debugging.AssertsEnabled) Debugging.Assert(offset + length <= originalLength); if (length == 0) { bytes.Length = 0; @@ -275,7 +275,7 @@ public override void Compress(byte[] bytes, int off, int len, DataOutput output) if (resultArray.Length == 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => len == 0, len.ToString); + if (Debugging.AssertsEnabled) Debugging.Assert(len == 0, len.ToString); output.WriteVInt32(0); return; } diff --git a/src/Lucene.Net/Codecs/Compressing/LZ4.cs b/src/Lucene.Net/Codecs/Compressing/LZ4.cs index f48d040325..1889b914e2 100644 --- a/src/Lucene.Net/Codecs/Compressing/LZ4.cs +++ b/src/Lucene.Net/Codecs/Compressing/LZ4.cs @@ -76,7 +76,7 @@ private static bool ReadInt32Equals(byte[] buf, int i, int j) private static int CommonBytes(byte[] b, int o1, int o2, int limit) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => o1 < o2); + if (Debugging.AssertsEnabled) Debugging.Assert(o1 < o2); int count = 0; while (o2 < limit && b[o1++] == b[o2++]) { @@ -135,7 +135,7 @@ public static int Decompress(DataInput compressed, int decompressedLen, byte[] d var byte1 = compressed.ReadByte(); var byte2 = compressed.ReadByte(); int matchDec = (byte1 & 0xFF) | ((byte2 & 0xFF) << 8); - if (Debugging.AssertsEnabled) Debugging.Assert(() => matchDec > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(matchDec > 0); int matchLen = token & 0x0F; if (matchLen == 0x0F) @@ -203,14 +203,14 @@ private static void EncodeLastLiterals(byte[] bytes, 
int anchor, int literalLen, private static void EncodeSequence(byte[] bytes, int anchor, int matchRef, int matchOff, int matchLen, DataOutput @out) { int literalLen = matchOff - anchor; - if (Debugging.AssertsEnabled) Debugging.Assert(() => matchLen >= 4); + if (Debugging.AssertsEnabled) Debugging.Assert(matchLen >= 4); // encode token int token = (Math.Min(literalLen, 0x0F) << 4) | Math.Min(matchLen - 4, 0x0F); EncodeLiterals(bytes, token, anchor, literalLen, @out); // encode match dec int matchDec = matchOff - matchRef; - if (Debugging.AssertsEnabled) Debugging.Assert(() => matchDec > 0 && matchDec < 1 << 16); + if (Debugging.AssertsEnabled) Debugging.Assert(matchDec > 0 && matchDec < 1 << 16); @out.WriteByte((byte)(sbyte)matchDec); @out.WriteByte((byte)(sbyte)((int)((uint)matchDec >> 8))); @@ -275,7 +275,7 @@ public static void Compress(byte[] bytes, int off, int len, DataOutput @out, Has int v = ReadInt32(bytes, off); int h = Hash(v, hashLog); @ref = @base + (int)hashTable.Get(h); - if (Debugging.AssertsEnabled) Debugging.Assert(() => PackedInt32s.BitsRequired(off - @base) <= hashTable.BitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(PackedInt32s.BitsRequired(off - @base) <= hashTable.BitsPerValue); hashTable.Set(h, off - @base); if (off - @ref < MAX_DISTANCE && ReadInt32(bytes, @ref) == v) { @@ -297,7 +297,7 @@ public static void Compress(byte[] bytes, int off, int len, DataOutput @out, Has // last literals int literalLen = end - anchor; - if (Debugging.AssertsEnabled) Debugging.Assert(() => literalLen >= LAST_LITERALS || literalLen == len); + if (Debugging.AssertsEnabled) Debugging.Assert(literalLen >= LAST_LITERALS || literalLen == len); EncodeLastLiterals(bytes, anchor, end - anchor, @out); } @@ -365,7 +365,7 @@ private void AddHash(byte[] bytes, int off) int v = ReadInt32(bytes, off); int h = HashHC(v); int delta = off - hashTable[h]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => delta > 0, delta.ToString); + if (Debugging.AssertsEnabled) Debugging.Assert(delta > 0, delta.ToString); if (delta >= MAX_DISTANCE) { delta = MAX_DISTANCE - 1; @@ -513,7 +513,7 @@ public static void CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou while (true) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => match1.start >= anchor); + if (Debugging.AssertsEnabled) Debugging.Assert(match1.start >= anchor); if (match1.End() >= mfLimit || !ht.InsertAndFindWiderMatch(src, match1.End() - 2, match1.start + 1, matchLimit, match1.len, match2)) { // no better match @@ -529,7 +529,7 @@ public static void CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou CopyTo(match0, match1); } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => match2.start > match1.start); + if (Debugging.AssertsEnabled) Debugging.Assert(match2.start > match1.start); if (match2.start - match1.start < 3) // First Match too small : removed { diff --git a/src/Lucene.Net/Codecs/DocValuesConsumer.cs b/src/Lucene.Net/Codecs/DocValuesConsumer.cs index d1ac158bce..9a0cbdd9ec 100644 --- a/src/Lucene.Net/Codecs/DocValuesConsumer.cs +++ b/src/Lucene.Net/Codecs/DocValuesConsumer.cs @@ -486,7 +486,7 @@ private IEnumerable GetMergeSortedSetValuesEnumerable(OrdinalMap map, if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpto)) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docIDUpto < currentReader.MaxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(docIDUpto < currentReader.MaxDoc); SortedSetDocValues dv = dvs[readerUpto]; dv.SetDocument(docIDUpto); ordUpto = ordLength = 0; 
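A note for readers of these hunks: every converted call site now targets bool-based Assert overloads on Lucene.Net.Diagnostics.Debugging, always behind an AssertsEnabled guard. The sketch below shows the shape those overloads must have, as inferred from the call sites in this patch; parameter names and comments are illustrative rather than the verbatim source, and the AssertionException constructor overloads are assumed:

    using System;
    using System.Runtime.CompilerServices;

    namespace Lucene.Net.Diagnostics
    {
        internal static class Debugging
        {
            // Toggled at startup (e.g. from the "assert" system property); call
            // sites guard with this flag so that neither the condition nor the
            // message is ever evaluated when asserts are turned off.
            public static bool AssertsEnabled { get; set; }

            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public static void Assert(bool condition)
            {
                if (AssertsEnabled && !condition)
                    throw new AssertionException();
            }

            // The failure message stays deferred behind a Func<string>, so the
            // string concatenation is only paid for when the assert fails. Method
            // groups such as chunkDocs.ToString also satisfy this parameter.
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public static void Assert(bool condition, Func<string> messageFactory)
            {
                if (AssertsEnabled && !condition)
                    throw new AssertionException(messageFactory());
            }
        }
    }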
@@ -516,7 +516,7 @@ internal class BitsFilteredTermsEnum : FilteredTermsEnum internal BitsFilteredTermsEnum(TermsEnum @in, Int64BitSet liveTerms) : base(@in, false) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => liveTerms != null); + if (Debugging.AssertsEnabled) Debugging.Assert(liveTerms != null); this.liveTerms = liveTerms; } diff --git a/src/Lucene.Net/Codecs/FieldsConsumer.cs b/src/Lucene.Net/Codecs/FieldsConsumer.cs index a39d96f7b8..9c1bd14b90 100644 --- a/src/Lucene.Net/Codecs/FieldsConsumer.cs +++ b/src/Lucene.Net/Codecs/FieldsConsumer.cs @@ -87,7 +87,7 @@ public virtual void Merge(MergeState mergeState, Fields fields) foreach (string field in fields) { FieldInfo info = mergeState.FieldInfos.FieldInfo(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => info != null, () => "FieldInfo for field is null: " + field); + if (Debugging.AssertsEnabled) Debugging.Assert(info != null, () => "FieldInfo for field is null: " + field); Terms terms = fields.GetTerms(field); if (terms != null) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs index e4ffdbf322..b37a77d0f6 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs @@ -164,7 +164,7 @@ public override int Count { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => preTerms.Count == fields.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(preTerms.Count == fields.Count); return fields.Count; } } @@ -249,7 +249,7 @@ public override bool HasOffsets get { // preflex doesn't support this - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) < 0); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) < 0); return false; } } @@ -304,11 +304,11 @@ private bool SeekToNonBMP(SegmentTermEnum te, BytesRef term, int pos) { int savLength = term.Length; - if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(term.Offset == 0); // The 3 bytes starting at downTo make up 1 // unicode character: - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsHighBMPChar(term.Bytes, pos)); + if (Debugging.AssertsEnabled) Debugging.Assert(IsHighBMPChar(term.Bytes, pos)); // NOTE: we cannot make this assert, because // AutomatonQuery legitimately sends us malformed UTF8 @@ -361,7 +361,7 @@ private bool SeekToNonBMP(SegmentTermEnum te, BytesRef term, int pos) // Now test if prefix is identical and we found // a non-BMP char at the same position: BytesRef b2 = t2.Bytes; - if (Debugging.AssertsEnabled) Debugging.Assert(() => b2.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(b2.Offset == 0); bool matches; if (b2.Length >= term.Length && IsNonBMPChar(b2.Bytes, pos)) @@ -467,8 +467,8 @@ private bool DoPop() if (Debugging.AssertsEnabled) { - Debugging.Assert(() => newSuffixStart <= prevTerm.Length); - Debugging.Assert(() => newSuffixStart < scratchTerm.Length || newSuffixStart == 0); + Debugging.Assert(newSuffixStart <= prevTerm.Length); + Debugging.Assert(newSuffixStart < scratchTerm.Length || newSuffixStart == 0); } if (prevTerm.Length > newSuffixStart && IsNonBMPChar(prevTerm.Bytes, newSuffixStart) && IsHighBMPChar(scratchTerm.Bytes, newSuffixStart)) @@ -498,7 +498,7 @@ private bool DoPop() } BytesRef b2 = t2.Bytes; - if (Debugging.AssertsEnabled) 
Debugging.Assert(() => b2.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(b2.Offset == 0); // Set newSuffixStart -- we can't use // termEnum's since the above seek may have @@ -601,8 +601,8 @@ private void SurrogateDance() // always use BytesRef.offset == 0 if (Debugging.AssertsEnabled) { - Debugging.Assert(() => prevTerm.Offset == 0); - Debugging.Assert(() => scratchTerm.Offset == 0); + Debugging.Assert(prevTerm.Offset == 0); + Debugging.Assert(scratchTerm.Offset == 0); } // Need to loop here because we may need to do multiple @@ -656,7 +656,7 @@ private void DoPushes() if (IsNonBMPChar(scratchTerm.Bytes, upTo) && (upTo > newSuffixStart || (upTo >= prevTerm.Length || (!IsNonBMPChar(prevTerm.Bytes, upTo) && !IsHighBMPChar(prevTerm.Bytes, upTo))))) { // A non-BMP char (4 bytes UTF8) starts here: - if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchTerm.Length >= upTo + 4); + if (Debugging.AssertsEnabled) Debugging.Assert(scratchTerm.Length >= upTo + 4); int savLength = scratchTerm.Length; scratch[0] = (sbyte)scratchTerm.Bytes[upTo]; @@ -704,7 +704,7 @@ private void DoPushes() if (t2 != null && t2.Field == internedFieldName) { BytesRef b2 = t2.Bytes; - if (Debugging.AssertsEnabled) Debugging.Assert(() => b2.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(b2.Offset == 0); if (b2.Length >= upTo + 3 && IsHighBMPChar(b2.Bytes, upTo)) { matches = true; @@ -829,7 +829,7 @@ public override SeekStatus SeekCeil(BytesRef term) TermInfosReader tis = outerInstance.TermsDict; Term t0 = new Term(fieldInfo.Name, term); - if (Debugging.AssertsEnabled) Debugging.Assert(() => termEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(termEnum != null); tis.SeekEnum(termEnum, t0, false); @@ -860,7 +860,7 @@ public override SeekStatus SeekCeil(BytesRef term) // find an E, try swapping in S, backwards: scratchTerm.CopyBytes(term); - if (Debugging.AssertsEnabled) Debugging.Assert(() => scratchTerm.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(scratchTerm.Offset == 0); for (int i = scratchTerm.Length - 1; i >= 0; i--) { @@ -909,7 +909,7 @@ public override SeekStatus SeekCeil(BytesRef term) } BytesRef br = t.Bytes; - if (Debugging.AssertsEnabled) Debugging.Assert(() => br.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(br.Offset == 0); SetNewSuffixStart(term, br); @@ -919,14 +919,14 @@ public override SeekStatus SeekCeil(BytesRef term) if (t2 == null || t2.Field != internedFieldName) { // PreFlex codec interns field names; verify: - if (Debugging.AssertsEnabled) Debugging.Assert(() => t2 == null || !t2.Field.Equals(internedFieldName, StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(t2 == null || !t2.Field.Equals(internedFieldName, StringComparison.Ordinal)); current = null; return SeekStatus.END; } else { current = t2.Bytes; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !unicodeSortOrder || term.CompareTo(current) < 0, () => "term=" + UnicodeUtil.ToHexString(term.Utf8ToString()) + " vs current=" + UnicodeUtil.ToHexString(current.Utf8ToString())); + if (Debugging.AssertsEnabled) Debugging.Assert(!unicodeSortOrder || term.CompareTo(current) < 0, () => "term=" + UnicodeUtil.ToHexString(term.Utf8ToString()) + " vs current=" + UnicodeUtil.ToHexString(current.Utf8ToString())); return SeekStatus.NOT_FOUND; } } @@ -1002,7 +1002,7 @@ public override BytesRef Next() if (t == null || t.Field != internedFieldName) { // PreFlex codec interns field names; verify: - if (Debugging.AssertsEnabled) Debugging.Assert(() 
=> t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); current = null; } else @@ -1027,7 +1027,7 @@ public override BytesRef Next() if (t == null || t.Field != internedFieldName) { // PreFlex codec interns field names; verify: - if (Debugging.AssertsEnabled) Debugging.Assert(() => t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(t == null || !t.Field.Equals(internedFieldName, StringComparison.Ordinal)); return null; } else @@ -1196,7 +1196,7 @@ public override int Advance(int target) public override int NextPosition() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docID != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(docID != NO_MORE_DOCS); return pos.NextPosition(); } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs index 36cb64e3e5..bcc354139a 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs @@ -136,7 +136,7 @@ public Lucene3xNormsProducer(Directory dir, SegmentInfo info, FieldInfos fields, } } // TODO: change to a real check? see LUCENE-3619 - if (Debugging.AssertsEnabled) Debugging.Assert(() => singleNormStream == null || nextNormSeek == singleNormStream.Length, () => singleNormStream != null ? "len: " + singleNormStream.Length + " expected: " + nextNormSeek : "null"); + if (Debugging.AssertsEnabled) Debugging.Assert(singleNormStream == null || nextNormSeek == singleNormStream.Length, () => singleNormStream != null ? "len: " + singleNormStream.Length + " expected: " + nextNormSeek : "null"); success = true; } finally @@ -188,7 +188,7 @@ private static bool HasSeparateNorms(SegmentInfo info, int number) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Convert.ToInt64(v, CultureInfo.InvariantCulture) != SegmentInfo.NO); + if (Debugging.AssertsEnabled) Debugging.Assert(Convert.ToInt64(v, CultureInfo.InvariantCulture) != SegmentInfo.NO); return true; } } @@ -258,7 +258,7 @@ public override long Get(int docID) public override NumericDocValues GetNumeric(FieldInfo field) { var dv = norms[field.Name]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => dv != null); + if (Debugging.AssertsEnabled) Debugging.Assert(dv != null); return dv.Instance; } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs index 9b12c54f16..147f178233 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs @@ -193,7 +193,7 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index //System.out.println("version=" + version + " name=" + name + " docCount=" + docCount + " delGen=" + delGen + " dso=" + docStoreOffset + " dss=" + docStoreSegment + " dssCFs=" + docStoreIsCompoundFile + " b=" + b + " format=" + format); - if (Debugging.AssertsEnabled) Debugging.Assert(() => 1 == b, () => "expected 1 but was: " + b + " format: " + format); + if (Debugging.AssertsEnabled) Debugging.Assert(1 == b, () => "expected 1 but was: " + b + " format: " + format); int numNormGen = input.ReadInt32(); IDictionary normGen; if (numNormGen == SegmentInfo.NO) @@ -211,7 +211,7 @@ private SegmentCommitInfo 
ReadLegacySegmentInfo(Directory dir, int format, Index bool isCompoundFile = input.ReadByte() == SegmentInfo.YES; int delCount = input.ReadInt32(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount <= docCount); + if (Debugging.AssertsEnabled) Debugging.Assert(delCount <= docCount); bool hasProx = input.ReadByte() == 1; @@ -283,7 +283,7 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index else { // We should have already hit indexformat too old exception - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(false); } } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs index 95649dd0a3..f5b09aa413 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs @@ -193,7 +193,7 @@ public Lucene3xStoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IO // Verify the file is long enough to hold all of our // docs - if (Debugging.AssertsEnabled) Debugging.Assert(() => ((int)(indexSize / 8)) >= size + this.docStoreOffset, () => "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, () => "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset); } else { @@ -273,7 +273,7 @@ public override sealed void VisitDocument(int n, StoredFieldVisitor visitor) FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber); int bits = fieldsStream.ReadByte() & 0xFF; - if (Debugging.AssertsEnabled) Debugging.Assert(() => bits <= (FIELD_IS_NUMERIC_MASK | FIELD_IS_BINARY), () => "bits=" + bits.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(bits <= (FIELD_IS_NUMERIC_MASK | FIELD_IS_BINARY), () => "bits=" + bits.ToString("x")); switch (visitor.NeedsField(fieldInfo)) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs index fd12bbaf81..5e23d2cbce 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs @@ -142,8 +142,8 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn if (Debugging.AssertsEnabled) { - Debugging.Assert(() => format == tvdFormat); - Debugging.Assert(() => format == tvfFormat); + Debugging.Assert(format == tvdFormat); + Debugging.Assert(format == tvfFormat); } numTotalDocs = (int)(tvx.Length >> 4); @@ -152,7 +152,7 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn { this.docStoreOffset = 0; this.size = numTotalDocs; - if (Debugging.AssertsEnabled) Debugging.Assert(() => size == 0 || numTotalDocs == size); + if (Debugging.AssertsEnabled) Debugging.Assert(size == 0 || numTotalDocs == size); } else { @@ -160,7 +160,7 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn this.size = size; // Verify the file is long enough to hold all of our // docs - if (Debugging.AssertsEnabled) Debugging.Assert(() => numTotalDocs >= size + docStoreOffset, () => "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset); + if (Debugging.AssertsEnabled) Debugging.Assert(numTotalDocs >= size + docStoreOffset, () => "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + 
docStoreOffset); } this.fieldInfos = fieldInfos; @@ -236,7 +236,7 @@ public TVFields(Lucene3xTermVectorsReader outerInstance, int docID) outerInstance.tvd.Seek(outerInstance.tvx.ReadInt64()); int fieldCount = outerInstance.tvd.ReadVInt32(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount >= 0); if (fieldCount != 0) { fieldNumbers = new int[fieldCount]; @@ -689,7 +689,7 @@ public override int Freq } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffsets != null); + if (Debugging.AssertsEnabled) Debugging.Assert(startOffsets != null); return startOffsets.Length; } } @@ -740,7 +740,7 @@ public override BytesRef GetPayload() public override int NextPosition() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); + if (Debugging.AssertsEnabled) Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); if (positions != null) { diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs index 80fe7fa61e..1057564720 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs @@ -171,7 +171,7 @@ public virtual bool Next() else { freq = m_freqStream.ReadVInt32(); // else read freq - if (Debugging.AssertsEnabled) Debugging.Assert(() => freq != 1); + if (Debugging.AssertsEnabled) Debugging.Assert(freq != 1); } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs index 5bf10aa3ea..0d0848a397 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs @@ -107,8 +107,8 @@ public SegmentTermEnum(IndexInput i, FieldInfos fis, bool isi) maxSkipLevels = input.ReadInt32(); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => indexInterval > 0, () => "indexInterval=" + indexInterval + " is negative; must be > 0"); - Debugging.Assert(() => skipInterval > 0, () => "skipInterval=" + skipInterval + " is negative; must be > 0"); + Debugging.Assert(indexInterval > 0, () => "indexInterval=" + indexInterval + " is negative; must be > 0"); + Debugging.Assert(skipInterval > 0, () => "skipInterval=" + skipInterval + " is negative; must be > 0"); } } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs index c7dd0e2d21..f426967e2a 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs @@ -169,7 +169,7 @@ protected internal override void SkipProx(long proxPointer, int payloadLength) private void SkipPositions(int n) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + if (Debugging.AssertsEnabled) Debugging.Assert(m_indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); for (int f = n; f > 0; f--) // skip unread positions { ReadDeltaPosition(); diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs b/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs index de422f4775..e85fb50273 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs @@ -70,7 +70,7 @@ public void Read(IndexInput input, FieldInfos fieldInfos) newSuffixStart = 
input.ReadVInt32();
            int length = input.ReadVInt32();
            int totalLength = newSuffixStart + length;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => totalLength <= ByteBlockPool.BYTE_BLOCK_SIZE - 2, () => "termLength=" + totalLength + ",resource=" + input);
+            if (Debugging.AssertsEnabled) Debugging.Assert(totalLength <= ByteBlockPool.BYTE_BLOCK_SIZE - 2, () => "termLength=" + totalLength + ",resource=" + input);
             if (bytes.Bytes.Length < totalLength)
             {
                 bytes.Grow(totalLength);
@@ -88,14 +88,14 @@ public void Read(IndexInput input, FieldInfos fieldInfos)
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfos.FieldInfo(currentFieldNumber) != null, currentFieldNumber.ToString);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfos.FieldInfo(currentFieldNumber) != null, currentFieldNumber.ToString);
                     field = fieldInfos.FieldInfo(currentFieldNumber).Name.Intern();
                 }
             }
             else
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Equals(fieldInfos.FieldInfo(fieldNumber).Name, StringComparison.Ordinal),
+                if (Debugging.AssertsEnabled) Debugging.Assert(field.Equals(fieldInfos.FieldInfo(fieldNumber).Name, StringComparison.Ordinal),
                     () => "currentFieldNumber=" + currentFieldNumber + " field=" + field + " vs " + (fieldInfos.FieldInfo(fieldNumber) == null ? "null" : fieldInfos.FieldInfo(fieldNumber).Name));
diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
index fd148496ac..3b585716ce 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs
@@ -65,7 +65,7 @@ public sealed class TermInfoAndOrd : TermInfo
         public TermInfoAndOrd(TermInfo ti, long termOrd)
             : base(ti)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => termOrd >= 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(termOrd >= 0);
             this.termOrd = termOrd;
         }
     }
@@ -293,8 +293,8 @@ internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, TermInfoAndOrd
                     }
                     else if (Debugging.AssertsEnabled)
                     {
-                        Debugging.Assert(() => SameTermInfo(ti, tiOrd, enumerator));
-                        Debugging.Assert(() => (int)enumerator.position == tiOrd.termOrd);
+                        Debugging.Assert(SameTermInfo(ti, tiOrd, enumerator));
+                        Debugging.Assert((int)enumerator.position == tiOrd.termOrd);
                     }
                 }
             }
@@ -335,8 +335,8 @@ internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, TermInfoAndOrd
                 }
                 else if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => SameTermInfo(ti_, tiOrd, enumerator));
-                    Debugging.Assert(() => enumerator.position == tiOrd.termOrd);
+                    Debugging.Assert(SameTermInfo(ti_, tiOrd, enumerator));
+                    Debugging.Assert(enumerator.position == tiOrd.termOrd);
                 }
             }
             else
diff --git a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
index c68d8e489e..ddf6d168d8 100644
--- a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
@@ -126,7 +126,7 @@ public bool GetAndSet(int bit)
             if (count != -1)
             {
                 count++;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => count <= size);
+                if (Debugging.AssertsEnabled) Debugging.Assert(count <= size);
             }
             return false;
         }
@@ -163,7 +163,7 @@ public bool GetAndClear(int bit)
             if (count != -1)
             {
                 count--;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => count >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(count >= 0);
             }
             return true;
         }
@@ -175,7 +175,7 @@ public bool GetAndClear(int bit)
         ///
         public bool Get(int bit)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => bit >= 0 && bit < size, ()
=> "bit " + bit + " is out of bounds 0.." + (size - 1)); + if (Debugging.AssertsEnabled) Debugging.Assert(bit >= 0 && bit < size, () => "bit " + bit + " is out of bounds 0.." + (size - 1)); return (bits[bit >> 3] & (1 << (bit & 7))) != 0; } @@ -215,7 +215,7 @@ public int Count() // LUCENENET TODO: API - make into a property } count = c; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => count <= size, () => "count=" + count + " size=" + size); + if (Debugging.AssertsEnabled) Debugging.Assert(count <= size, () => "count=" + count + " size=" + size); return count; } @@ -259,7 +259,7 @@ public int GetRecomputedCount() /// public void Write(Directory d, string name, IOContext context) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !(d is CompoundFileDirectory)); + if (Debugging.AssertsEnabled) Debugging.Assert(!(d is CompoundFileDirectory)); IndexOutput output = d.CreateOutput(name, context); try { @@ -275,7 +275,7 @@ public void Write(Directory d, string name, IOContext context) WriteBits(output); } CodecUtil.WriteFooter(output); - if (Debugging.AssertsEnabled) Debugging.Assert(VerifyCount); + if (Debugging.AssertsEnabled) Debugging.Assert(VerifyCount()); } finally { @@ -351,7 +351,7 @@ private void WriteClearedDgaps(IndexOutput output) output.WriteByte(bits[i]); last = i; numCleared -= (8 - BitUtil.BitCount(bits[i])); - if (Debugging.AssertsEnabled) Debugging.Assert(() => numCleared >= 0 || (i == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); + if (Debugging.AssertsEnabled) Debugging.Assert(numCleared >= 0 || (i == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); } } } @@ -460,7 +460,7 @@ public BitVector(Directory d, string name, IOContext context) CodecUtil.CheckEOF(input); #pragma warning restore 612, 618 } - if (Debugging.AssertsEnabled) Debugging.Assert(VerifyCount); + if (Debugging.AssertsEnabled) Debugging.Assert(VerifyCount()); } finally { @@ -471,10 +471,10 @@ public BitVector(Directory d, string name, IOContext context) // asserts only private bool VerifyCount() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => count != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(count != -1); int countSav = count; count = -1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => countSav == Count(), () => "saved count was " + countSav + " but recomputed count is " + count); + if (Debugging.AssertsEnabled) Debugging.Assert(countSav == Count(), () => "saved count was " + countSav + " but recomputed count is " + count); return true; } @@ -501,7 +501,7 @@ private void ReadSetDgaps(IndexInput input) last += input.ReadVInt32(); bits[last] = input.ReadByte(); n -= BitUtil.BitCount(bits[last]); - if (Debugging.AssertsEnabled) Debugging.Assert(() => n >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(n >= 0); } } @@ -524,7 +524,7 @@ private void ReadClearedDgaps(IndexInput input) last += input.ReadVInt32(); bits[last] = input.ReadByte(); numCleared -= 8 - BitUtil.BitCount(bits[last]); - if (Debugging.AssertsEnabled) Debugging.Assert(() => numCleared >= 0 || (last == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); + if (Debugging.AssertsEnabled) Debugging.Assert(numCleared >= 0 || (last == (bits.Length - 1) && numCleared == -(8 - (size & 7)))); } } } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs index f0ef72a5c2..efb194059c 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs @@ -94,8 
+94,8 @@ public override IBits ReadLiveDocs(Directory dir, SegmentCommitInfo info, IOCont BitVector liveDocs = new BitVector(dir, filename, context); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => liveDocs.Count() == info.Info.DocCount - info.DelCount, () => "liveDocs.count()=" + liveDocs.Count() + " info.docCount=" + info.Info.DocCount + " info.getDelCount()=" + info.DelCount); - Debugging.Assert(() => liveDocs.Length == info.Info.DocCount); + Debugging.Assert(liveDocs.Count() == info.Info.DocCount - info.DelCount, () => "liveDocs.count()=" + liveDocs.Count() + " info.docCount=" + info.Info.DocCount + " info.getDelCount()=" + info.DelCount); + Debugging.Assert(liveDocs.Length == info.Info.DocCount); } return liveDocs; } @@ -107,8 +107,8 @@ public override void WriteLiveDocs(IMutableBits bits, Directory dir, SegmentComm BitVector liveDocs = (BitVector)bits; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => liveDocs.Count() == info.Info.DocCount - info.DelCount - newDelCount); - Debugging.Assert(() => liveDocs.Length == info.Info.DocCount); + Debugging.Assert(liveDocs.Count() == info.Info.DocCount - info.DelCount - newDelCount); + Debugging.Assert(liveDocs.Length == info.Info.DocCount); } liveDocs.Write(dir, filename, context); } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs index 65c6b31471..f374d976a3 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs @@ -235,7 +235,7 @@ private Lucene40PostingsFormat(int minBlockSize, int maxBlockSize) : base() { this.m_minBlockSize = minBlockSize; - if (Debugging.AssertsEnabled) Debugging.Assert(() => minBlockSize > 1); + if (Debugging.AssertsEnabled) Debugging.Assert(minBlockSize > 1); this.m_maxBlockSize = maxBlockSize; } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs index b6d75b11d5..2b4fdbb3dd 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs @@ -193,13 +193,13 @@ public override void DecodeTerm(long[] longs, DataInput @in, FieldInfo fieldInfo System.out.println(" freqFP=" + termState2.freqOffset); } */ - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState2.freqOffset < freqIn.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(termState2.freqOffset < freqIn.Length); if (termState2.DocFreq >= skipMinimum) { termState2.skipOffset = @in.ReadVInt64(); // if (DEBUG) System.out.println(" skipOffset=" + termState2.skipOffset + " vs freqIn.length=" + freqIn.length()); - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState2.freqOffset + termState2.skipOffset < freqIn.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(termState2.freqOffset + termState2.skipOffset < freqIn.Length); } else { @@ -356,7 +356,7 @@ internal virtual DocsEnum Reset(FieldInfo fieldInfo, StandardTermState termState // cases freqIn.Seek(termState.freqOffset); m_limit = termState.DocFreq; - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_limit > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(m_limit > 0); m_ord = 0; m_doc = -1; m_accum = 0; @@ -546,7 +546,7 @@ internal AllDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInput : base(outerInstance, startFreqIn, null) { this.outerInstance = outerInstance; - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_liveDocs == null); + if 
(Debugging.AssertsEnabled) Debugging.Assert(m_liveDocs == null); } public override int NextDoc() @@ -639,7 +639,7 @@ internal LiveDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInpu : base(outerInstance, startFreqIn, liveDocs) { this.outerInstance = outerInstance; - if (Debugging.AssertsEnabled) Debugging.Assert(() => liveDocs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(liveDocs != null); } public override int NextDoc() @@ -785,8 +785,8 @@ public SegmentDocsAndPositionsEnum Reset(FieldInfo fieldInfo, StandardTermState { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); - Debugging.Assert(() => !fieldInfo.HasPayloads); + Debugging.Assert(fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + Debugging.Assert(!fieldInfo.HasPayloads); } this.liveDocs = liveDocs; @@ -798,7 +798,7 @@ public SegmentDocsAndPositionsEnum Reset(FieldInfo fieldInfo, StandardTermState lazyProxPointer = termState.proxOffset; limit = termState.DocFreq; - if (Debugging.AssertsEnabled) Debugging.Assert(() => limit > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(limit > 0); ord = 0; doc = -1; @@ -933,7 +933,7 @@ public override int NextPosition() posPendingCount--; - if (Debugging.AssertsEnabled) Debugging.Assert(() => posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount); + if (Debugging.AssertsEnabled) Debugging.Assert(posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount); return position; } @@ -1008,8 +1008,8 @@ public virtual SegmentFullPositionsEnum Reset(FieldInfo fieldInfo, StandardTermS storePayloads = fieldInfo.HasPayloads; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0); - Debugging.Assert(() => storePayloads || storeOffsets); + Debugging.Assert(fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0); + Debugging.Assert(storePayloads || storeOffsets); } if (payload == null) { @@ -1166,9 +1166,9 @@ public override int NextPosition() { // new payload length payloadLength = proxIn.ReadVInt32(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(payloadLength >= 0); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(payloadLength != -1); } if (storeOffsets) @@ -1206,9 +1206,9 @@ public override int NextPosition() { // new payload length payloadLength = proxIn.ReadVInt32(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(payloadLength >= 0); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadLength != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(payloadLength != -1); payloadPending = true; code_ = (int)((uint)code_ >> 1); @@ -1228,7 +1228,7 @@ public override int NextPosition() posPendingCount--; - if (Debugging.AssertsEnabled) Debugging.Assert(() => posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount); + if (Debugging.AssertsEnabled) Debugging.Assert(posPendingCount >= 0, () => "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount); 
//System.out.println("StandardR.D&PE nextPos return pos=" + position); return position; @@ -1252,8 +1252,8 @@ public override BytesRef GetPayload() } if (Debugging.AssertsEnabled) { - Debugging.Assert(() => lazyProxPointer == -1); - Debugging.Assert(() => posPendingCount < freq); + Debugging.Assert(lazyProxPointer == -1); + Debugging.Assert(posPendingCount < freq); } if (payloadPending) diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs index d5d158cd1f..9e2775af6f 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs @@ -94,8 +94,8 @@ public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IO CodecUtil.CheckHeader(fieldsStream, Lucene40StoredFieldsWriter.CODEC_NAME_DAT, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == fieldsStream.GetFilePointer()); - Debugging.Assert(() => Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == indexStream.GetFilePointer()); + Debugging.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == fieldsStream.GetFilePointer()); + Debugging.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == indexStream.GetFilePointer()); } long indexSize = indexStream.Length - Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX; this.size = (int)(indexSize >> 3); @@ -177,7 +177,7 @@ public override void VisitDocument(int n, StoredFieldVisitor visitor) FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber); int bits = fieldsStream.ReadByte() & 0xFF; - if (Debugging.AssertsEnabled) Debugging.Assert(() => bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), () => "bits=" + bits.ToString("x")); + if (Debugging.AssertsEnabled) Debugging.Assert(bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), () => "bits=" + bits.ToString("x")); switch (visitor.NeedsField(fieldInfo)) { @@ -284,7 +284,7 @@ public IndexInput RawDocs(int[] lengths, int startDocID, int numDocs) { long offset; int docID = startDocID + count + 1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docID <= numTotalDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(docID <= numTotalDocs); if (docID < numTotalDocs) { offset = indexStream.ReadInt64(); diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs index d4e1ada0ee..116afebf24 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs @@ -91,7 +91,7 @@ public sealed class Lucene40StoredFieldsWriter : StoredFieldsWriter /// Sole constructor. 
public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext context) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => directory != null); + if (Debugging.AssertsEnabled) Debugging.Assert(directory != null); this.directory = directory; this.segment = segment; @@ -105,8 +105,8 @@ public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext CodecUtil.WriteHeader(indexStream, CODEC_NAME_IDX, VERSION_CURRENT); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => HEADER_LENGTH_DAT == fieldsStream.GetFilePointer()); - Debugging.Assert(() => HEADER_LENGTH_IDX == indexStream.GetFilePointer()); + Debugging.Assert(HEADER_LENGTH_DAT == fieldsStream.GetFilePointer()); + Debugging.Assert(HEADER_LENGTH_IDX == indexStream.GetFilePointer()); } success = true; } @@ -266,7 +266,7 @@ public void AddRawDocuments(IndexInput stream, int[] lengths, int numDocs) position += lengths[i]; } fieldsStream.CopyBytes(stream, position - start); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldsStream.GetFilePointer() == position); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldsStream.GetFilePointer() == position); } public override void Finish(FieldInfos fis, int numDocs) @@ -328,7 +328,7 @@ private int CopyFieldsWithDeletions(MergeState mergeState, AtomicReader reader, int docCount = 0; int maxDoc = reader.MaxDoc; IBits liveDocs = reader.LiveDocs; - if (Debugging.AssertsEnabled) Debugging.Assert(() => liveDocs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(liveDocs != null); if (matchingFieldsReader != null) { // We can bulk-copy because the fieldInfos are "congruent" diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs index 9684342c01..d58a59daa5 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs @@ -122,17 +122,17 @@ public Lucene40TermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn int tvfVersion = CodecUtil.CheckHeader(tvf, CODEC_NAME_FIELDS, VERSION_START, VERSION_CURRENT); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => HEADER_LENGTH_INDEX == tvx.GetFilePointer()); - Debugging.Assert(() => HEADER_LENGTH_DOCS == tvd.GetFilePointer()); - Debugging.Assert(() => HEADER_LENGTH_FIELDS == tvf.GetFilePointer()); - Debugging.Assert(() => tvxVersion == tvdVersion); - Debugging.Assert(() => tvxVersion == tvfVersion); + Debugging.Assert(HEADER_LENGTH_INDEX == tvx.GetFilePointer()); + Debugging.Assert(HEADER_LENGTH_DOCS == tvd.GetFilePointer()); + Debugging.Assert(HEADER_LENGTH_FIELDS == tvf.GetFilePointer()); + Debugging.Assert(tvxVersion == tvdVersion); + Debugging.Assert(tvxVersion == tvfVersion); } numTotalDocs = (int)(tvx.Length - HEADER_LENGTH_INDEX >> 4); this.size = numTotalDocs; - if (Debugging.AssertsEnabled) Debugging.Assert(() => size == 0 || numTotalDocs == size); + if (Debugging.AssertsEnabled) Debugging.Assert(size == 0 || numTotalDocs == size); this.fieldInfos = fieldInfos; success = true; @@ -203,7 +203,7 @@ internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int startDocID, int nu while (count < numDocs) { int docID = startDocID + count + 1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docID <= numTotalDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(docID <= numTotalDocs); if (docID < numTotalDocs) { tvdPosition = tvx.ReadInt64(); @@ -213,7 +213,7 @@ internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int 
startDocID, int nu { tvdPosition = tvd.Length; tvfPosition = tvf.Length; - if (Debugging.AssertsEnabled) Debugging.Assert(() => count == numDocs - 1); + if (Debugging.AssertsEnabled) Debugging.Assert(count == numDocs - 1); } tvdLengths[count] = (int)(tvdPosition - lastTvdPosition); tvfLengths[count] = (int)(tvfPosition - lastTvfPosition); @@ -251,7 +251,7 @@ public TVFields(Lucene40TermVectorsReader outerInstance, int docID) outerInstance.tvd.Seek(outerInstance.tvx.ReadInt64()); int fieldCount = outerInstance.tvd.ReadVInt32(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount >= 0); if (fieldCount != 0) { fieldNumbers = new int[fieldCount]; @@ -521,7 +521,7 @@ public override BytesRef Next() } payloadOffsets[posUpto] = totalPayloadLength; totalPayloadLength += lastPayloadLength; - if (Debugging.AssertsEnabled) Debugging.Assert(() => totalPayloadLength >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(totalPayloadLength >= 0); } payloadData = new byte[totalPayloadLength]; tvf.ReadBytes(payloadData, 0, payloadData.Length); @@ -671,7 +671,7 @@ public override int Freq } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffsets != null); + if (Debugging.AssertsEnabled) Debugging.Assert(startOffsets != null); return startOffsets.Length; } } @@ -733,7 +733,7 @@ public override BytesRef GetPayload() public override int NextPosition() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => (positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); + if (Debugging.AssertsEnabled) Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length); if (positions != null) { diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs index dc72a6b78d..a6e49d0e06 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs @@ -80,9 +80,9 @@ public Lucene40TermVectorsWriter(Directory directory, string segment, IOContext CodecUtil.WriteHeader(tvf, Lucene40TermVectorsReader.CODEC_NAME_FIELDS, Lucene40TermVectorsReader.VERSION_CURRENT); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.GetFilePointer()); - Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.GetFilePointer()); - Debugging.Assert(() => Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.GetFilePointer()); + Debugging.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.GetFilePointer()); + Debugging.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.GetFilePointer()); + Debugging.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.GetFilePointer()); } success = true; } @@ -113,7 +113,7 @@ public override void StartDocument(int numVectorFields) public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName); + if (Debugging.AssertsEnabled) Debugging.Assert(lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName); lastFieldName = info.Name; this.positions = 
 positions;
             this.offsets = offsets;
@@ -142,7 +142,7 @@ public override void StartField(FieldInfo info, int numTerms, bool positions, bo
         [MethodImpl(MethodImplOptions.NoInlining)]
         public override void FinishDocument()
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == numVectorFields);
+            if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount == numVectorFields);
             for (int i = 1; i < fieldCount; i++)
             {
                 tvd.WriteVInt64(fps[i] - fps[i - 1]);
@@ -273,8 +273,8 @@ public override void FinishTerm()
                 // dump buffer
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => positions && (offsets || payloads));
-                    Debugging.Assert(() => bufferedIndex == bufferedFreq);
+                    Debugging.Assert(positions && (offsets || payloads));
+                    Debugging.Assert(bufferedIndex == bufferedFreq);
                 }
                 if (payloads)
                 {
@@ -365,8 +365,8 @@ private void AddRawDocuments(Lucene40TermVectorsReader reader, int[] tvdLengths,
             tvf.CopyBytes(reader.TvfStream, tvfPosition - tvfStart);
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => tvd.GetFilePointer() == tvdPosition);
-                Debugging.Assert(() => tvf.GetFilePointer() == tvfPosition);
+                Debugging.Assert(tvd.GetFilePointer() == tvdPosition);
+                Debugging.Assert(tvf.GetFilePointer() == tvfPosition);
             }
         }
diff --git a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
index 871c2b06c7..9a6c9330c4 100644
--- a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
@@ -88,7 +88,7 @@ private static int ComputeIterations(PackedInt32s.IDecoder decoder)
         private static int EncodedSize(PackedInt32s.Format format, int packedIntsVersion, int bitsPerValue)
         {
             long byteCount = format.ByteCount(packedIntsVersion, Lucene41PostingsFormat.BLOCK_SIZE, bitsPerValue);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => byteCount >= 0 && byteCount <= int.MaxValue, byteCount.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(byteCount >= 0 && byteCount <= int.MaxValue, byteCount.ToString);
             return (int)byteCount;
         }
@@ -113,8 +113,8 @@ internal ForUtil(float acceptableOverheadRatio, DataOutput @out)
                 PackedInt32s.FormatAndBits formatAndBits = PackedInt32s.FastestFormatAndBits(Lucene41PostingsFormat.BLOCK_SIZE, bpv, acceptableOverheadRatio);
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => formatAndBits.Format.IsSupported(formatAndBits.BitsPerValue));
-                    Debugging.Assert(() => formatAndBits.BitsPerValue <= 32);
+                    Debugging.Assert(formatAndBits.Format.IsSupported(formatAndBits.BitsPerValue));
+                    Debugging.Assert(formatAndBits.BitsPerValue <= 32);
                 }
                 encodedSizes[bpv] = EncodedSize(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
                 encoders[bpv] = PackedInt32s.GetEncoder(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
@@ -144,7 +144,7 @@ internal ForUtil(DataInput @in)
                 var bitsPerValue = (code & 31) + 1;
                 PackedInt32s.Format format = PackedInt32s.Format.ById(formatId);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => format.IsSupported(bitsPerValue));
+                if (Debugging.AssertsEnabled) Debugging.Assert(format.IsSupported(bitsPerValue));
                 encodedSizes[bpv] = EncodedSize(format, packedIntsVersion, bitsPerValue);
                 encoders[bpv] = PackedInt32s.GetEncoder(format, packedIntsVersion, bitsPerValue);
                 decoders[bpv] = PackedInt32s.GetDecoder(format, packedIntsVersion, bitsPerValue);
@@ -169,12 +169,12 @@ internal void WriteBlock(int[] data, byte[] encoded, IndexOutput @out)
             }

             int numBits = BitsRequired(data);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => numBits > 0 && numBits <= 32, numBits.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(numBits > 0 && numBits <= 32, numBits.ToString);
             PackedInt32s.IEncoder encoder = encoders[numBits];
             int iters = iterations[numBits];
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => iters * encoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
+            if (Debugging.AssertsEnabled) Debugging.Assert(iters * encoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
             int encodedSize = encodedSizes[numBits];
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => iters * encoder.ByteBlockCount >= encodedSize);
+            if (Debugging.AssertsEnabled) Debugging.Assert(iters * encoder.ByteBlockCount >= encodedSize);

             @out.WriteByte((byte)numBits);
@@ -192,7 +192,7 @@ internal void WriteBlock(int[] data, byte[] encoded, IndexOutput @out)
         internal void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)
         {
             int numBits = @in.ReadByte();
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => numBits <= 32, numBits.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(numBits <= 32, numBits.ToString);

             if (numBits == ALL_VALUES_EQUAL)
             {
@@ -206,7 +206,7 @@ internal void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)
             PackedInt32s.IDecoder decoder = decoders[numBits];
             int iters = iterations[numBits];
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => iters * decoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
+            if (Debugging.AssertsEnabled) Debugging.Assert(iters * decoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);

             decoder.Decode(encoded, 0, decoded, 0, iters);
         }
@@ -224,7 +224,7 @@ internal void SkipBlock(IndexInput @in)
                 @in.ReadVInt32();
                 return;
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => numBits > 0 && numBits <= 32, numBits.ToString);
+            if (Debugging.AssertsEnabled) Debugging.Assert(numBits > 0 && numBits <= 32, numBits.ToString);
             int encodedSize = encodedSizes[numBits];
             @in.Seek(@in.GetFilePointer() + encodedSize);
         }
@@ -251,7 +251,7 @@ private static int BitsRequired(int[] data)
             long or = 0;
             for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE; ++i)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => data[i] >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(data[i] >= 0);
                 or |= (uint)data[i];
             }
             return PackedInt32s.BitsRequired(or);
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
index f589769535..18e322d496 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsFormat.cs
@@ -392,9 +392,9 @@ public Lucene41PostingsFormat(int minTermBlockSize, int maxTermBlockSize)
             : base()
         {
             this.minTermBlockSize = minTermBlockSize;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => minTermBlockSize > 1);
+            if (Debugging.AssertsEnabled) Debugging.Assert(minTermBlockSize > 1);
             this.maxTermBlockSize = maxTermBlockSize;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => minTermBlockSize <= maxTermBlockSize);
+            if (Debugging.AssertsEnabled) Debugging.Assert(minTermBlockSize <= maxTermBlockSize);
         }

         public override string ToString()
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
index 52c0859822..b221ebd103 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
@@ -402,7 +402,7 @@ public DocsEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32BlockTermState
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => left > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -511,7 +511,7 @@ public override int Advance(int target)

                     if (!skipped)
                     {
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => skipOffset != -1);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         skipper.Init(docTermStartFP + skipOffset, docTermStartFP, 0, 0, docFreq);
@@ -528,7 +528,7 @@ public override int Advance(int target)
                         // if (DEBUG) {
                         // System.out.println("skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer());
                         // }
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                         docUpto = newDocUpto;

                         // Force to read next block
@@ -731,7 +731,7 @@ public DocsAndPositionsEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32Bl
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => left > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -875,7 +875,7 @@ public override int Advance(int target)

                     if (!skipped)
                     {
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => skipOffset != -1);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         // if (DEBUG) {
@@ -894,7 +894,7 @@ public override int Advance(int target)
                         // System.out.println(" skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer() + " pos.fp=" + skipper.getPosPointer() + " pos.bufferUpto=" + skipper.getPosBufferUpto());
                         // }
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                         docUpto = newDocUpto;

                         // Force to read next block
@@ -983,7 +983,7 @@ private void SkipPositions()
                     // if (DEBUG) {
                     // System.out.println(" skip whole block @ fp=" + posIn.getFilePointer());
                     // }
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => posIn.GetFilePointer() != lastPosBlockFP);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(posIn.GetFilePointer() != lastPosBlockFP);
                     outerInstance.forUtil.SkipBlock(posIn);
                     toSkip -= Lucene41PostingsFormat.BLOCK_SIZE;
                 }
@@ -1236,7 +1236,7 @@ public EverythingEnum Reset(IBits liveDocs, Lucene41PostingsWriter.Int32BlockTer
             private void RefillDocs()
             {
                 int left = docFreq - docUpto;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => left > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(left > 0);

                 if (left >= Lucene41PostingsFormat.BLOCK_SIZE)
                 {
@@ -1450,7 +1450,7 @@ public override int Advance(int target)

                     if (!skipped)
                     {
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => skipOffset != -1);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(skipOffset != -1);
                         // this is the first time this enum has skipped
                         // since reset() was called; load the skip data:
                         // if (DEBUG) {
@@ -1468,7 +1468,7 @@ public override int Advance(int target)
                         // if (DEBUG) {
                         // System.out.println(" skipper moved to docUpto=" + newDocUpto + " vs current=" + docUpto + "; docID=" + skipper.getDoc() + " fp=" + skipper.getDocPointer() + " pos.fp=" + skipper.getPosPointer() + " pos.bufferUpto=" + skipper.getPosBufferUpto() + " pay.fp=" + skipper.getPayPointer() + " lastStartOffset=" + lastStartOffset);
                         // }
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(newDocUpto % Lucene41PostingsFormat.BLOCK_SIZE == 0, () => "got " + newDocUpto);
                         docUpto = newDocUpto;

                         // Force to read next block
@@ -1568,7 +1568,7 @@ private void SkipPositions()
                     // if (DEBUG) {
                     // System.out.println(" skip whole block @ fp=" + posIn.getFilePointer());
                     // }
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => posIn.GetFilePointer() != lastPosBlockFP);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(posIn.GetFilePointer() != lastPosBlockFP);
                     outerInstance.forUtil.SkipBlock(posIn);

                     if (indexHasPayloads)
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
index 305ab0d766..1b57a4e83f 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs
@@ -379,8 +379,8 @@ public override void AddPosition(int position, BytesRef payload, int startOffset
             {
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => startOffset >= lastStartOffset);
-                    Debugging.Assert(() => endOffset >= startOffset);
+                    Debugging.Assert(startOffset >= lastStartOffset);
+                    Debugging.Assert(endOffset >= startOffset);
                 }
                 offsetStartDeltaBuffer[posBufferUpto] = startOffset - lastStartOffset;
                 offsetLengthBuffer[posBufferUpto] = endOffset - startOffset;
@@ -442,11 +442,11 @@ public override void FinishDoc()
         public override void FinishTerm(BlockTermState state)
         {
             Int32BlockTermState state2 = (Int32BlockTermState)state;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => state2.DocFreq > 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(state2.DocFreq > 0);

             // TODO: wasteful we are counting this (counting # docs
             // for this term) in two places?
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => state2.DocFreq == docCount, () => state2.DocFreq + " vs " + docCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(state2.DocFreq == docCount, () => state2.DocFreq + " vs " + docCount);

             // if (DEBUG) {
             // System.out.println("FPW.finishTerm docFreq=" + state2.docFreq);
@@ -501,7 +501,7 @@ public override void FinishTerm(BlockTermState state)
                 // totalTermFreq is just total number of positions(or payloads, or offsets)
                 // associated with current term.
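
[Every hunk above applies the same mechanical rewrite: the condition moves out of the lambda and is evaluated eagerly, and the surrounding if (Debugging.AssertsEnabled) guard keeps the Release-build cost to a single property read when asserts are off. A minimal self-contained sketch of the two call shapes; the DebuggingSketch type below is an illustrative stand-in, not the project's actual Debugging class:]

    using System;

    internal static class DebuggingSketch
    {
        public static bool AssertsEnabled { get; set; } = true; // stand-in switch

        public static void Assert(bool condition) // stand-in for Debugging.Assert(bool)
        {
            if (AssertsEnabled && !condition)
                throw new InvalidOperationException("assertion failed"); // stand-in exception type
        }
    }

    internal static class CallSiteSketch
    {
        public static void FinishDocument(int fieldCount, int numVectorFields)
        {
            // Old shape (the removed lines): a capturing lambda, which
            // typically allocates a closure on every call, even with
            // asserts disabled:
            //     Debugging.Assert(() => fieldCount == numVectorFields);

            // New shape (the added lines): evaluate the condition eagerly;
            // with asserts off, the guard skips everything but one bool read.
            if (DebuggingSketch.AssertsEnabled)
                DebuggingSketch.Assert(fieldCount == numVectorFields);
        }
    }
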
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state2.TotalTermFreq != -1);
+                if (Debugging.AssertsEnabled) Debugging.Assert(state2.TotalTermFreq != -1);

                 if (state2.TotalTermFreq > Lucene41PostingsFormat.BLOCK_SIZE)
                 {
                     // record file offset for last pos in last block
@@ -579,7 +579,7 @@ public override void FinishTerm(BlockTermState state)

             if (fieldHasPayloads)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => payloadBytesReadUpto == payloadByteUpto);
+                if (Debugging.AssertsEnabled) Debugging.Assert(payloadBytesReadUpto == payloadByteUpto);
                 payloadByteUpto = 0;
             }
         }
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
index 16474f8d44..81d23b5437 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs
@@ -129,7 +129,7 @@ public void Init(long skipPointer, long docBasePointer, long posBasePointer, lon
             }
             else
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => posBasePointer == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(posBasePointer == 0);
             }
         }
diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
index db8a5f6e1d..4eda6730fa 100644
--- a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
+++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs
@@ -93,7 +93,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable values)
             long count = 0;
             foreach (long? nv in values)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => nv != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(nv != null);
                 long v = nv.Value;

                 if (gcd != 1)
@@ -127,7 +127,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable values)
                 ++count;
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => count == maxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(count == maxDoc);
         }

         if (uniqueValues != null)
diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
index c8491c3364..5b32e5c18f 100644
--- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
+++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs
@@ -495,13 +495,13 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => current == 1);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(current == 1);
                     ordsIter.MoveNext();
                     yield return ordsIter.Current;
                 }
             }

-            if (Debugging.AssertsEnabled) Debugging.Assert(() => !ordsIter.MoveNext());
+            if (Debugging.AssertsEnabled) Debugging.Assert(!ordsIter.MoveNext());
         }

         protected override void Dispose(bool disposing)
diff --git a/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs b/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
index 5b692e8589..3449d17d01 100644
--- a/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosWriter.cs
@@ -73,7 +73,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     if (fi.IsIndexed)
                     {
                         bits |= Lucene46FieldInfosFormat.IS_INDEXED;
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                         if (indexOptions == IndexOptions.DOCS_ONLY)
                         {
                             bits |= Lucene46FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
@@ -94,7 +94,7 @@ public override void Write(Directory directory, string segmentName, string segme
                     // pack the DV types in one byte
                     var dv = DocValuesByte(fi.DocValuesType);
                     var nrm = DocValuesByte(fi.NormType);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
                     var val = (byte)(0xff & ((nrm << 4) | (byte)dv));
                     output.WriteByte(val);
                     output.WriteInt64(fi.DocValuesGen);
diff --git a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
index 4462d2fd74..1dff4f6db9 100644
--- a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
+++ b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs
@@ -106,7 +106,7 @@ public override int NextDoc()
                     current = subs[upto].DocsEnum;
                     currentBase = mergeState.DocBase[reader];
                     currentMap = mergeState.DocMaps[reader];
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => currentMap.MaxDoc == subs[upto].Slice.Length, () => "readerIndex=" + reader + " subs.len=" + subs.Length + " len1=" + currentMap.MaxDoc + " vs " + subs[upto].Slice.Length);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(currentMap.MaxDoc == subs[upto].Slice.Length, () => "readerIndex=" + reader + " subs.len=" + subs.Length + " len1=" + currentMap.MaxDoc + " vs " + subs[upto].Slice.Length);
                 }
             }
diff --git a/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs b/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
index 40a675b8fb..d2df4fcde8 100644
--- a/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
+++ b/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs
@@ -253,7 +253,7 @@ public virtual void Init(long skipPointer, int df)
         {
             this.skipPointer[0] = skipPointer;
             this.docCount = df;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => skipPointer >= 0 && skipPointer <= skipStream[0].Length, () => "invalid skip pointer: " + skipPointer + ", length=" + skipStream[0].Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(skipPointer >= 0 && skipPointer <= skipStream[0].Length, () => "invalid skip pointer: " + skipPointer + ", length=" + skipStream[0].Length);
             Array.Clear(m_skipDoc, 0, m_skipDoc.Length);
             Array.Clear(numSkipped, 0, numSkipped.Length);
             Array.Clear(childPointer, 0, childPointer.Length);
diff --git a/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs b/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
index 81713c72b8..b234a5b2f2 100644
--- a/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
+++ b/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs
@@ -148,7 +148,7 @@ public virtual void ResetSkip()
         /// If an I/O error occurs.
         public virtual void BufferSkip(int df)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => df % skipInterval == 0);
+            if (Debugging.AssertsEnabled) Debugging.Assert(df % skipInterval == 0);

             int numLevels = 1;
             df /= skipInterval;
diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
index 4ef99e0b12..1d2f2838d3 100644
--- a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
+++ b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs
@@ -155,7 +155,7 @@ internal virtual DocValuesConsumer GetInstance(FieldInfo field)
                 string formatName_ = format.Name;

                 string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName_);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesGen != -1 || previousValue == null, () => "formatName=" + formatName_ + " prevValue=" + previousValue);
+                if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesGen != -1 || previousValue == null, () => "formatName=" + formatName_ + " prevValue=" + previousValue);

                 int? suffix = null;
@@ -199,12 +199,12 @@ internal virtual DocValuesConsumer GetInstance(FieldInfo field)
                 else
                 {
                     // we've already seen this format, so just grab its suffix
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => suffixes.ContainsKey(formatName_));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(suffixes.ContainsKey(formatName_));
                     suffix = consumer.Suffix;
                 }

                 previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => field.DocValuesGen != -1 || previousValue == null, () => "suffix=" + Convert.ToString(suffix, CultureInfo.InvariantCulture) + " prevValue=" + previousValue);
+                if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesGen != -1 || previousValue == null, () => "suffix=" + Convert.ToString(suffix, CultureInfo.InvariantCulture) + " prevValue=" + previousValue);

                 // TODO: we should only provide the "slice" of FIS
                 // that this DVF actually sees ...
@@ -265,7 +265,7 @@ public FieldsReader(PerFieldDocValuesFormat outerInstance, SegmentReadState read
                     {
                         // null formatName means the field is in fieldInfos, but has no docvalues!
                         string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => suffix != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(suffix != null);
                         DocValuesFormat format = DocValuesFormat.ForName(formatName);
                         string segmentSuffix = GetFullSegmentSuffix(readState.SegmentSuffix, GetSuffix(formatName, suffix));
                         // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
@@ -306,7 +306,7 @@ internal FieldsReader(PerFieldDocValuesFormat outerInstance, FieldsReader other)
                 {
                     DocValuesProducer producer;
                     oldToNew.TryGetValue(ent.Value, out producer);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => producer != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(producer != null);
                     fields[ent.Key] = producer;
                 }
             }
diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
index 4f3a7ea473..64c046a482 100644
--- a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
+++ b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs
@@ -116,7 +116,7 @@ public override TermsConsumer AddField(FieldInfo field)
                 string formatName = format.Name;

                 string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => previousValue == null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(previousValue == null);

                 int? suffix;
@@ -147,12 +147,12 @@ public override TermsConsumer AddField(FieldInfo field)
                 else
                 {
                     // we've already seen this format, so just grab its suffix
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => suffixes.ContainsKey(formatName));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(suffixes.ContainsKey(formatName));
                     suffix = consumer.Suffix;
                 }

                 previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => previousValue == null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(previousValue == null);

                 // TODO: we should only provide the "slice" of FIS
                 // that this PF actually sees ... then stuff like
@@ -219,7 +219,7 @@ public FieldsReader(PerFieldPostingsFormat outerInstance, SegmentReadState readS
                     {
                         // null formatName means the field is in fieldInfos, but has no postings!
                         string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => suffix != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(suffix != null);
                         PostingsFormat format = PostingsFormat.ForName(formatName);
                         string segmentSuffix = GetSuffix(formatName, suffix);
                         // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
diff --git a/src/Lucene.Net/Codecs/PostingsConsumer.cs b/src/Lucene.Net/Codecs/PostingsConsumer.cs
index 3213742a16..9e11d1ecce 100644
--- a/src/Lucene.Net/Codecs/PostingsConsumer.cs
+++ b/src/Lucene.Net/Codecs/PostingsConsumer.cs
@@ -151,7 +151,7 @@ public virtual TermStats Merge(MergeState mergeState, IndexOptions indexOptions,
             }
             else
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+                if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
                 var postingsEnum = (DocsAndPositionsEnum)postings;
                 while (true)
                 {
diff --git a/src/Lucene.Net/Codecs/TermVectorsWriter.cs b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
index 0f1ac1131e..3dcfbdbfe4 100644
--- a/src/Lucene.Net/Codecs/TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
@@ -286,7 +286,7 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                 fieldCount++;
                 FieldInfo fieldInfo = mergeState.FieldInfos.FieldInfo(fieldName);

-                if (Debugging.AssertsEnabled) Debugging.Assert(() => lastFieldName == null || fieldName.CompareToOrdinal(lastFieldName) > 0, () => "lastFieldName=" + lastFieldName + " fieldName=" + fieldName);
+                if (Debugging.AssertsEnabled) Debugging.Assert(lastFieldName == null || fieldName.CompareToOrdinal(lastFieldName) > 0, () => "lastFieldName=" + lastFieldName + " fieldName=" + fieldName);
                 lastFieldName = fieldName;

                 Terms terms = vectors.GetTerms(fieldName);
@@ -299,7 +299,7 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                 bool hasPositions = terms.HasPositions;
                 bool hasOffsets = terms.HasOffsets;
                 bool hasPayloads = terms.HasPayloads;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasPayloads || hasPositions);
+                if (Debugging.AssertsEnabled) Debugging.Assert(!hasPayloads || hasPositions);

                 int numTerms = (int)terms.Count;
                 if (numTerms == -1)
@@ -328,13 +328,13 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)
                     if (hasPositions || hasOffsets)
                     {
                         docsAndPositionsEnum = termsEnum.DocsAndPositions(null, docsAndPositionsEnum);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => docsAndPositionsEnum != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(docsAndPositionsEnum != null);

                         int docID = docsAndPositionsEnum.NextDoc();
                         if (Debugging.AssertsEnabled)
                         {
-                            Debugging.Assert(() => docID != DocIdSetIterator.NO_MORE_DOCS);
-                            Debugging.Assert(() => docsAndPositionsEnum.Freq == freq);
+                            Debugging.Assert(docID != DocIdSetIterator.NO_MORE_DOCS);
+                            Debugging.Assert(docsAndPositionsEnum.Freq == freq);
                         }

                         for (int posUpto = 0; posUpto < freq; posUpto++)
@@ -345,16 +345,16 @@ protected void AddAllDocVectors(Fields vectors, MergeState mergeState)

                             BytesRef payload = docsAndPositionsEnum.GetPayload();

-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasPositions || pos >= 0);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(!hasPositions || pos >= 0);
                             AddPosition(pos, startOffset, endOffset, payload);
                         }
                     }

                     FinishTerm();
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => termCount == numTerms);
+                if (Debugging.AssertsEnabled) Debugging.Assert(termCount == numTerms);
                 FinishField();
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldCount == numFields);
+            if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount == numFields);
             FinishDocument();
         }
diff --git a/src/Lucene.Net/Codecs/TermsConsumer.cs b/src/Lucene.Net/Codecs/TermsConsumer.cs
index 926f92e562..6904fd7a14 100644
--- a/src/Lucene.Net/Codecs/TermsConsumer.cs
+++ b/src/Lucene.Net/Codecs/TermsConsumer.cs
@@ -101,7 +101,7 @@ protected internal TermsConsumer()
         public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, TermsEnum termsEnum)
         {
             BytesRef term;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null);
             long sumTotalTermFreq = 0;
             long sumDocFreq = 0;
             long sumDFsinceLastAbortCheck = 0;
@@ -157,7 +157,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         docsAndFreqsEnumIn = (MultiDocsEnum)termsEnum.Docs(null, docsAndFreqsEnumIn);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => docsAndFreqsEnumIn != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(docsAndFreqsEnumIn != null);
                         docsAndFreqsEnum.Reset(docsAndFreqsEnumIn);
                         PostingsConsumer postingsConsumer = StartTerm(term);
                         TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, docsAndFreqsEnum, visitedDocs);
@@ -188,7 +188,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn, DocsAndPositionsFlags.PAYLOADS);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => postingsEnumIn != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(postingsEnumIn != null);
                         postingsEnum.Reset(postingsEnumIn);
                         PostingsConsumer postingsConsumer = StartTerm(term);
@@ -209,7 +209,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
             }
             else
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+                if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
                 if (postingsEnum == null)
                 {
                     postingsEnum = new MappingMultiDocsAndPositionsEnum();
@@ -221,7 +221,7 @@ public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, Term
                         // We can pass null for liveDocs, because the
                         // mapping enum will skip the non-live docs:
                         postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => postingsEnumIn != null);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(postingsEnumIn != null);
                         postingsEnum.Reset(postingsEnumIn);
                         PostingsConsumer postingsConsumer = StartTerm(term);
diff --git a/src/Lucene.Net/Index/AtomicReader.cs b/src/Lucene.Net/Index/AtomicReader.cs
index d890e7f8d8..d57d08a853 100644
--- a/src/Lucene.Net/Index/AtomicReader.cs
+++ b/src/Lucene.Net/Index/AtomicReader.cs
@@ -213,8 +213,8 @@ public DocsEnum GetTermDocsEnum(Term term) // LUCENENET specific: Renamed from T
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => term.Field != null);
-                Debugging.Assert(() => term.Bytes != null);
+                Debugging.Assert(term.Field != null);
+                Debugging.Assert(term.Bytes != null);
             }
             Fields fields = Fields;
             if (fields != null)
@@ -239,8 +239,8 @@ public DocsEnum GetTermDocsEnum(Term term) // LUCENENET specific: Renamed from T
         ///
         public DocsAndPositionsEnum GetTermPositionsEnum(Term term) // LUCENENET specific: Renamed from TermPositionsEnum()
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Field != null);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Bytes != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(term.Field != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(term.Bytes != null);
             Fields fields = Fields;
             if (fields != null)
             {
diff --git a/src/Lucene.Net/Index/AtomicReaderContext.cs b/src/Lucene.Net/Index/AtomicReaderContext.cs
index 28caa90224..ffa83bfed0 100644
--- a/src/Lucene.Net/Index/AtomicReaderContext.cs
+++ b/src/Lucene.Net/Index/AtomicReaderContext.cs
@@ -63,7 +63,7 @@ public override IList Leaves
                 {
                     throw new NotSupportedException("this is not a top-level context.");
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => leaves != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(leaves != null);
                 return leaves;
             }
         }
diff --git a/src/Lucene.Net/Index/AutomatonTermsEnum.cs b/src/Lucene.Net/Index/AutomatonTermsEnum.cs
index 88978fe709..b844fd45ba 100644
--- a/src/Lucene.Net/Index/AutomatonTermsEnum.cs
+++ b/src/Lucene.Net/Index/AutomatonTermsEnum.cs
@@ -91,7 +91,7 @@ public AutomatonTermsEnum(TermsEnum tenum, CompiledAutomaton compiled)
         {
             this.finite = compiled.Finite;
             this.runAutomaton = compiled.RunAutomaton;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => this.runAutomaton != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(this.runAutomaton != null);

             this.commonSuffixRef = compiled.CommonSuffixRef;
             this.allTransitions = compiled.SortedTransitions;
@@ -129,7 +129,7 @@ protected override BytesRef NextSeekTerm(BytesRef term)
             //System.out.println("ATE.nextSeekTerm term=" + term);
             if (term == null)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => seekBytesRef.Length == 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(seekBytesRef.Length == 0);
                 // return the empty term, as its valid
                 if (runAutomaton.IsAccept(runAutomaton.InitialState))
                 {
@@ -159,14 +159,14 @@ protected override BytesRef NextSeekTerm(BytesRef term)
         ///
         private void SetLinear(int position)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => linear == false);
+            if (Debugging.AssertsEnabled) Debugging.Assert(linear == false);
             int state = runAutomaton.InitialState;
             int maxInterval = 0xff;
             for (int i = 0; i < position; i++)
             {
                 state = runAutomaton.Step(state, seekBytesRef.Bytes[i] & 0xff);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => state >= 0, () => "state=" + state);
+                if (Debugging.AssertsEnabled) Debugging.Assert(state >= 0, () => "state=" + state);
             }
             for (int i = 0; i < allTransitions[state].Length; i++)
             {
diff --git a/src/Lucene.Net/Index/BitsSlice.cs b/src/Lucene.Net/Index/BitsSlice.cs
index 1b3caa3956..9545b6e599 100644
--- a/src/Lucene.Net/Index/BitsSlice.cs
+++ b/src/Lucene.Net/Index/BitsSlice.cs
@@ -40,7 +40,7 @@ public BitsSlice(IBits parent, ReaderSlice slice)
             this.parent = parent;
             this.start = slice.Start;
             this.length = slice.Length;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "length=" + length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(length >= 0, () => "length=" + length);
         }

         public bool Get(int doc)
@@ -49,7 +49,7 @@ public bool Get(int doc)
             {
                 throw new Exception("doc " + doc + " is out of bounds 0 .. " + (length - 1));
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => doc < length, () => "doc=" + doc + " length=" + length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(doc < length, () => "doc=" + doc + " length=" + length);
             return parent.Get(doc + start);
         }
diff --git a/src/Lucene.Net/Index/BufferedUpdatesStream.cs b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
index 58acdd26e2..f329ad23e0 100644
--- a/src/Lucene.Net/Index/BufferedUpdatesStream.cs
+++ b/src/Lucene.Net/Index/BufferedUpdatesStream.cs
@@ -90,10 +90,10 @@ public virtual long Push(FrozenBufferedUpdates packet)
             packet.DelGen = nextGen++;
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(packet.Any);
-                Debugging.Assert(CheckDeleteStats);
-                Debugging.Assert(() => packet.DelGen < nextGen);
-                Debugging.Assert(() => updates.Count == 0 || updates[updates.Count - 1].DelGen < packet.DelGen, () => "Delete packets must be in order");
+                Debugging.Assert(packet.Any());
+                Debugging.Assert(CheckDeleteStats());
+                Debugging.Assert(packet.DelGen < nextGen);
+                Debugging.Assert(updates.Count == 0 || updates[updates.Count - 1].DelGen < packet.DelGen, () => "Delete packets must be in order");
             }
             updates.Add(packet);
             numTerms.AddAndGet(packet.numTermDeletes);
@@ -102,7 +102,7 @@ public virtual long Push(FrozenBufferedUpdates packet)
             {
                 infoStream.Message("BD", "push deletes " + packet + " delGen=" + packet.DelGen + " packetCount=" + updates.Count + " totBytesUsed=" + bytesUsed);
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
+            if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats());
             return packet.DelGen;
         }
     }
@@ -178,7 +178,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                     return new ApplyDeletesResult(false, nextGen++, null);
                 }

-                if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
+                if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats());

                 if (!Any())
                 {
@@ -239,11 +239,11 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                     }
                     else if (packet != null && segGen == packet.DelGen)
                     {
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => packet.isSegmentPrivate, () => "Packet and Segments deletegen can only match on a segment private del packet gen=" + segGen);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(packet.isSegmentPrivate, () => "Packet and Segments deletegen can only match on a segment private del packet gen=" + segGen);
                         //System.out.println(" eq");

                         // Lock order: IW -> BD -> RP
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => readerPool.InfoIsLive(info));
+                        if (Debugging.AssertsEnabled) Debugging.Assert(readerPool.InfoIsLive(info));
                         ReadersAndUpdates rld = readerPool.Get(info, true);
                         SegmentReader reader = rld.GetReader(IOContext.READ);
                         int delCount = 0;
@@ -270,7 +270,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                                 rld.WriteFieldUpdates(info.Info.Dir, dvUpdates);
                             }
                             int fullDelCount = rld.Info.DelCount + rld.PendingDeleteCount;
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => fullDelCount <= rld.Info.Info.DocCount);
+                            if (Debugging.AssertsEnabled) Debugging.Assert(fullDelCount <= rld.Info.Info.DocCount);
                             segAllDeletes = fullDelCount == rld.Info.Info.DocCount;
                         }
                         finally
@@ -315,7 +315,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                         if (coalescedUpdates != null)
                         {
                             // Lock order: IW -> BD -> RP
-                            if (Debugging.AssertsEnabled) Debugging.Assert(() => readerPool.InfoIsLive(info));
+                            if (Debugging.AssertsEnabled) Debugging.Assert(readerPool.InfoIsLive(info));
                             ReadersAndUpdates rld = readerPool.Get(info, true);
                             SegmentReader reader = rld.GetReader(IOContext.READ);
                             int delCount = 0;
@@ -332,7 +332,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                                     rld.WriteFieldUpdates(info.Info.Dir, dvUpdates);
                                 }
                                 int fullDelCount = rld.Info.DelCount + rld.PendingDeleteCount;
-                                if (Debugging.AssertsEnabled) Debugging.Assert(() => fullDelCount <= rld.Info.Info.DocCount);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(fullDelCount <= rld.Info.Info.DocCount);
                                 segAllDeletes = fullDelCount == rld.Info.Info.DocCount;
                             }
                             finally
@@ -362,7 +362,7 @@ public virtual ApplyDeletesResult ApplyDeletesAndUpdates(IndexWriter.ReaderPool
                     }
                 }

-                if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
+                if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats());
                 if (infoStream.IsEnabled("BD"))
                 {
                     infoStream.Message("BD", "applyDeletes took " + (Environment.TickCount - t0) + " msec");
@@ -392,7 +392,7 @@ public virtual void Prune(SegmentInfos segmentInfos)
         {
             lock (this)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
+                if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats());
                 long minGen = long.MaxValue;
                 foreach (SegmentCommitInfo info in segmentInfos.Segments)
                 {
@@ -409,7 +409,7 @@ public virtual void Prune(SegmentInfos segmentInfos)
                     if (updates[delIDX].DelGen >= minGen)
                     {
                         Prune(delIDX);
-                        if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteStats());
                         return;
                     }
                 }
@@ -418,8 +418,8 @@ public virtual void Prune(SegmentInfos segmentInfos)
                 Prune(limit);
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => !Any());
-                    Debugging.Assert(CheckDeleteStats);
+                    Debugging.Assert(!Any());
+                    Debugging.Assert(CheckDeleteStats());
                 }
             }
         }
@@ -438,9 +438,9 @@ private void Prune(int count)
             {
                 FrozenBufferedUpdates packet = updates[delIDX];
                 numTerms.AddAndGet(-packet.numTermDeletes);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => numTerms >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(numTerms >= 0);
                 bytesUsed.AddAndGet(-packet.bytesUsed);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed >= 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(bytesUsed >= 0);
             }
             updates.SubList(0, count).Clear();
         }
@@ -465,7 +465,7 @@ private long ApplyTermDeletes(IEnumerable termsIter, ReadersAndUpdates rld
             string currentField = null;
             DocsEnum docs = null;

-            if (Debugging.AssertsEnabled) Debugging.Assert(() => CheckDeleteTerm(null));
+            if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteTerm(null));

             bool any = false;
@@ -477,7 +477,7 @@ private long ApplyTermDeletes(IEnumerable termsIter, ReadersAndUpdates rld
                 // forwards
                 if (!string.Equals(term.Field, currentField, StringComparison.Ordinal))
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => currentField == null || currentField.CompareToOrdinal(term.Field) < 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(currentField == null || currentField.CompareToOrdinal(term.Field) < 0);
                     currentField = term.Field;
                     Terms terms = fields.GetTerms(currentField);
                     if (terms != null)
@@ -494,7 +494,7 @@ private long ApplyTermDeletes(IEnumerable termsIter, ReadersAndUpdates rld
                 {
                     continue;
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => CheckDeleteTerm(term));
+                if (Debugging.AssertsEnabled) Debugging.Assert(CheckDeleteTerm(term));

                 // System.out.println(" term=" + term);
@@ -688,7 +688,7 @@ private bool CheckDeleteTerm(Term term)
         {
             if (term != null)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0, () => "lastTerm=" + lastDeleteTerm + " vs term=" + term);
+                if (Debugging.AssertsEnabled) Debugging.Assert(lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0, () => "lastTerm=" + lastDeleteTerm + " vs term=" + term);
             }
             // TODO: we re-use term now in our merged iterable, but we shouldn't clone, instead copy for this assert
             lastDeleteTerm = term == null ? null : new Term(term.Field, BytesRef.DeepCopyOf(term.Bytes));
@@ -707,8 +707,8 @@ private bool CheckDeleteStats()
             }
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => numTerms2 == numTerms, () => "numTerms2=" + numTerms2 + " vs " + numTerms);
-                Debugging.Assert(() => bytesUsed2 == bytesUsed, () => "bytesUsed2=" + bytesUsed2 + " vs " + bytesUsed);
+                Debugging.Assert(numTerms2 == numTerms, () => "numTerms2=" + numTerms2 + " vs " + numTerms);
+                Debugging.Assert(bytesUsed2 == bytesUsed, () => "bytesUsed2=" + bytesUsed2 + " vs " + bytesUsed);
             }
             return true;
         }
diff --git a/src/Lucene.Net/Index/ByteSliceReader.cs b/src/Lucene.Net/Index/ByteSliceReader.cs
index 961f9731f9..fbfbccff93 100644
--- a/src/Lucene.Net/Index/ByteSliceReader.cs
+++ b/src/Lucene.Net/Index/ByteSliceReader.cs
@@ -50,9 +50,9 @@ public void Init(ByteBlockPool pool, int startIndex, int endIndex)
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => endIndex - startIndex >= 0);
-                Debugging.Assert(() => startIndex >= 0);
-                Debugging.Assert(() => endIndex >= 0);
+                Debugging.Assert(endIndex - startIndex >= 0);
+                Debugging.Assert(startIndex >= 0);
+                Debugging.Assert(endIndex >= 0);
             }

             this.pool = pool;
@@ -79,7 +79,7 @@ public void Init(ByteBlockPool pool, int startIndex, int endIndex)

         public bool Eof()
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto + BufferOffset <= EndIndex);
+            if (Debugging.AssertsEnabled) Debugging.Assert(upto + BufferOffset <= EndIndex);
             return upto + BufferOffset == EndIndex;
         }
@@ -87,8 +87,8 @@ public override byte ReadByte()
         {
             if (Debugging.AssertsEnabled)
             {
-                Debugging.Assert(() => !Eof());
-                Debugging.Assert(() => upto <= limit);
+                Debugging.Assert(!Eof());
+                Debugging.Assert(upto <= limit);
             }
             if (upto == limit)
             {
@@ -104,7 +104,7 @@ public long WriteTo(DataOutput @out)
             {
                 if (limit + BufferOffset == EndIndex)
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => EndIndex - BufferOffset >= upto);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(EndIndex - BufferOffset >= upto);
                     @out.WriteBytes(buffer, upto, limit - upto);
                     size += limit - upto;
                     break;
@@ -137,7 +137,7 @@ public void NextSlice()
             if (nextIndex + newSize >= EndIndex)
             {
                 // We are advancing to the final slice
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => EndIndex - nextIndex > 0);
+                if (Debugging.AssertsEnabled) Debugging.Assert(EndIndex - nextIndex > 0);
                 limit = EndIndex - BufferOffset;
             }
             else
diff --git a/src/Lucene.Net/Index/ByteSliceWriter.cs b/src/Lucene.Net/Index/ByteSliceWriter.cs
index d242d1496e..d7a9f86557 100644
--- a/src/Lucene.Net/Index/ByteSliceWriter.cs
+++ b/src/Lucene.Net/Index/ByteSliceWriter.cs
@@ -47,26 +47,26 @@ public ByteSliceWriter(ByteBlockPool pool)
         public void Init(int address)
         {
             slice = pool.Buffers[address >> ByteBlockPool.BYTE_BLOCK_SHIFT];
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => slice != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(slice != null);
             upto = address & ByteBlockPool.BYTE_BLOCK_MASK;
             offset0 = address;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto < slice.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(upto < slice.Length);
         }

         ///
         /// Write byte into byte slice stream
         public override void WriteByte(byte b)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => slice != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(slice != null);
             if (slice[upto] != 0)
             {
                 upto = pool.AllocSlice(slice, upto);
                 slice = pool.Buffer;
                 offset0 = pool.ByteOffset;
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => slice != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(slice != null);
             }
             slice[upto++] = (byte)b;
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => upto != slice.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(upto != slice.Length);
         }

         public override void WriteBytes(byte[] b, int offset, int len)
@@ -83,7 +83,7 @@ public override void WriteBytes(byte[] b, int offset, int len)
                 }

                 slice[upto++] = (byte)b[offset++];
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => upto != slice.Length);
+                if (Debugging.AssertsEnabled) Debugging.Assert(upto != slice.Length);
             }
         }
diff --git a/src/Lucene.Net/Index/CheckIndex.cs b/src/Lucene.Net/Index/CheckIndex.cs
index db614b59c7..135f87a64b 100644
--- a/src/Lucene.Net/Index/CheckIndex.cs
+++ b/src/Lucene.Net/Index/CheckIndex.cs
@@ -949,7 +949,7 @@ public static Status.FieldNormStatus TestFieldNorms(AtomicReader reader, TextWri
                 if (info.HasNorms)
                 {
#pragma warning disable 612, 618
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => reader.HasNorms(info.Name)); // deprecated path
+                    if (Debugging.AssertsEnabled) Debugging.Assert(reader.HasNorms(info.Name)); // deprecated path
#pragma warning restore 612, 618
                     CheckNorms(info, reader, infoStream);
                     ++status.TotFields;
@@ -957,7 +957,7 @@ public static Status.FieldNormStatus TestFieldNorms(AtomicReader reader, TextWri
                 else
                 {
#pragma warning disable 612, 618
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => !reader.HasNorms(info.Name)); // deprecated path
+                    if (Debugging.AssertsEnabled) Debugging.Assert(!reader.HasNorms(info.Name)); // deprecated path
#pragma warning restore 612, 618
                     if (reader.GetNormValues(info.Name) != null)
                     {
@@ -1102,7 +1102,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs,
                     break;
                 }

-                if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(term.IsValid());

                 // make sure terms arrive in order according to
                 // the comp
@@ -1238,7 +1238,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs,
                             // LUCENENET specific - restructured to reduce number of checks in production
                             if (!(payload is null))
                             {
-                                if (Debugging.AssertsEnabled) Debugging.Assert(payload.IsValid);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(payload.IsValid());
                                 if (payload.Length < 1)
                                 {
                                     throw new Exception("term " + term + ": doc " + doc + ": pos " + pos + " payload length is out of bounds " + payload.Length);
@@ -1456,7 +1456,7 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits liveDocs,
                 if (fieldTerms is BlockTreeTermsReader.FieldReader)
                 {
                     BlockTreeTermsReader.Stats stats = ((BlockTreeTermsReader.FieldReader)fieldTerms).ComputeStats();
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => stats != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(stats != null);
                     if (status.BlockTreeStats == null)
                     {
                         status.BlockTreeStats = new Dictionary();
@@ -1811,7 +1811,7 @@ private static void CheckBinaryDocValues(string fieldName, AtomicReader reader,
             for (int i = 0; i < reader.MaxDoc; i++)
             {
                 dv.Get(i, scratch);
-                if (Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid());
                 if (docsWithField.Get(i) == false && scratch.Length > 0)
                 {
                     throw new Exception("dv for field: " + fieldName + " is missing but has value=" + scratch + " for doc: " + i);
@@ -1862,7 +1862,7 @@ private static void CheckSortedDocValues(string fieldName, AtomicReader reader,
             for (int i = 0; i <= maxOrd; i++)
             {
                 dv.LookupOrd(i, scratch);
-                if (Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid());
                 if (lastValue != null)
                 {
                     if (scratch.CompareTo(lastValue) <= 0)
@@ -1954,7 +1954,7 @@ private static void CheckSortedSetDocValues(string fieldName, AtomicReader reade
             for (long i = 0; i <= maxOrd; i++)
             {
                 dv.LookupOrd(i, scratch);
-                if (Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid);
+                if (Debugging.AssertsEnabled) Debugging.Assert(scratch.IsValid());
                 if (lastValue != null)
                 {
                     if (scratch.CompareTo(lastValue) <= 0)
@@ -2157,25 +2157,25 @@ public static Status.TermVectorStatus TestTermVectors(AtomicReader reader, TextW
                             if (hasProx)
                             {
                                 postings = termsEnum.DocsAndPositions(null, postings);
-                                if (Debugging.AssertsEnabled) Debugging.Assert(() => postings != null);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(postings != null);
                                 docs = null;
                             }
                             else
                             {
                                 docs = termsEnum.Docs(null, docs);
-                                if (Debugging.AssertsEnabled) Debugging.Assert(() => docs != null);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(docs != null);
                                 postings = null;
                             }

                             DocsEnum docs2;
                             if (hasProx)
                             {
-                                if (Debugging.AssertsEnabled) Debugging.Assert(() => postings != null);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(postings != null);
                                 docs2 = postings;
                             }
                             else
                             {
-                                if (Debugging.AssertsEnabled) Debugging.Assert(() => docs != null);
+                                if (Debugging.AssertsEnabled) Debugging.Assert(docs != null);
                                 docs2 = docs;
                             }
@@ -2273,12 +2273,12 @@ public static Status.TermVectorStatus TestTermVectors(AtomicReader reader, TextW

                                         if (payload != null)
                                         {
-                                            if (Debugging.AssertsEnabled) Debugging.Assert(() => vectorsHasPayload);
+                                            if (Debugging.AssertsEnabled) Debugging.Assert(vectorsHasPayload);
                                         }

                                         if (postingsHasPayload && vectorsHasPayload)
                                         {
-                                            if (Debugging.AssertsEnabled) Debugging.Assert(() => postingsPostings != null);
+                                            if (Debugging.AssertsEnabled) Debugging.Assert(postingsPostings != null);

                                             if (payload == null)
                                             {
diff --git a/src/Lucene.Net/Index/CompositeReader.cs b/src/Lucene.Net/Index/CompositeReader.cs
index d22e6c2fcf..0c90f60802 100644
--- a/src/Lucene.Net/Index/CompositeReader.cs
+++ b/src/Lucene.Net/Index/CompositeReader.cs
@@ -83,7 +83,7 @@ public override string ToString()
             }
             buffer.Append('(');
             var subReaders = GetSequentialSubReaders();
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => subReaders != null);
+            if (Debugging.AssertsEnabled) Debugging.Assert(subReaders != null);
             if (subReaders.Count > 0)
             {
                 buffer.Append(subReaders[0]);
@@ -115,7 +115,7 @@ public override sealed IndexReaderContext Context
                 // lazy init without thread safety for perf reasons: Building the readerContext twice does not hurt!
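
[Note in the BufferedUpdatesStream and CheckIndex hunks above that bare method groups such as CheckDeleteStats, packet.Any, and term.IsValid gained parentheses. Under the old Func<bool> overload a method group converted implicitly to a delegate; the eager bool overload requires an actual invocation, and the AssertsEnabled guard keeps the potentially expensive check out of normal Release-mode execution. A self-contained sketch with hypothetical stand-in names:]

    using System;
    using System.Collections.Generic;

    internal sealed class DeleteStatsSketch
    {
        public static bool AssertsEnabled { get; set; } = true; // stand-in switch

        private static void Assert(bool condition) // stand-in for Debugging.Assert(bool)
        {
            if (!condition)
                throw new InvalidOperationException("assertion failed");
        }

        private readonly List<long> packetSizes = new List<long>();

        // Side-effect-free invariant check that returns true, so it can sit
        // inside an assert. Hypothetical stand-in for CheckDeleteStats().
        private bool CheckStats()
        {
            long total = 0;
            foreach (long size in packetSizes)
                total += size;
            return total >= 0;
        }

        public void Push(long packetSize)
        {
            packetSizes.Add(packetSize);

            // Old: Debugging.Assert(CheckStats);  // method group -> Func<bool>
            // New: the parentheses invoke the check eagerly; the guard means
            // the O(n) walk over packetSizes runs only when asserts are on.
            if (AssertsEnabled) Assert(CheckStats());
        }
    }
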
                 if (readerContext == null)
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => GetSequentialSubReaders() != null);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(GetSequentialSubReaders() != null);
                     readerContext = CompositeReaderContext.Create(this);
                 }
                 return readerContext;
diff --git a/src/Lucene.Net/Index/CompositeReaderContext.cs b/src/Lucene.Net/Index/CompositeReaderContext.cs
index 18adfac759..f207ac037a 100644
--- a/src/Lucene.Net/Index/CompositeReaderContext.cs
+++ b/src/Lucene.Net/Index/CompositeReaderContext.cs
@@ -70,7 +70,7 @@ public override IList Leaves
                 {
                     throw new NotSupportedException("this is not a top-level context.");
                 }
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => leaves != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(leaves != null);
                 return leaves;
             }
         }
@@ -126,7 +126,7 @@ internal IndexReaderContext Build(CompositeReaderContext parent, IndexReader rea
                 children[i] = Build(newParent, r, i, newDocBase);
                 newDocBase += r.MaxDoc;
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => newDocBase == cr.MaxDoc);
+            if (Debugging.AssertsEnabled) Debugging.Assert(newDocBase == cr.MaxDoc);
             return newParent;
         }
     }
diff --git a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
index 915e973c62..be019b0530 100644
--- a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
+++ b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
@@ -395,7 +395,7 @@ public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMer
         {
             lock (this)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => !Monitor.IsEntered(writer));
+                if (Debugging.AssertsEnabled) Debugging.Assert(!Monitor.IsEntered(writer));

                 this.m_writer = writer;
diff --git a/src/Lucene.Net/Index/DirectoryReader.cs b/src/Lucene.Net/Index/DirectoryReader.cs
index 759c8f0bce..9e013f18ff 100644
--- a/src/Lucene.Net/Index/DirectoryReader.cs
+++ b/src/Lucene.Net/Index/DirectoryReader.cs
@@ -171,7 +171,7 @@ public abstract class DirectoryReader : BaseCompositeReader
         public static DirectoryReader OpenIfChanged(DirectoryReader oldReader)
         {
             DirectoryReader newReader = oldReader.DoOpenIfChanged();
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => newReader != oldReader);
+            if (Debugging.AssertsEnabled) Debugging.Assert(newReader != oldReader);
             return newReader;
         }
@@ -184,7 +184,7 @@ public static DirectoryReader OpenIfChanged(DirectoryReader oldReader)
         public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexCommit commit)
         {
             DirectoryReader newReader = oldReader.DoOpenIfChanged(commit);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => newReader != oldReader);
+            if (Debugging.AssertsEnabled) Debugging.Assert(newReader != oldReader);
             return newReader;
         }
@@ -251,7 +251,7 @@ public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexComm
         public static DirectoryReader OpenIfChanged(DirectoryReader oldReader, IndexWriter writer, bool applyAllDeletes)
         {
             DirectoryReader newReader = oldReader.DoOpenIfChanged(writer, applyAllDeletes);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => newReader != oldReader);
+            if (Debugging.AssertsEnabled) Debugging.Assert(newReader != oldReader);
             return newReader;
         }
diff --git a/src/Lucene.Net/Index/DocFieldProcessor.cs b/src/Lucene.Net/Index/DocFieldProcessor.cs
index fa1e820a7d..343d00549f 100644
--- a/src/Lucene.Net/Index/DocFieldProcessor.cs
+++ b/src/Lucene.Net/Index/DocFieldProcessor.cs
@@ -79,7 +79,7 @@ public override void Flush(SegmentWriteState state)
                 childFields[f.FieldInfo.Name] = f;
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Count == totalFieldCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(fields.Count == totalFieldCount);

             storedConsumer.Flush(state);
             consumer.Flush(childFields, state);
@@ -166,14 +166,14 @@ public ICollection Fields()
                     field = field.next;
                 }
             }
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => fields.Count == totalFieldCount);
+            if (Debugging.AssertsEnabled) Debugging.Assert(fields.Count == totalFieldCount);
             return fields;
         }

         private void Rehash()
         {
             int newHashSize = (fieldHash.Length * 2);
-            if (Debugging.AssertsEnabled) Debugging.Assert(() => newHashSize > fieldHash.Length);
+            if (Debugging.AssertsEnabled) Debugging.Assert(newHashSize > fieldHash.Length);

             DocFieldProcessorPerField[] newHashArray = new DocFieldProcessorPerField[newHashSize];
@@ -246,7 +246,7 @@ public override void ProcessDocument(FieldInfos.Builder fieldInfos)
                     // need to addOrUpdate so that FieldInfos can update globalFieldNumbers
                     // with the correct DocValue type (LUCENE-5192)
                     FieldInfo fi = fieldInfos.AddOrUpdate(fieldName, field.IndexableFieldType);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => fi == fp.fieldInfo, () => "should only have updated an existing FieldInfo instance");
+                    if (Debugging.AssertsEnabled) Debugging.Assert(fi == fp.fieldInfo, () => "should only have updated an existing FieldInfo instance");
                 }

                 if (thisFieldGen != fp.lastGen)
diff --git a/src/Lucene.Net/Index/DocTermOrds.cs b/src/Lucene.Net/Index/DocTermOrds.cs
index 68f948b354..c49bbc4214 100644
--- a/src/Lucene.Net/Index/DocTermOrds.cs
+++ b/src/Lucene.Net/Index/DocTermOrds.cs
@@ -752,7 +752,7 @@ public OrdWrappedTermsEnum(DocTermOrds outerInstance, AtomicReader reader)
                 this.outerInstance = outerInstance;

                 InitializeInstanceFields();
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_indexedTermsArray != null);
+                if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.m_indexedTermsArray != null);
                 termsEnum = reader.Fields.GetTerms(outerInstance.m_field).GetIterator(null);
             }
@@ -804,10 +804,10 @@ public override SeekStatus SeekCeil(BytesRef target)
                 {
                     // we hit the term exactly... lucky us!
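
[One asymmetry worth calling out across these hunks: while the conditions are now eager, failure messages stay behind a Func<string> factory (for example () => "got " + newDocUpto), so concatenation and formatting work happen only when an assert actually fires; passing a method group such as numBits.ToString, as in the ForUtil.cs hunks, is the zero-ceremony form of the same idea. A sketch under the same stand-in assumptions as the earlier examples; the overload shape is inferred from the call sites, not quoted from the project:]

    using System;

    internal static class LazyMessageSketch
    {
        public static bool AssertsEnabled { get; set; } = true; // stand-in switch

        // Stand-in for the message-taking overload: eager condition,
        // deferred message.
        public static void Assert(bool condition, Func<string> messageFactory)
        {
            if (AssertsEnabled && !condition)
                throw new InvalidOperationException(messageFactory());
        }

        public static void Demo(int newDocUpto, int blockSize)
        {
            // "got " + newDocUpto is built only if the condition is false,
            // so the hot path pays no string-concatenation cost.
            if (AssertsEnabled)
                Assert(newDocUpto % blockSize == 0, () => "got " + newDocUpto);
        }
    }
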
TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(target); - if (Debugging.AssertsEnabled) Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(seekStatus == TermsEnum.SeekStatus.FOUND); ord = startIdx << outerInstance.indexIntervalBits; SetTerm(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(term != null); return SeekStatus.FOUND; } @@ -818,10 +818,10 @@ public override SeekStatus SeekCeil(BytesRef target) { // our target occurs *before* the first term TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(target); - if (Debugging.AssertsEnabled) Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.NOT_FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(seekStatus == TermsEnum.SeekStatus.NOT_FOUND); ord = 0; SetTerm(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(term != null); return SeekStatus.NOT_FOUND; } @@ -837,10 +837,10 @@ public override SeekStatus SeekCeil(BytesRef target) { // seek to the right block TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(outerInstance.m_indexedTermsArray[startIdx]); - if (Debugging.AssertsEnabled) Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(seekStatus == TermsEnum.SeekStatus.FOUND); ord = startIdx << outerInstance.indexIntervalBits; SetTerm(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); // should be non-null since it's in the index + if (Debugging.AssertsEnabled) Debugging.Assert(term != null); // should be non-null since it's in the index } while (term != null && term.CompareTo(target) < 0) @@ -874,7 +874,7 @@ public override void SeekExact(long targetOrd) ord = idx << outerInstance.indexIntervalBits; delta = (int)(targetOrd - ord); TermsEnum.SeekStatus seekStatus = termsEnum.SeekCeil(@base); - if (Debugging.AssertsEnabled) Debugging.Assert(() => seekStatus == TermsEnum.SeekStatus.FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(seekStatus == TermsEnum.SeekStatus.FOUND); } else { @@ -886,14 +886,14 @@ public override void SeekExact(long targetOrd) BytesRef br = termsEnum.Next(); if (br == null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(false); return; } ord++; } SetTerm(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => term != null); + if (Debugging.AssertsEnabled) Debugging.Assert(term != null); } private BytesRef SetTerm() diff --git a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs index 9f6d5d8ee1..6a14132eff 100644 --- a/src/Lucene.Net/Index/DocValuesFieldUpdates.cs +++ b/src/Lucene.Net/Index/DocValuesFieldUpdates.cs @@ -114,14 +114,14 @@ internal virtual DocValuesFieldUpdates NewUpdates(string field, DocValuesFieldUp { case DocValuesFieldUpdatesType.NUMERIC: NumericDocValuesFieldUpdates numericUpdates; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !numericDVUpdates.ContainsKey(field)); + if (Debugging.AssertsEnabled) Debugging.Assert(!numericDVUpdates.ContainsKey(field)); numericUpdates = new NumericDocValuesFieldUpdates(field, maxDoc); numericDVUpdates[field] = numericUpdates; return numericUpdates; case DocValuesFieldUpdatesType.BINARY: BinaryDocValuesFieldUpdates binaryUpdates; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !binaryDVUpdates.ContainsKey(field)); + if 
(Debugging.AssertsEnabled) Debugging.Assert(!binaryDVUpdates.ContainsKey(field)); binaryUpdates = new BinaryDocValuesFieldUpdates(field, maxDoc); binaryDVUpdates[field] = binaryUpdates; return binaryUpdates; diff --git a/src/Lucene.Net/Index/DocValuesProcessor.cs b/src/Lucene.Net/Index/DocValuesProcessor.cs index 9c9fae887c..9cb3b70d88 100644 --- a/src/Lucene.Net/Index/DocValuesProcessor.cs +++ b/src/Lucene.Net/Index/DocValuesProcessor.cs @@ -82,7 +82,7 @@ public override void AddField(int docID, IIndexableField field, FieldInfo fieldI } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "unrecognized DocValues.Type: " + dvType); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "unrecognized DocValues.Type: " + dvType); } } } @@ -219,7 +219,7 @@ private string GetTypeDesc(DocValuesWriter obj) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => obj is SortedDocValuesWriter); + if (Debugging.AssertsEnabled) Debugging.Assert(obj is SortedDocValuesWriter); return "sorted"; } } diff --git a/src/Lucene.Net/Index/DocumentsWriter.cs b/src/Lucene.Net/Index/DocumentsWriter.cs index 5e53b18d5c..b10606ea28 100644 --- a/src/Lucene.Net/Index/DocumentsWriter.cs +++ b/src/Lucene.Net/Index/DocumentsWriter.cs @@ -244,7 +244,7 @@ internal void Abort(IndexWriter writer) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !Monitor.IsEntered(writer), () => "IndexWriter lock should never be hold when aborting"); + if (Debugging.AssertsEnabled) Debugging.Assert(!Monitor.IsEntered(writer), () => "IndexWriter lock should never be hold when aborting"); bool success = false; JCG.HashSet newFilesSet = new JCG.HashSet(); try @@ -287,7 +287,7 @@ internal void LockAndAbortAll(IndexWriter indexWriter) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => indexWriter.HoldsFullFlushLock); + if (Debugging.AssertsEnabled) Debugging.Assert(indexWriter.HoldsFullFlushLock); if (infoStream.IsEnabled("DW")) { infoStream.Message("DW", "lockAndAbortAll"); @@ -327,7 +327,7 @@ internal void LockAndAbortAll(IndexWriter indexWriter) private void AbortThreadState(ThreadState perThread, ISet newFiles) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(perThread.IsHeldByCurrentThread); if (perThread.IsActive) // we might be closed { if (perThread.IsInitialized) @@ -350,7 +350,7 @@ private void AbortThreadState(ThreadState perThread, ISet newFiles) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => closed); + if (Debugging.AssertsEnabled) Debugging.Assert(closed); } } @@ -358,7 +358,7 @@ internal void UnlockAllAfterAbortAll(IndexWriter indexWriter) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => indexWriter.HoldsFullFlushLock); + if (Debugging.AssertsEnabled) Debugging.Assert(indexWriter.HoldsFullFlushLock); if (infoStream.IsEnabled("DW")) { infoStream.Message("DW", "unlockAll"); @@ -498,10 +498,10 @@ internal bool UpdateDocuments(IEnumerable> docs, An if (!perThread.IsActive) { EnsureOpen(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "perThread is not active but we are still open"); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "perThread is not active but we are still open"); } EnsureInitialized(perThread); - if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsInitialized); + if (Debugging.AssertsEnabled) Debugging.Assert(perThread.IsInitialized); DocumentsWriterPerThread dwpt = 
perThread.dwpt; int dwptNumDocs = dwpt.NumDocsInRAM; try @@ -544,10 +544,10 @@ internal bool UpdateDocument(IEnumerable<IIndexableField> doc, Analyzer analyzer if (!perThread.IsActive) { EnsureOpen(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "perThread is not active but we are still open"); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "perThread is not active but we are still open"); } EnsureInitialized(perThread); - if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsInitialized); + if (Debugging.AssertsEnabled) Debugging.Assert(perThread.IsInitialized); DocumentsWriterPerThread dwpt = perThread.dwpt; int dwptNumDocs = dwpt.NumDocsInRAM; try @@ -588,7 +588,7 @@ private bool DoFlush(DocumentsWriterPerThread flushingDWPT) SegmentFlushTicket ticket = null; try { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFullFlushDelQueue == null || flushingDWPT.deleteQueue == currentFullFlushDelQueue, () => "expected: " + currentFullFlushDelQueue + " but was: " + flushingDWPT.deleteQueue + " " + flushControl.IsFullFlush); + if (Debugging.AssertsEnabled) Debugging.Assert(currentFullFlushDelQueue == null || flushingDWPT.deleteQueue == currentFullFlushDelQueue, () => "expected: " + currentFullFlushDelQueue + " but was: " + flushingDWPT.deleteQueue + " " + flushControl.IsFullFlush); /* * Since with DWPT the flush process is concurrent and several DWPT * could flush at the same time we must maintain the order of the @@ -735,12 +735,12 @@ internal bool FlushAllThreads(IndexWriter indexWriter) * otherwise a new DWPT could sneak into the loop with an already flushing * delete queue */ flushControl.MarkForFullFlush(); // swaps the delQueue synced on FlushControl - if (Debugging.AssertsEnabled) Debugging.Assert(() => SetFlushingDeleteQueue(flushingDeleteQueue)); + if (Debugging.AssertsEnabled) Debugging.Assert(SetFlushingDeleteQueue(flushingDeleteQueue)); } if (Debugging.AssertsEnabled) { - Debugging.Assert(() => currentFullFlushDelQueue != null); - Debugging.Assert(() => currentFullFlushDelQueue != deleteQueue); + Debugging.Assert(currentFullFlushDelQueue != null); + Debugging.Assert(currentFullFlushDelQueue != deleteQueue); } bool anythingFlushed = false; @@ -763,11 +763,11 @@ internal bool FlushAllThreads(IndexWriter indexWriter) ticketQueue.AddDeletes(flushingDeleteQueue); } ticketQueue.ForcePurge(indexWriter); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !flushingDeleteQueue.AnyChanges() && !ticketQueue.HasTickets); + if (Debugging.AssertsEnabled) Debugging.Assert(!flushingDeleteQueue.AnyChanges() && !ticketQueue.HasTickets); } finally { - if (Debugging.AssertsEnabled) Debugging.Assert(() => flushingDeleteQueue == currentFullFlushDelQueue); + if (Debugging.AssertsEnabled) Debugging.Assert(flushingDeleteQueue == currentFullFlushDelQueue); } return anythingFlushed; } @@ -780,7 +780,7 @@ internal void FinishFullFlush(bool success) { infoStream.Message("DW", Thread.CurrentThread.Name + " finishFullFlush success=" + success); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => SetFlushingDeleteQueue(null)); + if (Debugging.AssertsEnabled) Debugging.Assert(SetFlushingDeleteQueue(null)); if (success) { // Release the flush lock @@ -813,7 +813,7 @@ internal sealed class ApplyDeletesEvent : IEvent internal ApplyDeletesEvent() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => instCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(instCount == 0); instCount++; } @@ -830,7 +830,7 @@ internal sealed class MergePendingEvent : IEvent 
internal MergePendingEvent() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => instCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(instCount == 0); instCount++; } @@ -847,7 +847,7 @@ internal sealed class ForcedPurgeEvent : IEvent internal ForcedPurgeEvent() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => instCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(instCount == 0); instCount++; } diff --git a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs index 99afb2039e..b262315a94 100644 --- a/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs +++ b/src/Lucene.Net/Index/DocumentsWriterDeleteQueue.cs @@ -144,7 +144,7 @@ internal void Add(Term term, DeleteSlice slice) * competing updates wins! */ slice.sliceTail = termNode; - if (Debugging.AssertsEnabled) Debugging.Assert(() => slice.sliceHead != slice.sliceTail, () => "slice head and tail must differ after add"); + if (Debugging.AssertsEnabled) Debugging.Assert(slice.sliceHead != slice.sliceTail, () => "slice head and tail must differ after add"); TryApplyGlobalSlice(); // TODO doing this each time is not necessary maybe // we can do it just every n times or so? } @@ -293,7 +293,7 @@ internal class DeleteSlice internal DeleteSlice(Node currentTail) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentTail != null); + if (Debugging.AssertsEnabled) Debugging.Assert(currentTail != null); /* * Initially this is a 0 length slice pointing to the 'current' tail of * the queue. Once we update the slice we only need to assign the tail and @@ -319,7 +319,7 @@ internal virtual void Apply(BufferedUpdates del, int docIDUpto) do { current = current.next; - if (Debugging.AssertsEnabled) Debugging.Assert(() => current != null, () => "slice property violated: the node between the head and the tail must not be null"); + if (Debugging.AssertsEnabled) Debugging.Assert(current != null, () => "slice property violated: the node between the head and the tail must not be null"); current.Apply(del, docIDUpto); // System.out.println(Thread.currentThread().getName() + ": pull " + current + " docIDUpto=" + docIDUpto); } while (current != sliceTail); diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs index 32630b3d51..faf35c918f 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs @@ -152,7 +152,7 @@ private bool AssertMemory() * fail. 
To prevent this we only assert if the largest document seen * is smaller than the 1/2 of the maxRamBufferMB */ - if (Debugging.AssertsEnabled) Debugging.Assert(() => ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected + if (Debugging.AssertsEnabled) Debugging.Assert(ram <= expected, () => "actual mem: " + ram + " byte, expected mem: " + expected + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes + ", pending DWPT: " + numPending + ", flushing DWPT: " + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes @@ -179,7 +179,7 @@ private void CommitPerThreadBytes(ThreadState perThread) { activeBytes += delta; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => UpdatePeaks(delta)); + if (Debugging.AssertsEnabled) Debugging.Assert(UpdatePeaks(delta)); } // only for asserts @@ -239,7 +239,7 @@ internal DocumentsWriterPerThread DoAfterDocument(ThreadState perThread, bool is finally { bool stalled = UpdateStallState(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertNumDocsSinceStalled(stalled) && AssertMemory()); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertNumDocsSinceStalled(stalled) && AssertMemory()); } } } @@ -268,14 +268,14 @@ internal void DoAfterFlush(DocumentsWriterPerThread dwpt) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => flushingWriters.ContainsKey(dwpt)); + if (Debugging.AssertsEnabled) Debugging.Assert(flushingWriters.ContainsKey(dwpt)); try { long? bytes = flushingWriters[dwpt]; flushingWriters.Remove(dwpt); flushBytes -= (long)bytes; perThreadPool.Recycle(dwpt); - if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory()); } finally { @@ -293,7 +293,7 @@ internal void DoAfterFlush(DocumentsWriterPerThread dwpt) private bool UpdateStallState() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(this)); + if (Debugging.AssertsEnabled) Debugging.Assert(Monitor.IsEntered(this)); long limit = StallLimitBytes; /* * we block indexing threads if net byte grows due to slow flushes @@ -338,7 +338,7 @@ public void SetFlushPending(ThreadState perThread) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !perThread.flushPending); + if (Debugging.AssertsEnabled) Debugging.Assert(!perThread.flushPending); if (perThread.dwpt.NumDocsInRAM > 0) { perThread.flushPending = true; // write access synced @@ -346,7 +346,7 @@ public void SetFlushPending(ThreadState perThread) flushBytes += bytes; activeBytes -= bytes; numPending++; // write access synced - if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory()); } // don't assert on numDocs since we could hit an abort excp. while selecting that dwpt for flushing } } @@ -365,7 +365,7 @@ internal void DoOnAbort(ThreadState state) { activeBytes -= state.bytesUsed; } - if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory()); // Take it out of the loop this DWPT is stale perThreadPool.Reset(state, closed); } @@ -380,7 +380,7 @@ internal DocumentsWriterPerThread TryCheckoutForFlush(ThreadState perThread) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsHeldByCurrentThread); // LUCENENET specific: Since .NET Core doesn't use unfair locking, we need to ensure the current thread has a lock before calling InternalTryCheckoutForFlush. 
+ if (Debugging.AssertsEnabled) Debugging.Assert(perThread.IsHeldByCurrentThread); // LUCENENET specific: Since .NET Core doesn't use unfair locking, we need to ensure the current thread has a lock before calling InternalTryCheckoutForFlush. return perThread.flushPending ? InternalTryCheckOutForFlush(perThread) : null; } } @@ -392,8 +392,8 @@ private void CheckoutAndBlock(ThreadState perThread) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => perThread.flushPending, () => "can not block non-pending threadstate"); - Debugging.Assert(() => fullFlush, () => "can not block if fullFlush == false"); + Debugging.Assert(perThread.flushPending, () => "can not block non-pending threadstate"); + Debugging.Assert(fullFlush, () => "can not block if fullFlush == false"); } DocumentsWriterPerThread dwpt; long bytes = perThread.bytesUsed; @@ -412,9 +412,9 @@ private DocumentsWriterPerThread InternalTryCheckOutForFlush(ThreadState perThre if (Debugging.AssertsEnabled) { // LUCENENET specific - Since we need to mimic the unfair behavior of ReentrantLock, we need to ensure that all threads that enter here hold the lock. - Debugging.Assert(() => perThread.IsHeldByCurrentThread); - Debugging.Assert(() => Monitor.IsEntered(this)); - Debugging.Assert(() => perThread.flushPending); + Debugging.Assert(perThread.IsHeldByCurrentThread); + Debugging.Assert(Monitor.IsEntered(this)); + Debugging.Assert(perThread.flushPending); } try { @@ -423,12 +423,12 @@ private DocumentsWriterPerThread InternalTryCheckOutForFlush(ThreadState perThre // We are pending so all memory is already moved to flushBytes if (perThread.IsInitialized) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => perThread.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(perThread.IsHeldByCurrentThread); DocumentsWriterPerThread dwpt; long bytes = perThread.bytesUsed; // do that before // replace! 
dwpt = perThreadPool.Reset(perThread, closed); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !flushingWriters.ContainsKey(dwpt), () => "DWPT is already flushing"); + if (Debugging.AssertsEnabled) Debugging.Assert(!flushingWriters.ContainsKey(dwpt), () => "DWPT is already flushing"); // Record the flushing DWPT to reduce flushBytes in doAfterFlush flushingWriters[dwpt] = bytes; numPending--; // write access synced @@ -625,8 +625,8 @@ internal void MarkForFullFlush() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !fullFlush, () => "called DWFC#markForFullFlush() while full flush is still running"); - Debugging.Assert(() => fullFlushBuffer.Count == 0, () => "full flush buffer should be empty: " + fullFlushBuffer); + Debugging.Assert(!fullFlush, () => "called DWFC#markForFullFlush() while full flush is still running"); + Debugging.Assert(fullFlushBuffer.Count == 0, () => "full flush buffer should be empty: " + fullFlushBuffer); } fullFlush = true; flushingQueue = documentsWriter.deleteQueue; @@ -650,7 +650,7 @@ internal void MarkForFullFlush() } continue; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => next.dwpt.deleteQueue == flushingQueue || next.dwpt.deleteQueue == documentsWriter.deleteQueue, () => " flushingQueue: " + flushingQueue + " currentqueue: " + documentsWriter.deleteQueue + " perThread queue: " + next.dwpt.deleteQueue + " numDocsInRam: " + next.dwpt.NumDocsInRAM); + if (Debugging.AssertsEnabled) Debugging.Assert(next.dwpt.deleteQueue == flushingQueue || next.dwpt.deleteQueue == documentsWriter.deleteQueue, () => " flushingQueue: " + flushingQueue + " currentqueue: " + documentsWriter.deleteQueue + " perThread queue: " + next.dwpt.deleteQueue + " numDocsInRam: " + next.dwpt.NumDocsInRAM); if (next.dwpt.deleteQueue != flushingQueue) { // this one is already a new DWPT @@ -670,7 +670,7 @@ internal void MarkForFullFlush() * a chance that this happens since we marking DWPT for full flush without * blocking indexing.*/ PruneBlockedQueue(flushingQueue); - if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertBlockedFlushes(documentsWriter.deleteQueue)); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertBlockedFlushes(documentsWriter.deleteQueue)); //FlushQueue.AddAll(FullFlushBuffer); foreach (var dwpt in fullFlushBuffer) { @@ -679,7 +679,7 @@ internal void MarkForFullFlush() fullFlushBuffer.Clear(); UpdateStallState(); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertActiveDeleteQueue(documentsWriter.deleteQueue)); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertActiveDeleteQueue(documentsWriter.deleteQueue)); } private bool AssertActiveDeleteQueue(DocumentsWriterDeleteQueue queue) @@ -691,7 +691,7 @@ private bool AssertActiveDeleteQueue(DocumentsWriterDeleteQueue queue) next.@Lock(); try { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !next.IsInitialized || next.dwpt.deleteQueue == queue, () => "isInitialized: " + next.IsInitialized + " numDocs: " + (next.IsInitialized ? next.dwpt.NumDocsInRAM : 0)); + if (Debugging.AssertsEnabled) Debugging.Assert(!next.IsInitialized || next.dwpt.deleteQueue == queue, () => "isInitialized: " + next.IsInitialized + " numDocs: " + (next.IsInitialized ? 
next.dwpt.NumDocsInRAM : 0)); } finally { @@ -712,10 +712,10 @@ internal void AddFlushableState(ThreadState perThread) DocumentsWriterPerThread dwpt = perThread.dwpt; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => perThread.IsHeldByCurrentThread); - Debugging.Assert(() => perThread.IsInitialized); - Debugging.Assert(() => fullFlush); - Debugging.Assert(() => dwpt.deleteQueue != documentsWriter.deleteQueue); + Debugging.Assert(perThread.IsHeldByCurrentThread); + Debugging.Assert(perThread.IsInitialized); + Debugging.Assert(fullFlush); + Debugging.Assert(dwpt.deleteQueue != documentsWriter.deleteQueue); } if (dwpt.NumDocsInRAM > 0) { @@ -728,8 +728,8 @@ internal void AddFlushableState(ThreadState perThread) DocumentsWriterPerThread flushingDWPT = InternalTryCheckOutForFlush(perThread); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => flushingDWPT != null, () => "DWPT must never be null here since we hold the lock and it holds documents"); - Debugging.Assert(() => dwpt == flushingDWPT, () => "flushControl returned different DWPT"); + Debugging.Assert(flushingDWPT != null, () => "DWPT must never be null here since we hold the lock and it holds documents"); + Debugging.Assert(dwpt == flushingDWPT, () => "flushControl returned different DWPT"); } fullFlushBuffer.Add(flushingDWPT); } @@ -753,7 +753,7 @@ private void PruneBlockedQueue(DocumentsWriterDeleteQueue flushingQueue) if (blockedFlush.Dwpt.deleteQueue == flushingQueue) { blockedFlushes.Remove(node); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !flushingWriters.ContainsKey(blockedFlush.Dwpt), () => "DWPT is already flushing"); + if (Debugging.AssertsEnabled) Debugging.Assert(!flushingWriters.ContainsKey(blockedFlush.Dwpt), () => "DWPT is already flushing"); // Record the flushing DWPT to reduce flushBytes in doAfterFlush flushingWriters[blockedFlush.Dwpt] = blockedFlush.Bytes; // don't decr pending here - its already done when DWPT is blocked @@ -769,17 +769,17 @@ internal void FinishFullFlush() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => fullFlush); - Debugging.Assert(() => flushQueue.Count == 0); - Debugging.Assert(() => flushingWriters.Count == 0); + Debugging.Assert(fullFlush); + Debugging.Assert(flushQueue.Count == 0); + Debugging.Assert(flushingWriters.Count == 0); } try { if (blockedFlushes.Count > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertBlockedFlushes(documentsWriter.deleteQueue)); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertBlockedFlushes(documentsWriter.deleteQueue)); PruneBlockedQueue(documentsWriter.deleteQueue); - if (Debugging.AssertsEnabled) Debugging.Assert(() => blockedFlushes.Count == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(blockedFlushes.Count == 0); } } finally @@ -794,7 +794,7 @@ internal bool AssertBlockedFlushes(DocumentsWriterDeleteQueue flushingQueue) { foreach (BlockedFlush blockedFlush in blockedFlushes) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => blockedFlush.Dwpt.deleteQueue == flushingQueue); + if (Debugging.AssertsEnabled) Debugging.Assert(blockedFlush.Dwpt.deleteQueue == flushingQueue); } return true; } diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs index 4d18732137..c8ccede9d6 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs @@ -63,13 +63,13 @@ internal virtual void AddDeletes(DocumentsWriterDeleteQueue deleteQueue) private void IncTickets() { int numTickets = 
ticketCount.IncrementAndGet(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => numTickets > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numTickets > 0); } private void DecTickets() { int numTickets = ticketCount.DecrementAndGet(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => numTickets >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numTickets >= 0); } internal virtual SegmentFlushTicket AddFlushTicket(DocumentsWriterPerThread dwpt) @@ -121,14 +121,14 @@ internal virtual bool HasTickets { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ticketCount >= 0, () => "ticketCount should be >= 0 but was: " + ticketCount); + if (Debugging.AssertsEnabled) Debugging.Assert(ticketCount >= 0, () => "ticketCount should be >= 0 but was: " + ticketCount); return ticketCount != 0; } } private int InnerPurge(IndexWriter writer) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => purgeLock.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(purgeLock.IsHeldByCurrentThread); int numPurged = 0; while (true) { @@ -159,7 +159,7 @@ private int InnerPurge(IndexWriter writer) // finally remove the published ticket from the queue FlushTicket poll = queue.Dequeue(); ticketCount.DecrementAndGet(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => poll == head); + if (Debugging.AssertsEnabled) Debugging.Assert(poll == head); } } } @@ -175,8 +175,8 @@ internal virtual int ForcePurge(IndexWriter writer) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !Monitor.IsEntered(this)); - Debugging.Assert(() => !Monitor.IsEntered(writer)); + Debugging.Assert(!Monitor.IsEntered(this)); + Debugging.Assert(!Monitor.IsEntered(writer)); } purgeLock.@Lock(); try @@ -193,8 +193,8 @@ internal virtual int TryPurge(IndexWriter writer) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !Monitor.IsEntered(this)); - Debugging.Assert(() => !Monitor.IsEntered(writer)); + Debugging.Assert(!Monitor.IsEntered(this)); + Debugging.Assert(!Monitor.IsEntered(writer)); } if (purgeLock.TryLock()) { @@ -228,7 +228,7 @@ internal abstract class FlushTicket protected FlushTicket(FrozenBufferedUpdates frozenUpdates) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => frozenUpdates != null); + if (Debugging.AssertsEnabled) Debugging.Assert(frozenUpdates != null); this.m_frozenUpdates = frozenUpdates; } @@ -246,8 +246,8 @@ protected void PublishFlushedSegment(IndexWriter indexWriter, FlushedSegment new { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => newSegment != null); - Debugging.Assert(() => newSegment.segmentInfo != null); + Debugging.Assert(newSegment != null); + Debugging.Assert(newSegment.segmentInfo != null); } FrozenBufferedUpdates segmentUpdates = newSegment.segmentUpdates; //System.out.println("FLUSH: " + newSegment.segmentInfo.info.name); @@ -269,7 +269,7 @@ protected void FinishFlush(IndexWriter indexWriter, FlushedSegment newSegment, F // Finish the flushed segment and publish it to IndexWriter if (newSegment == null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferedUpdates != null); + if (Debugging.AssertsEnabled) Debugging.Assert(bufferedUpdates != null); if (bufferedUpdates != null && bufferedUpdates.Any()) { indexWriter.PublishFrozenUpdates(bufferedUpdates); @@ -295,7 +295,7 @@ internal GlobalDeletesTicket(FrozenBufferedUpdates frozenUpdates) // LUCENENET N protected internal override void Publish(IndexWriter writer) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !m_published, () => "ticket was already 
publised - can not publish twice"); + if (Debugging.AssertsEnabled) Debugging.Assert(!m_published, () => "ticket was already publised - can not publish twice"); m_published = true; // its a global ticket - no segment to publish FinishFlush(writer, null, m_frozenUpdates); @@ -316,20 +316,20 @@ internal SegmentFlushTicket(FrozenBufferedUpdates frozenDeletes) // LUCENENET NO protected internal override void Publish(IndexWriter writer) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !m_published, () => "ticket was already publised - can not publish twice"); + if (Debugging.AssertsEnabled) Debugging.Assert(!m_published, () => "ticket was already publised - can not publish twice"); m_published = true; FinishFlush(writer, segment, m_frozenUpdates); } internal void SetSegment(FlushedSegment segment) // LUCENENET NOTE: Made internal rather than protected because class is sealed { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !failed); + if (Debugging.AssertsEnabled) Debugging.Assert(!failed); this.segment = segment; } internal void SetFailed() // LUCENENET NOTE: Made internal rather than protected because class is sealed { - if (Debugging.AssertsEnabled) Debugging.Assert(() => segment == null); + if (Debugging.AssertsEnabled) Debugging.Assert(segment == null); failed = true; } diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs index 3e66603568..1329f85f00 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs @@ -236,12 +236,12 @@ public DocumentsWriterPerThread(string segmentName, Directory directory, LiveInd pendingUpdates = new BufferedUpdates(); intBlockAllocator = new Int32BlockAllocator(bytesUsed); this.deleteQueue = deleteQueue; - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocsInRAM == 0, () => "num docs " + numDocsInRAM); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocsInRAM == 0, () => "num docs " + numDocsInRAM); pendingUpdates.Clear(); deleteSlice = deleteQueue.NewSlice(); segmentInfo = new SegmentInfo(directoryOrig, Constants.LUCENE_MAIN_VERSION, segmentName, -1, false, codec, null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocsInRAM == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocsInRAM == 0); if (INFO_VERBOSE && infoStream.IsEnabled("DWPT")) { infoStream.Message("DWPT", Thread.CurrentThread.Name + " init seg=" + segmentName + " delQueue=" + deleteQueue); @@ -276,8 +276,8 @@ public virtual void UpdateDocument(IEnumerable doc, Analyzer an { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => TestPoint("DocumentsWriterPerThread addDocument start")); - Debugging.Assert(() => deleteQueue != null); + Debugging.Assert(TestPoint("DocumentsWriterPerThread addDocument start")); + Debugging.Assert(deleteQueue != null); } docState.doc = doc; docState.analyzer = analyzer; @@ -335,8 +335,8 @@ public virtual int UpdateDocuments(IEnumerable> doc { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => TestPoint("DocumentsWriterPerThread addDocuments start")); - Debugging.Assert(() => deleteQueue != null); + Debugging.Assert(TestPoint("DocumentsWriterPerThread addDocuments start")); + Debugging.Assert(deleteQueue != null); } docState.analyzer = analyzer; if (INFO_VERBOSE && infoStream.IsEnabled("DWPT")) @@ -400,7 +400,7 @@ public virtual int UpdateDocuments(IEnumerable> doc if (delTerm != null) { deleteQueue.Add(delTerm, deleteSlice); - if (Debugging.AssertsEnabled) Debugging.Assert(() => 
deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); + if (Debugging.AssertsEnabled) Debugging.Assert(deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); deleteSlice.Apply(pendingUpdates, numDocsInRAM - docCount); } } @@ -439,7 +439,7 @@ private void FinishDocument(Term delTerm) if (delTerm != null) { deleteQueue.Add(delTerm, deleteSlice); - if (Debugging.AssertsEnabled) Debugging.Assert(() => deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); + if (Debugging.AssertsEnabled) Debugging.Assert(deleteSlice.IsTailItem(delTerm), () => "expected the delete term as the tail item"); } else { @@ -490,7 +490,7 @@ internal virtual void DeleteDocID(int docIDUpto) /// internal virtual FrozenBufferedUpdates PrepareFlush() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocsInRAM > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocsInRAM > 0); FrozenBufferedUpdates globalUpdates = deleteQueue.FreezeGlobalBuffer(deleteSlice); /* deleteSlice can possibly be null if we have hit non-aborting exceptions during indexing and never succeeded adding a document. */ @@ -498,7 +498,7 @@ adding a document. */ { // apply all deletes before we flush and release the delete slice deleteSlice.Apply(pendingUpdates, numDocsInRAM); - if (Debugging.AssertsEnabled) Debugging.Assert(() => deleteSlice.IsEmpty); + if (Debugging.AssertsEnabled) Debugging.Assert(deleteSlice.IsEmpty); deleteSlice.Reset(); } return globalUpdates; @@ -511,8 +511,8 @@ internal virtual FlushedSegment Flush() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => numDocsInRAM > 0); - Debugging.Assert(() => deleteSlice.IsEmpty, () => "all deletes must be applied in prepareFlush"); + Debugging.Assert(numDocsInRAM > 0); + Debugging.Assert(deleteSlice.IsEmpty, () => "all deletes must be applied in prepareFlush"); } segmentInfo.DocCount = numDocsInRAM; SegmentWriteState flushState = new SegmentWriteState(infoStream, directory, segmentInfo, fieldInfos.Finish(), indexWriterConfig.TermIndexInterval, pendingUpdates, new IOContext(new FlushInfo(numDocsInRAM, BytesUsed))); @@ -581,7 +581,7 @@ internal virtual FlushedSegment Flush() infoStream.Message("DWPT", "flushed: segment=" + segmentInfo.Name + " ramUsed=" + startMBUsed.ToString(nf) + " MB" + " newFlushedSize(includes docstores)=" + newSegmentSize.ToString(nf) + " MB" + " docs/MB=" + (flushState.SegmentInfo.DocCount / newSegmentSize).ToString(nf)); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => segmentInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(segmentInfo != null); FlushedSegment fs = new FlushedSegment(segmentInfoPerCommit, flushState.FieldInfos, segmentDeletes, flushState.LiveDocs, flushState.DelCountOnFlush); SealFlushedSegment(fs); @@ -609,7 +609,7 @@ internal virtual FlushedSegment Flush() [MethodImpl(MethodImplOptions.NoInlining)] internal virtual void SealFlushedSegment(FlushedSegment flushedSegment) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => flushedSegment != null); + if (Debugging.AssertsEnabled) Debugging.Assert(flushedSegment != null); SegmentCommitInfo newSegment = flushedSegment.segmentInfo; @@ -641,7 +641,7 @@ internal virtual void SealFlushedSegment(FlushedSegment flushedSegment) if (flushedSegment.liveDocs != null) { int delCount = flushedSegment.delCount; - if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(delCount > 0); if (infoStream.IsEnabled("DWPT")) { 
infoStream.Message("DWPT", "flush: write " + delCount + " deletes gen=" + flushedSegment.segmentInfo.DelGen); diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs index 2dacc4c2ef..41394ccf1c 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs @@ -80,14 +80,14 @@ internal ThreadState(DocumentsWriterPerThread dpwt) /// internal void Deactivate() // LUCENENET NOTE: Made internal because it is called outside of this context { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(this.IsHeldByCurrentThread); isActive = false; Reset(); } internal void Reset() // LUCENENET NOTE: Made internal because it is called outside of this context { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(this.IsHeldByCurrentThread); this.dwpt = null; this.bytesUsed = 0; this.flushPending = false; @@ -102,7 +102,7 @@ internal bool IsActive { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(this.IsHeldByCurrentThread); return isActive; } @@ -112,7 +112,7 @@ internal bool IsInitialized { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(this.IsHeldByCurrentThread); return IsActive && dwpt != null; } } @@ -126,7 +126,7 @@ public long BytesUsedPerThread { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(this.IsHeldByCurrentThread); // public for FlushPolicy return bytesUsed; } @@ -139,7 +139,7 @@ public DocumentsWriterPerThread DocumentsWriterPerThread { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(this.IsHeldByCurrentThread); // public for FlushPolicy return dwpt; } @@ -226,12 +226,12 @@ public virtual ThreadState NewThreadState() { // unreleased thread states are deactivated during DW#close() numThreadStatesActive++; // increment will publish the ThreadState - if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState.dwpt == null); + if (Debugging.AssertsEnabled) Debugging.Assert(threadState.dwpt == null); unlock = false; return threadState; } // unlock since the threadstate is not active anymore - we are closed! 
- if (Debugging.AssertsEnabled) Debugging.Assert(AssertUnreleasedThreadStatesInactive); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertUnreleasedThreadStatesInactive()); return null; } finally @@ -253,10 +253,10 @@ private bool AssertUnreleasedThreadStatesInactive() { for (int i = numThreadStatesActive; i < threadStates.Length; i++) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => threadStates[i].TryLock(), () => "unreleased threadstate should not be locked"); + if (Debugging.AssertsEnabled) Debugging.Assert(threadStates[i].TryLock(), () => "unreleased threadstate should not be locked"); try { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !threadStates[i].IsInitialized, () => "expected unreleased thread state to be inactive"); + if (Debugging.AssertsEnabled) Debugging.Assert(!threadStates[i].IsInitialized, () => "expected unreleased thread state to be inactive"); } finally { @@ -292,7 +292,7 @@ internal virtual void DeactivateUnreleasedStates() internal virtual DocumentsWriterPerThread Reset(ThreadState threadState, bool closed) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(threadState.IsHeldByCurrentThread); DocumentsWriterPerThread dwpt = threadState.dwpt; if (!closed) { @@ -382,7 +382,7 @@ internal virtual int NumDeactivatedThreadStates() /// the state to deactivate internal virtual void DeactivateThreadState(ThreadState threadState) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => threadState.IsActive); + if (Debugging.AssertsEnabled) Debugging.Assert(threadState.IsActive); threadState.Deactivate(); } } diff --git a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs index 81c16ce064..0f1c8f5ea6 100644 --- a/src/Lucene.Net/Index/DocumentsWriterStallControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterStallControl.cs @@ -88,10 +88,10 @@ internal void WaitIfStalled() // LUCENENET: make sure not to run IncWaiters / DecrWaiters in Debugging.Assert as that gets // disabled in production var result = IncWaiters(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(result); Monitor.Wait(this); result = DecrWaiters(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => result); + if (Debugging.AssertsEnabled) Debugging.Assert(result); //#if !NETSTANDARD1_6 // LUCENENET NOTE: Senseless to catch and rethrow the same exception type // } // catch (ThreadInterruptedException e) @@ -112,7 +112,7 @@ internal bool AnyStalledThreads() private bool IncWaiters() { numWaiting++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !waiting.ContainsKey(ThreadJob.CurrentThread)); + if (Debugging.AssertsEnabled) Debugging.Assert(!waiting.ContainsKey(ThreadJob.CurrentThread)); waiting[ThreadJob.CurrentThread] = true; return numWaiting > 0; @@ -122,7 +122,7 @@ private bool DecrWaiters() { numWaiting--; bool removed = waiting.Remove(ThreadJob.CurrentThread); - if (Debugging.AssertsEnabled) Debugging.Assert(() => removed); + if (Debugging.AssertsEnabled) Debugging.Assert(removed); return numWaiting >= 0; } diff --git a/src/Lucene.Net/Index/FieldInfo.cs b/src/Lucene.Net/Index/FieldInfo.cs index 9a61c25c07..b5ce647d02 100644 --- a/src/Lucene.Net/Index/FieldInfo.cs +++ b/src/Lucene.Net/Index/FieldInfo.cs @@ -87,7 +87,7 @@ public FieldInfo(string name, bool indexed, int number, bool storeTermVector, bo this.normType = DocValuesType.NONE; } this.attributes = attributes; - 
if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency()); } private bool CheckConsistency() @@ -96,22 +96,22 @@ private bool CheckConsistency() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !storeTermVector); - Debugging.Assert(() => !storePayloads); - Debugging.Assert(() => !omitNorms); - Debugging.Assert(() => normType == DocValuesType.NONE); - Debugging.Assert(() => indexOptions == IndexOptions.NONE); + Debugging.Assert(!storeTermVector); + Debugging.Assert(!storePayloads); + Debugging.Assert(!omitNorms); + Debugging.Assert(normType == DocValuesType.NONE); + Debugging.Assert(indexOptions == IndexOptions.NONE); } } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions != IndexOptions.NONE); + if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions != IndexOptions.NONE); if (omitNorms) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => normType == DocValuesType.NONE); + if (Debugging.AssertsEnabled) Debugging.Assert(normType == DocValuesType.NONE); } // Cannot store payloads unless positions are indexed: - if (Debugging.AssertsEnabled) Debugging.Assert(() => indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !this.storePayloads); + if (Debugging.AssertsEnabled) Debugging.Assert(indexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !this.storePayloads); } return true; @@ -163,7 +163,7 @@ internal void Update(bool indexed, bool storeTermVector, bool omitNorms, bool st } } } - if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency()); } public DocValuesType DocValuesType @@ -176,7 +176,7 @@ internal set throw new ArgumentException("cannot change DocValues type from " + docValueType + " to " + value + " for field \"" + Name + "\""); } docValueType = value; - if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency()); } } @@ -210,14 +210,14 @@ internal set throw new ArgumentException("cannot change Norm type from " + normType + " to " + value + " for field \"" + Name + "\""); } normType = value; - if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency()); } } internal void SetStoreTermVectors() { storeTermVector = true; - if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency()); } internal void SetStorePayloads() @@ -226,7 +226,7 @@ internal void SetStorePayloads() { storePayloads = true; } - if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckConsistency()); } /// diff --git a/src/Lucene.Net/Index/FieldInfos.cs b/src/Lucene.Net/Index/FieldInfos.cs index 64ae7e437c..b418d8eb86 100644 --- a/src/Lucene.Net/Index/FieldInfos.cs +++ b/src/Lucene.Net/Index/FieldInfos.cs @@ -136,7 +136,7 @@ public virtual int Count { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => byNumber.Count == byName.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(byNumber.Count == byName.Count); return byNumber.Count; } } @@ -314,7 +314,7 @@ internal void SetDocValuesType(int number, string name, DocValuesType dvType) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ContainsConsistent(number, name, dvType)); + if (Debugging.AssertsEnabled) 
Debugging.Assert(ContainsConsistent(number, name, dvType)); docValuesType[name] = dvType; } } @@ -335,7 +335,7 @@ internal Builder() /// internal Builder(FieldNumbers globalFieldNumbers) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => globalFieldNumbers != null); + if (Debugging.AssertsEnabled) Debugging.Assert(globalFieldNumbers != null); this.globalFieldNumbers = globalFieldNumbers; } @@ -378,8 +378,8 @@ private FieldInfo AddOrUpdateInternal(string name, int preferredFieldNumber, boo fi = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, normType, null); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !byName.ContainsKey(fi.Name)); - Debugging.Assert(() => globalFieldNumbers.ContainsConsistent(fi.Number, fi.Name, fi.DocValuesType)); + Debugging.Assert(!byName.ContainsKey(fi.Name)); + Debugging.Assert(globalFieldNumbers.ContainsConsistent(fi.Number, fi.Name, fi.DocValuesType)); } byName[fi.Name] = fi; } diff --git a/src/Lucene.Net/Index/FilteredTermsEnum.cs b/src/Lucene.Net/Index/FilteredTermsEnum.cs index 209605804d..7a5b380483 100644 --- a/src/Lucene.Net/Index/FilteredTermsEnum.cs +++ b/src/Lucene.Net/Index/FilteredTermsEnum.cs @@ -97,7 +97,7 @@ public FilteredTermsEnum(TermsEnum tenum) /// start with seek public FilteredTermsEnum(TermsEnum tenum, bool startWithSeek) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => tenum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(tenum != null); this.tenum = tenum; doSeek = startWithSeek; } @@ -207,7 +207,7 @@ public override void SeekExact(BytesRef term, TermState state) /// public override TermState GetTermState() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => tenum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(tenum != null); return tenum.GetTermState(); } @@ -224,7 +224,7 @@ public override BytesRef Next() BytesRef t = NextSeekTerm(actualTerm); //System.out.println(" seek to t=" + (t == null ? 
"null" : t.utf8ToString()) + " tenum=" + tenum); // Make sure we always seek forward: - if (Debugging.AssertsEnabled) Debugging.Assert(() => actualTerm == null || t == null || Comparer.Compare(t, actualTerm) > 0, () => "curTerm=" + actualTerm + " seekTerm=" + t); + if (Debugging.AssertsEnabled) Debugging.Assert(actualTerm == null || t == null || Comparer.Compare(t, actualTerm) > 0, () => "curTerm=" + actualTerm + " seekTerm=" + t); if (t == null || tenum.SeekCeil(t) == SeekStatus.END) { // no more terms to seek to or enum exhausted diff --git a/src/Lucene.Net/Index/FlushPolicy.cs b/src/Lucene.Net/Index/FlushPolicy.cs index d525a77e55..2d2bff0ed9 100644 --- a/src/Lucene.Net/Index/FlushPolicy.cs +++ b/src/Lucene.Net/Index/FlushPolicy.cs @@ -113,11 +113,11 @@ protected internal virtual void Init(LiveIndexWriterConfig indexWriterConfig) /// protected virtual ThreadState FindLargestNonPendingWriter(DocumentsWriterFlushControl control, ThreadState perThreadState) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => perThreadState.dwpt.NumDocsInRAM > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(perThreadState.dwpt.NumDocsInRAM > 0); long maxRamSoFar = perThreadState.bytesUsed; // the dwpt which needs to be flushed eventually ThreadState maxRamUsingThreadState = perThreadState; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !perThreadState.flushPending, () => "DWPT should have flushed"); + if (Debugging.AssertsEnabled) Debugging.Assert(!perThreadState.flushPending, () => "DWPT should have flushed"); IEnumerator activePerThreadsIterator = control.AllActiveThreadStates(); while (activePerThreadsIterator.MoveNext()) { @@ -132,7 +132,7 @@ protected virtual ThreadState FindLargestNonPendingWriter(DocumentsWriterFlushCo } } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertMessage("set largest ram consuming thread pending on lower watermark")); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertMessage("set largest ram consuming thread pending on lower watermark")); return maxRamUsingThreadState; } diff --git a/src/Lucene.Net/Index/FreqProxTermsWriter.cs b/src/Lucene.Net/Index/FreqProxTermsWriter.cs index 90a2fbf456..3bb8fc68ff 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriter.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriter.cs @@ -89,7 +89,7 @@ public override void Flush(IDictionary fields fieldWriter.Flush(fieldInfo.Name, consumer, state); TermsHashPerField perField = fieldWriter.termsHashPerField; - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsHash == null || termsHash == perField.termsHash); + if (Debugging.AssertsEnabled) Debugging.Assert(termsHash == null || termsHash == perField.termsHash); termsHash = perField.termsHash; int numPostings = perField.bytesHash.Count; perField.Reset(); diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs index 0257751762..630cf1ff6d 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs @@ -153,7 +153,7 @@ internal override void Start(IIndexableField f) internal void WriteProx(int termID, int proxCode) { //System.out.println("writeProx termID=" + termID + " proxCode=" + proxCode); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hasProx); + if (Debugging.AssertsEnabled) Debugging.Assert(hasProx); BytesRef payload; if (payloadAttribute == null) { @@ -182,11 +182,11 @@ internal void WriteProx(int termID, int proxCode) internal void WriteOffsets(int termID, int offsetAccum) { - 
if (Debugging.AssertsEnabled) Debugging.Assert(() => hasOffsets); + if (Debugging.AssertsEnabled) Debugging.Assert(hasOffsets); int startOffset = offsetAccum + offsetAttribute.StartOffset; int endOffset = offsetAccum + offsetAttribute.EndOffset; FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; - if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset - postings.lastOffsets[termID] >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(startOffset - postings.lastOffsets[termID] >= 0); termsHashPerField.WriteVInt32(1, startOffset - postings.lastOffsets[termID]); termsHashPerField.WriteVInt32(1, endOffset - startOffset); @@ -197,7 +197,7 @@ internal override void NewTerm(int termID) { // First time we're seeing this term since the last // flush - if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.newTerm start")); + if (Debugging.AssertsEnabled) Debugging.Assert(docState.TestPoint("FreqProxTermsWriterPerField.newTerm start")); FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; postings.lastDocIDs[termID] = docState.docID; @@ -219,7 +219,7 @@ internal override void NewTerm(int termID) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasOffsets); + if (Debugging.AssertsEnabled) Debugging.Assert(!hasOffsets); } } fieldState.MaxTermFrequency = Math.Max(1, fieldState.MaxTermFrequency); @@ -228,18 +228,18 @@ internal override void NewTerm(int termID) internal override void AddTerm(int termID) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("FreqProxTermsWriterPerField.addTerm start")); + if (Debugging.AssertsEnabled) Debugging.Assert(docState.TestPoint("FreqProxTermsWriterPerField.addTerm start")); FreqProxPostingsArray postings = (FreqProxPostingsArray)termsHashPerField.postingsArray; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasFreq || postings.termFreqs[termID] > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(!hasFreq || postings.termFreqs[termID] > 0); if (!hasFreq) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => postings.termFreqs == null); + if (Debugging.AssertsEnabled) Debugging.Assert(postings.termFreqs == null); if (docState.docID != postings.lastDocIDs[termID]) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.docID > postings.lastDocIDs[termID]); + if (Debugging.AssertsEnabled) Debugging.Assert(docState.docID > postings.lastDocIDs[termID]); termsHashPerField.WriteVInt32(0, postings.lastDocCodes[termID]); postings.lastDocCodes[termID] = docState.docID - postings.lastDocIDs[termID]; postings.lastDocIDs[termID] = docState.docID; @@ -248,7 +248,7 @@ internal override void AddTerm(int termID) } else if (docState.docID != postings.lastDocIDs[termID]) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.docID > postings.lastDocIDs[termID], () => "id: " + docState.docID + " postings ID: " + postings.lastDocIDs[termID] + " termID: " + termID); + if (Debugging.AssertsEnabled) Debugging.Assert(docState.docID > postings.lastDocIDs[termID], () => "id: " + docState.docID + " postings ID: " + postings.lastDocIDs[termID] + " termID: " + termID); // Term not yet seen in the current doc but previously // seen in other doc(s) since the last flush @@ -278,7 +278,7 @@ internal override void AddTerm(int termID) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !hasOffsets); + if (Debugging.AssertsEnabled) Debugging.Assert(!hasOffsets); } 
fieldState.UniqueTermCount++; } @@ -322,7 +322,7 @@ public FreqProxPostingsArray(int size, bool writeFreqs, bool writeProx, bool wri } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !writeOffsets); + if (Debugging.AssertsEnabled) Debugging.Assert(!writeOffsets); } //System.out.println("PA init freqs=" + writeFreqs + " pos=" + writeProx + " offs=" + writeOffsets); } @@ -340,7 +340,7 @@ internal override ParallelPostingsArray NewInstance(int size) internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => toArray is FreqProxPostingsArray); + if (Debugging.AssertsEnabled) Debugging.Assert(toArray is FreqProxPostingsArray); FreqProxPostingsArray to = (FreqProxPostingsArray)toArray; base.CopyTo(toArray, numToCopy); @@ -349,17 +349,17 @@ internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) Array.Copy(lastDocCodes, 0, to.lastDocCodes, 0, numToCopy); if (lastPositions != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => to.lastPositions != null); + if (Debugging.AssertsEnabled) Debugging.Assert(to.lastPositions != null); Array.Copy(lastPositions, 0, to.lastPositions, 0, numToCopy); } if (lastOffsets != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => to.lastOffsets != null); + if (Debugging.AssertsEnabled) Debugging.Assert(to.lastOffsets != null); Array.Copy(lastOffsets, 0, to.lastOffsets, 0, numToCopy); } if (termFreqs != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => to.termFreqs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(to.termFreqs != null); Array.Copy(termFreqs, 0, to.termFreqs, 0, numToCopy); } } @@ -416,7 +416,7 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState // new segment to the directory according to // currentFieldIndexOptions: IndexOptions currentFieldIndexOptions = fieldInfo.IndexOptions; - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentFieldIndexOptions != IndexOptions.NONE); + if (Debugging.AssertsEnabled) Debugging.Assert(currentFieldIndexOptions != IndexOptions.NONE); bool writeTermFreq = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS) >= 0; bool writePositions = currentFieldIndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; @@ -431,11 +431,11 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState // Make sure FieldInfo.update is working correctly!: if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !writeTermFreq || readTermFreq); - Debugging.Assert(() => !writePositions || readPositions); - Debugging.Assert(() => !writeOffsets || readOffsets); + Debugging.Assert(!writeTermFreq || readTermFreq); + Debugging.Assert(!writePositions || readPositions); + Debugging.Assert(!writeOffsets || readOffsets); - Debugging.Assert(() => !writeOffsets || writePositions); + Debugging.Assert(!writeOffsets || writePositions); } IDictionary segDeletes; @@ -555,11 +555,11 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => docID != postings.lastDocIDs[termID]); + if (Debugging.AssertsEnabled) Debugging.Assert(docID != postings.lastDocIDs[termID]); } docFreq++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docID < state.SegmentInfo.DocCount, () => "doc=" + docID + " maxDoc=" + state.SegmentInfo.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(docID < state.SegmentInfo.DocCount, () => "doc=" + docID + " 
maxDoc=" + state.SegmentInfo.DocCount); // NOTE: we could check here if the docID was // deleted, and skip it. However, this is somewhat @@ -644,7 +644,7 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState { if (writeOffsets) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => startOffset >= 0 && endOffset >= startOffset, () => "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset); + if (Debugging.AssertsEnabled) Debugging.Assert(startOffset >= 0 && endOffset >= startOffset, () => "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset); postingsConsumer.AddPosition(position, thisPayload, startOffset, endOffset); } else diff --git a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs index 0c8d0a5a2f..25a8a3593c 100644 --- a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs +++ b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs @@ -67,7 +67,7 @@ internal class FrozenBufferedUpdates public FrozenBufferedUpdates(BufferedUpdates deletes, bool isSegmentPrivate) { this.isSegmentPrivate = isSegmentPrivate; - if (Debugging.AssertsEnabled) Debugging.Assert(() => !isSegmentPrivate || deletes.terms.Count == 0, () => "segment private package should only have del queries"); + if (Debugging.AssertsEnabled) Debugging.Assert(!isSegmentPrivate || deletes.terms.Count == 0, () => "segment private package should only have del queries"); Term[] termsArray = deletes.terms.Keys.ToArray(/*new Term[deletes.terms.Count]*/); termCount = termsArray.Length; @@ -140,12 +140,12 @@ public virtual long DelGen { set { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.gen == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(this.gen == -1); this.gen = value; } get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => gen != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(gen != -1); return gen; } } diff --git a/src/Lucene.Net/Index/IndexFileDeleter.cs b/src/Lucene.Net/Index/IndexFileDeleter.cs index cf239c6ed4..7aeab61c43 100644 --- a/src/Lucene.Net/Index/IndexFileDeleter.cs +++ b/src/Lucene.Net/Index/IndexFileDeleter.cs @@ -405,7 +405,7 @@ private void DeleteCommits() /// public void Refresh(string segmentName) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); string[] files = directory.ListAll(); string segmentPrefix1; @@ -446,7 +446,7 @@ public void Refresh() // Set to null so that we regenerate the list of pending // files; else we can accumulate same file more than // once - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); deletable = null; Refresh(null); } @@ -454,7 +454,7 @@ public void Refresh() public void Dispose() { // DecRef old files from the last checkpoint, if any: - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); if (lastFiles.Count > 0) { @@ -476,7 +476,7 @@ public void Dispose() /// internal void RevisitPolicy() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); if (infoStream.IsEnabled("IFD")) { infoStream.Message("IFD", "now revisitPolicy"); @@ -491,7 +491,7 @@ internal void RevisitPolicy() public void DeletePendingFiles() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); if (deletable 
!= null) { IList oldDeletable = deletable; @@ -530,9 +530,9 @@ public void DeletePendingFiles() /// public void Checkpoint(SegmentInfos segmentInfos, bool isCommit) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); - if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(Monitor.IsEntered(writer)); long t0 = 0; if (infoStream.IsEnabled("IFD")) { @@ -576,7 +576,7 @@ public void Checkpoint(SegmentInfos segmentInfos, bool isCommit) internal void IncRef(SegmentInfos segmentInfos, bool isCommit) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); // If this is a commit point, also incRef the // segments_N file: foreach (string fileName in segmentInfos.GetFiles(directory, isCommit)) @@ -587,7 +587,7 @@ internal void IncRef(SegmentInfos segmentInfos, bool isCommit) internal void IncRef(ICollection files) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); foreach (string file in files) { IncRef(file); @@ -596,7 +596,7 @@ internal void IncRef(ICollection files) internal void IncRef(string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); RefCount rc = GetRefCount(fileName); if (infoStream.IsEnabled("IFD")) { @@ -610,7 +610,7 @@ internal void IncRef(string fileName) internal void DecRef(ICollection files) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); foreach (string file in files) { DecRef(file); @@ -619,7 +619,7 @@ internal void DecRef(ICollection files) internal void DecRef(string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); RefCount rc = GetRefCount(fileName); if (infoStream.IsEnabled("IFD")) { @@ -639,7 +639,7 @@ internal void DecRef(string fileName) internal void DecRef(SegmentInfos segmentInfos) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); foreach (string file in segmentInfos.GetFiles(directory, false)) { DecRef(file); @@ -648,14 +648,14 @@ internal void DecRef(SegmentInfos segmentInfos) public bool Exists(string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); // LUCENENET: Using TryGetValue to eliminate extra lookup return refCounts.TryGetValue(fileName, out RefCount value) && value.count > 0; } private RefCount GetRefCount(string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); // LUCENENET: Using TryGetValue to eliminate extra lookup if (!refCounts.TryGetValue(fileName, out RefCount rc)) { @@ -667,7 +667,7 @@ private RefCount GetRefCount(string fileName) internal void DeleteFiles(IList files) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); foreach (string file in files) { DeleteFile(file); @@ -680,7 +680,7 @@ internal void DeleteFiles(IList files) /// internal void DeleteNewFiles(ICollection files) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) 
Debugging.Assert(IsLocked); foreach (string fileName in files) { // NOTE: it's very unusual yet possible for the @@ -705,7 +705,7 @@ internal void DeleteNewFiles(ICollection<string> files) internal void DeleteFile(string fileName) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => IsLocked); + if (Debugging.AssertsEnabled) Debugging.Assert(IsLocked); EnsureOpen(); try { @@ -724,7 +724,7 @@ internal void DeleteFile(string fileName) // the file is open in another process, and queue // the file for subsequent deletion. - //if (Debugging.AssertsEnabled) Debugging.Assert(() => e.Message.Contains("cannot delete")); + //if (Debugging.AssertsEnabled) Debugging.Assert(e.Message.Contains("cannot delete")); if (infoStream.IsEnabled("IFD")) { @@ -764,14 +764,14 @@ public int IncRef() } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-increment for file \"" + fileName + "\""); + if (Debugging.AssertsEnabled) Debugging.Assert(count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-increment for file \"" + fileName + "\""); } return ++count; } public int DecRef() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-decrement for file \"" + fileName + "\""); + if (Debugging.AssertsEnabled) Debugging.Assert(count > 0, () => Thread.CurrentThread.Name + ": RefCount is 0 pre-decrement for file \"" + fileName + "\""); return --count; } } diff --git a/src/Lucene.Net/Index/IndexFileNames.cs b/src/Lucene.Net/Index/IndexFileNames.cs index 0aabe9a461..986968185b 100644 --- a/src/Lucene.Net/Index/IndexFileNames.cs +++ b/src/Lucene.Net/Index/IndexFileNames.cs @@ -106,7 +106,7 @@ public static string FileNameFromGeneration(string @base, string ext, long gen) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => gen > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(gen > 0); // The '6' part in the length is: 1 for '.', 1 for '_' and 4 as estimate // to the gen length as string (hopefully an upper limit so SB won't // expand in the middle.
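(Aside on the IndexFileNames hunk above: FileNameFromGeneration produces generation-stamped names such as "_0_b.del" for segment "_0", extension "del", generation 11; generations are rendered in base 36, and the StringBuilder capacity estimate is what the '6' comment refers to. The following is a minimal standalone sketch of that naming scheme, not the shipped implementation; the type and helper names are invented for illustration.)

    using System.Text;

    internal static class GenerationNamesSketch
    {
        // Shape of the generation-stamped file name; the gen > 0 branch
        // corresponds to the assert in the hunk above.
        public static string FileNameFromGeneration(string @base, string ext, long gen)
        {
            if (gen == -1) return null;                                      // no file for this generation
            if (gen == 0) return ext.Length > 0 ? @base + "." + ext : @base; // un-stamped name
            // Capacity estimate mirrors the comment above:
            // 1 for '.', 1 for '_', and ~4 digits for the generation.
            StringBuilder sb = new StringBuilder(@base.Length + 6 + ext.Length);
            sb.Append(@base).Append('_').Append(ToBase36(gen));
            if (ext.Length > 0) sb.Append('.').Append(ext);
            return sb.ToString();
        }

        private static string ToBase36(long value)
        {
            const string digits = "0123456789abcdefghijklmnopqrstuvwxyz";
            if (value == 0) return "0";
            StringBuilder sb = new StringBuilder();
            for (long v = value; v > 0; v /= 36)
                sb.Insert(0, digits[(int)(v % 36)]);
            return sb.ToString();
        }
    }

    // e.g. FileNameFromGeneration("_0", "del", 11) => "_0_b.del"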
@@ -139,7 +139,7 @@ public static string SegmentFileName(string segmentName, string segmentSuffix, s { if (ext.Length > 0 || segmentSuffix.Length > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !ext.StartsWith(".", StringComparison.Ordinal)); + if (Debugging.AssertsEnabled) Debugging.Assert(!ext.StartsWith(".", StringComparison.Ordinal)); StringBuilder sb = new StringBuilder(segmentName.Length + 2 + segmentSuffix.Length + ext.Length); sb.Append(segmentName); if (segmentSuffix.Length > 0) diff --git a/src/Lucene.Net/Index/IndexFormatTooNewException.cs b/src/Lucene.Net/Index/IndexFormatTooNewException.cs index ebd4f6f16c..27ed51caee 100644 --- a/src/Lucene.Net/Index/IndexFormatTooNewException.cs +++ b/src/Lucene.Net/Index/IndexFormatTooNewException.cs @@ -48,7 +48,7 @@ public class IndexFormatTooNewException : CorruptIndexException public IndexFormatTooNewException(string resourceDesc, int version, int minVersion, int maxVersion) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + " (needs to be between " + minVersion + " and " + maxVersion + ")") { - if (Debugging.AssertsEnabled) Debugging.Assert(() => resourceDesc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(resourceDesc != null); } /// diff --git a/src/Lucene.Net/Index/IndexFormatTooOldException.cs b/src/Lucene.Net/Index/IndexFormatTooOldException.cs index 6153bfcb4b..fb29d777a8 100644 --- a/src/Lucene.Net/Index/IndexFormatTooOldException.cs +++ b/src/Lucene.Net/Index/IndexFormatTooOldException.cs @@ -46,7 +46,7 @@ public class IndexFormatTooOldException : CorruptIndexException public IndexFormatTooOldException(string resourceDesc, string version) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + ". this version of Lucene only supports indexes created with release 3.0 and later.") { - if (Debugging.AssertsEnabled) Debugging.Assert(() => resourceDesc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(resourceDesc != null); } /// @@ -73,7 +73,7 @@ public IndexFormatTooOldException(DataInput input, string version) public IndexFormatTooOldException(string resourceDesc, int version, int minVersion, int maxVersion) : base("Format version is not supported (resource: " + resourceDesc + "): " + version + " (needs to be between " + minVersion + " and " + maxVersion + "). 
this version of Lucene only supports indexes created with release 3.0 and later.") { - if (Debugging.AssertsEnabled) Debugging.Assert(() => resourceDesc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(resourceDesc != null); } /// diff --git a/src/Lucene.Net/Index/IndexWriter.cs b/src/Lucene.Net/Index/IndexWriter.cs index 8c9cc5dc94..f3204c096e 100644 --- a/src/Lucene.Net/Index/IndexWriter.cs +++ b/src/Lucene.Net/Index/IndexWriter.cs @@ -462,8 +462,8 @@ public virtual bool InfoIsLive(SegmentCommitInfo info) lock (this) { int idx = outerInstance.segmentInfos.IndexOf(info); - Debugging.Assert(() => idx != -1, () => "info=" + info + " isn't live"); - Debugging.Assert(() => outerInstance.segmentInfos.Info(idx) == info, () => "info=" + info + " doesn't match live info in segmentInfos"); + Debugging.Assert(idx != -1, () => "info=" + info + " isn't live"); + Debugging.Assert(outerInstance.segmentInfos.Info(idx) == info, () => "info=" + info + " doesn't match live info in segmentInfos"); return true; } } @@ -476,7 +476,7 @@ public virtual void Drop(SegmentCommitInfo info) readerMap.TryGetValue(info, out rld); if (rld != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => info == rld.Info); + if (Debugging.AssertsEnabled) Debugging.Assert(info == rld.Info); // System.out.println("[" + Thread.currentThread().getName() + "] ReaderPool.drop: " + info); readerMap.Remove(info); rld.DropReaders(); @@ -516,7 +516,7 @@ public virtual void Release(ReadersAndUpdates rld, bool assertInfoLive) rld.DecRef(); // Pool still holds a ref: - if (Debugging.AssertsEnabled) Debugging.Assert(() => rld.RefCount() >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(rld.RefCount() >= 1); if (!outerInstance.poolReaders && rld.RefCount() == 1) { @@ -526,7 +526,7 @@ public virtual void Release(ReadersAndUpdates rld, bool assertInfoLive) if (rld.WriteLiveDocs(outerInstance.directory)) { // Make sure we only write del docs for a live segment: - if (Debugging.AssertsEnabled) Debugging.Assert(() => assertInfoLive == false || InfoIsLive(rld.Info)); + if (Debugging.AssertsEnabled) Debugging.Assert(assertInfoLive == false || InfoIsLive(rld.Info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -575,7 +575,7 @@ internal virtual void DropAll(bool doSave) if (doSave && rld.WriteLiveDocs(outerInstance.directory)) // Throws IOException { // Make sure we only write del docs and field updates for a live segment: - if (Debugging.AssertsEnabled) Debugging.Assert(() => InfoIsLive(rld.Info)); + if (Debugging.AssertsEnabled) Debugging.Assert(InfoIsLive(rld.Info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -644,7 +644,7 @@ internal virtual void DropAll(bool doSave) // before possibly throwing an exception. 
readerMap.RemoveAll(toDelete); - if (Debugging.AssertsEnabled) Debugging.Assert(() => readerMap.Count == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(readerMap.Count == 0); IOUtils.ReThrow(priorE); } } @@ -663,11 +663,11 @@ public virtual void Commit(SegmentInfos infos) ReadersAndUpdates rld; if (readerMap.TryGetValue(info, out rld)) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => rld.Info == info); + if (Debugging.AssertsEnabled) Debugging.Assert(rld.Info == info); if (rld.WriteLiveDocs(outerInstance.directory)) { // Make sure we only write del docs for a live segment: - if (Debugging.AssertsEnabled) Debugging.Assert(() => InfoIsLive(info)); + if (Debugging.AssertsEnabled) Debugging.Assert(InfoIsLive(info)); // Must checkpoint because we just // created new _X_N.del and field updates files; // don't call IW.checkpoint because that also @@ -691,7 +691,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Dir == outerInstance.directory, () => "info.dir=" + info.Info.Dir + " vs " + outerInstance.directory); + if (Debugging.AssertsEnabled) Debugging.Assert(info.Info.Dir == outerInstance.directory, () => "info.dir=" + info.Info.Dir + " vs " + outerInstance.directory); ReadersAndUpdates rld; readerMap.TryGetValue(info, out rld); @@ -707,7 +707,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => rld.Info == info, () => "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info)); + if (Debugging.AssertsEnabled) Debugging.Assert(rld.Info == info, () => "rld.info=" + rld.Info + " info=" + info + " isLive?=" + InfoIsLive(rld.Info) + " vs " + InfoIsLive(info)); } if (create) @@ -716,7 +716,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) rld.IncRef(); } - if (Debugging.AssertsEnabled) Debugging.Assert(NoDups); + if (Debugging.AssertsEnabled) Debugging.Assert(NoDups()); return rld; } @@ -731,7 +731,7 @@ private bool NoDups() JCG.HashSet<string> seen = new JCG.HashSet<string>(); foreach (SegmentCommitInfo info in readerMap.Keys) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !seen.Contains(info.Info.Name)); + if (Debugging.AssertsEnabled) Debugging.Assert(!seen.Contains(info.Info.Name)); seen.Add(info.Info.Name); } return true; @@ -1095,7 +1095,7 @@ public virtual void Dispose(bool waitForMerges) // LUCENENET TODO: API - mark pr else { CloseInternal(waitForMerges, true); - if (Debugging.AssertsEnabled) Debugging.Assert(AssertEventQueueAfterClose); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertEventQueueAfterClose()); } } } @@ -1109,7 +1109,7 @@ private bool AssertEventQueueAfterClose() } foreach (IEvent e in eventQueue) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => e is DocumentsWriter.MergePendingEvent, () => e.ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(e is DocumentsWriter.MergePendingEvent, () => e.ToString()); } return true; } @@ -1276,7 +1276,7 @@ private void CloseInternal(bool waitForMerges, bool doFlush) { closed = true; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); + if (Debugging.AssertsEnabled)
Debugging.Assert(docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); } catch (OutOfMemoryException oom) { @@ -1644,8 +1644,8 @@ public virtual bool TryDeleteDocument(IndexReader readerIn, int docID) docID -= leaves[subIndex].DocBase; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => docID >= 0); - Debugging.Assert(() => docID < reader.MaxDoc); + Debugging.Assert(docID >= 0); + Debugging.Assert(docID < reader.MaxDoc); } } @@ -2386,7 +2386,7 @@ private bool UpdatePendingMerges(MergeTrigger trigger, int maxNumSegments) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => maxNumSegments == -1 || maxNumSegments > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(maxNumSegments == -1 || maxNumSegments > 0); //if (Debugging.AssertsEnabled) Debugging.Assert(trigger != null); // LUCENENET NOTE: Enum cannot be null in .NET if (stopMerges) { @@ -2402,7 +2402,7 @@ private bool UpdatePendingMerges(MergeTrigger trigger, int maxNumSegments) MergePolicy.MergeSpecification spec; if (maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED, () => "Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.ToString()); + if (Debugging.AssertsEnabled) Debugging.Assert(trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED, () => "Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.ToString()); spec = mergePolicy.FindForcedMerges(segmentInfos, maxNumSegments, segmentsToMerge); newMergesFound = spec != null; if (newMergesFound) @@ -2572,7 +2572,7 @@ private void RollbackInternal() infoStream.Message("IW", "rollback: infos=" + SegString(segmentInfos.Segments)); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("rollback before checkpoint")); + if (Debugging.AssertsEnabled) Debugging.Assert(TestPoint("rollback before checkpoint")); // Ask deleter to locate unreferenced files & remove // them: @@ -2587,7 +2587,7 @@ private void RollbackInternal() IOUtils.Dispose(writeLock); // release write lock writeLock = null; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); + if (Debugging.AssertsEnabled) Debugging.Assert(docWriter.perThreadPool.NumDeactivatedThreadStates() == docWriter.perThreadPool.MaxThreadStates, () => "" + docWriter.perThreadPool.NumDeactivatedThreadStates() + " " + docWriter.perThreadPool.MaxThreadStates); } success = true; @@ -2772,7 +2772,7 @@ private void FinishMerges(bool waitForMerges) stopMerges = false; Monitor.PulseAll(this); - if (Debugging.AssertsEnabled) Debugging.Assert(() => 0 == mergingSegments.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(0 == mergingSegments.Count); if (infoStream.IsEnabled("IW")) { @@ -2812,7 +2812,7 @@ public virtual void WaitForMerges() } // sanity check - if (Debugging.AssertsEnabled) Debugging.Assert(() => 0 == mergingSegments.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(0 == mergingSegments.Count); if (infoStream.IsEnabled("IW")) { @@ -2865,7 +2865,7 @@ internal virtual void 
PublishFrozenUpdates(FrozenBufferedUpdates packet) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => packet != null && packet.Any()); + if (Debugging.AssertsEnabled) Debugging.Assert(packet != null && packet.Any()); lock (bufferedUpdatesStream) { bufferedUpdatesStream.Push(packet); @@ -3061,7 +3061,7 @@ public virtual void AddIndexes(params Directory[] dirs) JCG.HashSet<string> copiedFiles = new JCG.HashSet<string>(); foreach (SegmentCommitInfo info in sis.Segments) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !infos.Contains(info), () => "dup info dir=" + info.Info.Dir + " name=" + info.Info.Name); + if (Debugging.AssertsEnabled) Debugging.Assert(!infos.Contains(info), () => "dup info dir=" + info.Info.Dir + " name=" + info.Info.Name); string newSegName = NewSegmentName(); @@ -3336,7 +3336,7 @@ private SegmentCommitInfo CopySegmentAsIs(SegmentCommitInfo info, string segName // because the DS might have been copied already, in which case we // just want to update the DS name of this SegmentInfo. string dsName = Lucene3xSegmentInfoFormat.GetDocStoreSegment(info.Info); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dsName != null); + if (Debugging.AssertsEnabled) Debugging.Assert(dsName != null); // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey if (!dsNames.TryGetValue(dsName, out string newDsName)) { @@ -3450,8 +3450,8 @@ private SegmentCommitInfo CopySegmentAsIs(SegmentCommitInfo info, string segName if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !SlowFileExists(directory, newFileName), () => "file \"" + newFileName + "\" already exists; siFiles=" + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", siFiles)); - Debugging.Assert(() => !copiedFiles.Contains(file), () => "file \"" + file + "\" is being copied more than once"); + Debugging.Assert(!SlowFileExists(directory, newFileName), () => "file \"" + newFileName + "\" already exists; siFiles=" + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", siFiles)); + Debugging.Assert(!copiedFiles.Contains(file), () => "file \"" + file + "\" is being copied more than once"); } copiedFiles.Add(file); info.Info.Dir.Copy(directory, file, newFileName, context); @@ -4018,7 +4018,7 @@ private void SkipDeletedDoc(DocValuesFieldUpdates.Iterator[] updatesIters, int d // when entering the method, all iterators must already be beyond the // deleted document, or right on it, in which case we advance them over // and they must be beyond it now.
- if (Debugging.AssertsEnabled) Debugging.Assert(() => iter.Doc > deletedDoc, () => "updateDoc=" + iter.Doc + " deletedDoc=" + deletedDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(iter.Doc > deletedDoc, () => "updateDoc=" + iter.Doc + " deletedDoc=" + deletedDoc); } } @@ -4038,7 +4038,7 @@ internal void Init(ReaderPool readerPool, MergePolicy.OneMerge merge, MergeState { mergedDeletesAndUpdates = readerPool.Get(merge.info, true); docMap = merge.GetDocMap(mergeState); - if (Debugging.AssertsEnabled) Debugging.Assert(() => docMap.IsConsistent(merge.info.Info.DocCount)); + if (Debugging.AssertsEnabled) Debugging.Assert(docMap.IsConsistent(merge.info.Info.DocCount)); } if (initWritableLiveDocs && !initializedWritableLiveDocs) { @@ -4070,7 +4070,7 @@ private void MaybeApplyMergedDVUpdates(MergePolicy.OneMerge merge, MergeState me } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => updatesIter.Doc > curDoc, () => "field=" + mergingFields[idx] + " updateDoc=" + updatesIter.Doc + " curDoc=" + curDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(updatesIter.Doc > curDoc, () => "field=" + mergingFields[idx] + " updateDoc=" + updatesIter.Doc + " curDoc=" + curDoc); } } } @@ -4089,7 +4089,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("startCommitMergeDeletes")); + if (Debugging.AssertsEnabled) Debugging.Assert(TestPoint("startCommitMergeDeletes")); IList<SegmentCommitInfo> sourceSegments = merge.Segments; @@ -4115,7 +4115,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer IBits prevLiveDocs = merge.readers[i].LiveDocs; ReadersAndUpdates rld = readerPool.Get(info, false); // We hold a ref so it should still be in the pool: - if (Debugging.AssertsEnabled) Debugging.Assert(() => rld != null, () => "seg=" + info.Info.Name); + if (Debugging.AssertsEnabled) Debugging.Assert(rld != null, () => "seg=" + info.Info.Name); IBits currentLiveDocs = rld.LiveDocs; IDictionary<string, DocValuesFieldUpdates> mergingFieldUpdates = rld.MergingFieldUpdates; string[] mergingFields; @@ -4156,9 +4156,9 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer // still have deletions now: if (Debugging.AssertsEnabled) { - Debugging.Assert(() => currentLiveDocs != null); - Debugging.Assert(() => prevLiveDocs.Length == docCount); - Debugging.Assert(() => currentLiveDocs.Length == docCount); + Debugging.Assert(currentLiveDocs != null); + Debugging.Assert(prevLiveDocs.Length == docCount); + Debugging.Assert(currentLiveDocs.Length == docCount); } // There were deletes on this segment when the merge @@ -4182,7 +4182,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer { if (!prevLiveDocs.Get(j)) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !currentLiveDocs.Get(j)); + if (Debugging.AssertsEnabled) Debugging.Assert(!currentLiveDocs.Get(j)); } else { @@ -4232,7 +4232,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer } else if (currentLiveDocs != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentLiveDocs.Length == docCount); + if (Debugging.AssertsEnabled) Debugging.Assert(currentLiveDocs.Length == docCount); // this segment had no deletes before but now it // does: for (int j = 0; j < docCount; j++) @@ -4273,7 +4273,7 @@ private ReadersAndUpdates CommitMergedDeletesAndUpdates(MergePolicy.OneMerge mer } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => docUpto ==
merge.info.Info.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(docUpto == merge.info.Info.DocCount); if (mergedDVUpdates.Any()) { @@ -4328,7 +4328,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("startCommitMerge")); + if (Debugging.AssertsEnabled) Debugging.Assert(TestPoint("startCommitMerge")); if (hitOOM) { @@ -4340,7 +4340,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) infoStream.Message("IW", "commitMerge: " + SegString(merge.Segments) + " index=" + SegString()); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.registerDone); + if (Debugging.AssertsEnabled) Debugging.Assert(merge.registerDone); // If merge was explicitly aborted, or, if rollback() or // rollbackTransaction() had been called since our merge @@ -4376,7 +4376,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) // started), then we will switch to the compound // format as well: - if (Debugging.AssertsEnabled) Debugging.Assert(() => !segmentInfos.Contains(merge.info)); + if (Debugging.AssertsEnabled) Debugging.Assert(!segmentInfos.Contains(merge.info)); bool allDeleted = merge.Segments.Count == 0 || merge.info.Info.DocCount == 0 || (mergedUpdates != null && mergedUpdates.PendingDeleteCount == merge.info.Info.DocCount); @@ -4392,9 +4392,9 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) // If we merged no segments then we better be dropping // the new segment: - if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.Segments.Count > 0 || dropSegment); + if (Debugging.AssertsEnabled) Debugging.Assert(merge.Segments.Count > 0 || dropSegment); - if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.info.Info.DocCount != 0 || keepFullyDeletedSegments || dropSegment); + if (Debugging.AssertsEnabled) Debugging.Assert(merge.info.Info.DocCount != 0 || keepFullyDeletedSegments || dropSegment); if (mergedUpdates != null) { @@ -4429,7 +4429,7 @@ private bool CommitMerge(MergePolicy.OneMerge merge, MergeState mergeState) if (dropSegment) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !segmentInfos.Contains(merge.info)); + if (Debugging.AssertsEnabled) Debugging.Assert(!segmentInfos.Contains(merge.info)); readerPool.Drop(merge.info); deleter.DeleteNewFiles(merge.info.GetFiles()); } @@ -4618,7 +4618,7 @@ internal bool RegisterMerge(MergePolicy.OneMerge merge) { return true; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.Segments.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(merge.Segments.Count > 0); if (stopMerges) { @@ -4697,15 +4697,15 @@ internal bool RegisterMerge(MergePolicy.OneMerge merge) if (Debugging.AssertsEnabled) { - Debugging.Assert(() => merge.EstimatedMergeBytes == 0); - Debugging.Assert(() => merge.totalMergeBytes == 0); + Debugging.Assert(merge.EstimatedMergeBytes == 0); + Debugging.Assert(merge.totalMergeBytes == 0); } foreach (SegmentCommitInfo info in merge.Segments) { if (info.Info.DocCount > 0) { int delCount = NumDeletedDocs(info); - if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount <= info.Info.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(delCount <= info.Info.DocCount); double delRatio = ((double)delCount) / info.Info.DocCount; merge.EstimatedMergeBytes += (long)(info.GetSizeInBytes() * (1.0 - delRatio)); merge.totalMergeBytes += info.GetSizeInBytes(); @@ -4753,10 +4753,10 @@ private void 
MergeInitImpl(MergePolicy.OneMerge merge) // LUCENENET specific: re { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => TestPoint("startMergeInit")); + Debugging.Assert(TestPoint("startMergeInit")); - Debugging.Assert(() => merge.registerDone); - Debugging.Assert(() => merge.MaxNumSegments == -1 || merge.MaxNumSegments > 0); + Debugging.Assert(merge.registerDone); + Debugging.Assert(merge.MaxNumSegments == -1 || merge.MaxNumSegments > 0); } if (hitOOM) @@ -4899,7 +4899,7 @@ private void CloseMergeReaders(MergePolicy.OneMerge merge, bool suppressExceptio { ReadersAndUpdates rld = readerPool.Get(sr.SegmentInfo, false); // We still hold a ref so it should not have been removed: - if (Debugging.AssertsEnabled) Debugging.Assert(() => rld != null); + if (Debugging.AssertsEnabled) Debugging.Assert(rld != null); if (drop) { rld.DropChanges(); } @@ -4988,8 +4988,8 @@ private int MergeMiddle(MergePolicy.OneMerge merge) if (Debugging.AssertsEnabled) { - Debugging.Assert(() => reader != null); - Debugging.Assert(rld.VerifyDocCounts); + Debugging.Assert(reader != null); + Debugging.Assert(rld.VerifyDocCounts()); } if (infoStream.IsEnabled("IW")) @@ -5016,7 +5016,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) if (reader.NumDeletedDocs != delCount) { // fix the reader's live docs and del count - if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount > reader.NumDeletedDocs); // beware of zombies + if (Debugging.AssertsEnabled) Debugging.Assert(delCount > reader.NumDeletedDocs); // beware of zombies SegmentReader newReader = new SegmentReader(info, reader, liveDocs, info.Info.DocCount - delCount); bool released = false; @@ -5037,7 +5037,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) } merge.readers.Add(reader); - if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount <= info.Info.DocCount, () => "delCount=" + delCount + " info.docCount=" + info.Info.DocCount + " rld.pendingDeleteCount=" + rld.PendingDeleteCount + " info.getDelCount()=" + info.DelCount); + if (Debugging.AssertsEnabled) Debugging.Assert(delCount <= info.Info.DocCount, () => "delCount=" + delCount + " info.docCount=" + info.Info.DocCount + " rld.pendingDeleteCount=" + rld.PendingDeleteCount + " info.getDelCount()=" + info.DelCount); segUpto++; } @@ -5075,7 +5075,7 @@ private int MergeMiddle(MergePolicy.OneMerge merge) } } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => mergeState.SegmentInfo == merge.info.Info); + if (Debugging.AssertsEnabled) Debugging.Assert(mergeState.SegmentInfo == merge.info.Info); merge.info.Info.SetFiles(new JCG.HashSet<string>(dirWrapper.CreatedFiles)); // Record which codec was used to write the segment @@ -5261,7 +5261,7 @@ internal virtual void AddMergeException(MergePolicy.OneMerge merge) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => merge.Exception != null); + if (Debugging.AssertsEnabled) Debugging.Assert(merge.Exception != null); if (!mergeExceptions.Contains(merge) && mergeGen == merge.mergeGen) { mergeExceptions.Add(merge); @@ -5379,13 +5379,13 @@ private bool FilesExist(SegmentInfos toSync) ICollection<string> files = toSync.GetFiles(directory, false); foreach (string fileName in files) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => SlowFileExists(directory, fileName), () => "file " + fileName + " does not exist; files=" + Arrays.ToString(directory.ListAll())); + if (Debugging.AssertsEnabled) Debugging.Assert(SlowFileExists(directory, fileName), () => "file " + fileName + " does not exist; files=" + Arrays.ToString(directory.ListAll())); //
If this trips it means we are missing a call to // .checkpoint somewhere, because by the time we // are called, deleter should know about every // file referenced by the current head // segmentInfos: - if (Debugging.AssertsEnabled) Debugging.Assert(() => deleter.Exists(fileName), () => "IndexFileDeleter doesn't know about file " + fileName); + if (Debugging.AssertsEnabled) Debugging.Assert(deleter.Exists(fileName), () => "IndexFileDeleter doesn't know about file " + fileName); } return true; } @@ -5427,8 +5427,8 @@ private void StartCommit(SegmentInfos toSync) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => TestPoint("startStartCommit")); - Debugging.Assert(() => pendingCommit == null); + Debugging.Assert(TestPoint("startStartCommit")); + Debugging.Assert(pendingCommit == null); } if (hitOOM) @@ -5445,7 +5445,7 @@ private void StartCommit(SegmentInfos toSync) lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => lastCommitChangeCount <= changeCount, () => "lastCommitChangeCount=" + lastCommitChangeCount + " changeCount=" + changeCount); + if (Debugging.AssertsEnabled) Debugging.Assert(lastCommitChangeCount <= changeCount, () => "lastCommitChangeCount=" + lastCommitChangeCount + " changeCount=" + changeCount); if (pendingCommitChangeCount == lastCommitChangeCount) { @@ -5463,22 +5463,22 @@ private void StartCommit(SegmentInfos toSync) infoStream.Message("IW", "startCommit index=" + SegString(ToLiveInfos(toSync).Segments) + " changeCount=" + changeCount); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => FilesExist(toSync)); + if (Debugging.AssertsEnabled) Debugging.Assert(FilesExist(toSync)); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("midStartCommit")); + if (Debugging.AssertsEnabled) Debugging.Assert(TestPoint("midStartCommit")); bool pendingCommitSet = false; try { - if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("midStartCommit2")); + if (Debugging.AssertsEnabled) Debugging.Assert(TestPoint("midStartCommit2")); lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingCommit == null); + if (Debugging.AssertsEnabled) Debugging.Assert(pendingCommit == null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => segmentInfos.Generation == toSync.Generation); + if (Debugging.AssertsEnabled) Debugging.Assert(segmentInfos.Generation == toSync.Generation); // Exception here means nothing is prepared // (this method unwinds everything it did on @@ -5515,7 +5515,7 @@ private void StartCommit(SegmentInfos toSync) infoStream.Message("IW", "done all syncs: " + string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", filesToSync)); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("midStartCommitSuccess")); + if (Debugging.AssertsEnabled) Debugging.Assert(TestPoint("midStartCommitSuccess")); } finally { @@ -5545,7 +5545,7 @@ private void StartCommit(SegmentInfos toSync) { HandleOOM(oom, "startCommit"); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => TestPoint("finishStartCommit")); + if (Debugging.AssertsEnabled) Debugging.Assert(TestPoint("finishStartCommit")); } /// @@ -5715,7 +5715,7 @@ internal static ICollection CreateCompoundFile(InfoStream infoStream, Di { infoStream.Message("IW", "create compound file " + fileName); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => Lucene3xSegmentInfoFormat.GetDocStoreOffset(info) == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(Lucene3xSegmentInfoFormat.GetDocStoreOffset(info) == -1); // Now merge all added files 
ICollection<string> files = info.GetFiles(); CompoundFileDirectory cfsDir = new CompoundFileDirectory(directory, fileName, context, true); diff --git a/src/Lucene.Net/Index/LogMergePolicy.cs b/src/Lucene.Net/Index/LogMergePolicy.cs index 251980f288..1424d99cc3 100644 --- a/src/Lucene.Net/Index/LogMergePolicy.cs +++ b/src/Lucene.Net/Index/LogMergePolicy.cs @@ -192,7 +192,7 @@ protected virtual long SizeDocs(SegmentCommitInfo info) if (m_calibrateSizeByDeletes) { int delCount = m_writer.Get().NumDeletedDocs(info); - if (Debugging.AssertsEnabled) Debugging.Assert(() => delCount <= info.Info.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(delCount <= info.Info.DocCount); return (info.Info.DocCount - (long)delCount); } else @@ -378,7 +378,7 @@ private MergeSpecification FindForcedMergesMaxNumSegments(SegmentInfos infos, in /// public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxNumSegments, IDictionary<SegmentCommitInfo, bool?> segmentsToMerge) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => maxNumSegments > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(maxNumSegments > 0); if (IsVerbose) { Message("findForcedMerges: maxNumSegs=" + maxNumSegments + " segsToMerge=" + @@ -469,7 +469,7 @@ public override MergeSpecification FindForcedDeletesMerges(SegmentInfos segmentI var spec = new MergeSpecification(); int firstSegmentWithDeletions = -1; IndexWriter w = m_writer.Get(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => w != null); + if (Debugging.AssertsEnabled) Debugging.Assert(w != null); for (int i = 0; i < numSegments; i++) { SegmentCommitInfo info = segmentInfos.Info(i); @@ -692,7 +692,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment for (int i = start; i < end; i++) { mergeInfos.Add(levels[i].info); - if (Debugging.AssertsEnabled) Debugging.Assert(() => infos.Contains(levels[i].info)); + if (Debugging.AssertsEnabled) Debugging.Assert(infos.Contains(levels[i].info)); } if (IsVerbose) { diff --git a/src/Lucene.Net/Index/MergePolicy.cs b/src/Lucene.Net/Index/MergePolicy.cs index c31f8551ff..7cee5353ad 100644 --- a/src/Lucene.Net/Index/MergePolicy.cs +++ b/src/Lucene.Net/Index/MergePolicy.cs @@ -93,12 +93,12 @@ internal virtual bool IsConsistent(int maxDoc) int target = Map(i); if (target < 0 || target >= maxDoc) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "out of range: " + target + " not in [0-" + maxDoc + "["); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "out of range: " + target + " not in [0-" + maxDoc + "["); return false; } else if (targets.Get(target)) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => target + " is already taken (" + i + ")"); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => target + " is already taken (" + i + ")"); return false; } } @@ -721,7 +721,7 @@ protected virtual long Size(SegmentCommitInfo info) long byteSize = info.GetSizeInBytes(); int delCount = m_writer.Get().NumDeletedDocs(info); double delRatio = (info.Info.DocCount <= 0 ? 0.0f : ((float)delCount / (float)info.Info.DocCount)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => delRatio <= 1.0); + if (Debugging.AssertsEnabled) Debugging.Assert(delRatio <= 1.0); return (info.Info.DocCount <= 0 ?
byteSize : (long)(byteSize * (1.0 - delRatio))); } @@ -733,7 +733,7 @@ protected virtual long Size(SegmentCommitInfo info) protected bool IsMerged(SegmentInfos infos, SegmentCommitInfo info) { IndexWriter w = m_writer.Get(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => w != null); + if (Debugging.AssertsEnabled) Debugging.Assert(w != null); bool hasDeletions = w.NumDeletedDocs(info) > 0; return !hasDeletions #pragma warning disable 612, 618 diff --git a/src/Lucene.Net/Index/MergeState.cs b/src/Lucene.Net/Index/MergeState.cs index 5a7af1bce7..d9a7b68d10 100644 --- a/src/Lucene.Net/Index/MergeState.cs +++ b/src/Lucene.Net/Index/MergeState.cs @@ -82,7 +82,7 @@ public static DocMap Build(AtomicReader reader) internal static DocMap Build(int maxDoc, IBits liveDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => liveDocs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(liveDocs != null); MonotonicAppendingInt64Buffer docMap = new MonotonicAppendingInt64Buffer(); int del = 0; for (int i = 0; i < maxDoc; ++i) @@ -95,7 +95,7 @@ internal static DocMap Build(int maxDoc, IBits liveDocs) } docMap.Freeze(); int numDeletedDocs = del; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docMap.Count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(docMap.Count == maxDoc); return new DocMapAnonymousInnerClassHelper(maxDoc, liveDocs, docMap, numDeletedDocs); } diff --git a/src/Lucene.Net/Index/MultiBits.cs b/src/Lucene.Net/Index/MultiBits.cs index 27d11d315e..16bb0c7dc9 100644 --- a/src/Lucene.Net/Index/MultiBits.cs +++ b/src/Lucene.Net/Index/MultiBits.cs @@ -41,7 +41,7 @@ internal sealed class MultiBits : IBits public MultiBits(IBits[] subs, int[] starts, bool defaultValue) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => starts.Length == 1 + subs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(starts.Length == 1 + subs.Length); this.subs = subs; this.starts = starts; this.sefaultValue = defaultValue; @@ -50,14 +50,14 @@ public MultiBits(IBits[] subs, int[] starts, bool defaultValue) private bool CheckLength(int reader, int doc) { int length = starts[1 + reader] - starts[reader]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => doc - starts[reader] < length, () => "doc=" + doc + " reader=" + reader + " starts[reader]=" + starts[reader] + " length=" + length); + if (Debugging.AssertsEnabled) Debugging.Assert(doc - starts[reader] < length, () => "doc=" + doc + " reader=" + reader + " starts[reader]=" + starts[reader] + " length=" + length); return true; } public bool Get(int doc) { int reader = ReaderUtil.SubIndex(doc, starts); - if (Debugging.AssertsEnabled) Debugging.Assert(() => reader != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(reader != -1); IBits bits = subs[reader]; if (bits == null) { @@ -65,7 +65,7 @@ public bool Get(int doc) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => CheckLength(reader, doc)); + if (Debugging.AssertsEnabled) Debugging.Assert(CheckLength(reader, doc)); return bits.Get(doc - starts[reader]); } } @@ -116,8 +116,8 @@ public SubResult GetMatchingSub(ReaderSlice slice) int reader = ReaderUtil.SubIndex(slice.Start, starts); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => reader != -1); - Debugging.Assert(() => reader < subs.Length, () => "slice=" + slice + " starts[-1]=" + starts[starts.Length - 1]); + Debugging.Assert(reader != -1); + Debugging.Assert(reader < subs.Length, () => "slice=" + slice + " starts[-1]=" + starts[starts.Length - 1]); } SubResult subResult = new 
SubResult(); if (starts[reader] == slice.Start && starts[1 + reader] == slice.Start + slice.Length) diff --git a/src/Lucene.Net/Index/MultiDocValues.cs b/src/Lucene.Net/Index/MultiDocValues.cs index adb0f70aea..da6747e8f2 100644 --- a/src/Lucene.Net/Index/MultiDocValues.cs +++ b/src/Lucene.Net/Index/MultiDocValues.cs @@ -99,7 +99,7 @@ public static NumericDocValues GetNormValues(IndexReader r, string field) } starts[size] = r.MaxDoc; - if (Debugging.AssertsEnabled) Debugging.Assert(() => anyReal); + if (Debugging.AssertsEnabled) Debugging.Assert(anyReal); return new NumericDocValuesAnonymousInnerClassHelper(values, starts); } @@ -593,8 +593,8 @@ internal MultiSortedDocValues(SortedDocValues[] values, int[] docStarts, Ordinal { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => values.Length == mapping.ordDeltas.Length); - Debugging.Assert(() => docStarts.Length == values.Length + 1); + Debugging.Assert(values.Length == mapping.ordDeltas.Length); + Debugging.Assert(docStarts.Length == values.Length + 1); } this.values = values; this.docStarts = docStarts; @@ -655,8 +655,8 @@ internal MultiSortedSetDocValues(SortedSetDocValues[] values, int[] docStarts, O { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => values.Length == mapping.ordDeltas.Length); - Debugging.Assert(() => docStarts.Length == values.Length + 1); + Debugging.Assert(values.Length == mapping.ordDeltas.Length); + Debugging.Assert(docStarts.Length == values.Length + 1); } this.values = values; this.docStarts = docStarts; diff --git a/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs b/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs index f6c0546dbd..51ceec34e3 100644 --- a/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs +++ b/src/Lucene.Net/Index/MultiDocsAndPositionsEnum.cs @@ -90,7 +90,7 @@ public override int Freq { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => current != null); + if (Debugging.AssertsEnabled) Debugging.Assert(current != null); return current.Freq; } } @@ -99,7 +99,7 @@ public override int Freq public override int Advance(int target) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => target > doc); + if (Debugging.AssertsEnabled) Debugging.Assert(target > doc); while (true) { if (current != null) diff --git a/src/Lucene.Net/Index/MultiDocsEnum.cs b/src/Lucene.Net/Index/MultiDocsEnum.cs index a2e947de55..387a1be891 100644 --- a/src/Lucene.Net/Index/MultiDocsEnum.cs +++ b/src/Lucene.Net/Index/MultiDocsEnum.cs @@ -92,7 +92,7 @@ public bool CanReuse(MultiTermsEnum parent) public override int Advance(int target) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => target > doc); + if (Debugging.AssertsEnabled) Debugging.Assert(target > doc); while (true) { if (current != null) diff --git a/src/Lucene.Net/Index/MultiFields.cs b/src/Lucene.Net/Index/MultiFields.cs index af358b3c72..923bb9f3aa 100644 --- a/src/Lucene.Net/Index/MultiFields.cs +++ b/src/Lucene.Net/Index/MultiFields.cs @@ -117,7 +117,7 @@ public static IBits GetLiveDocs(IndexReader reader) { IList leaves = reader.Leaves; int size = leaves.Count; - if (Debugging.AssertsEnabled) Debugging.Assert(() => size > 0, () => "A reader with deletions must have at least one leave"); + if (Debugging.AssertsEnabled) Debugging.Assert(size > 0, () => "A reader with deletions must have at least one leave"); if (size == 1) { return leaves[0].AtomicReader.LiveDocs; @@ -177,8 +177,8 @@ public static DocsEnum GetTermDocsEnum(IndexReader r, IBits liveDocs, string fie { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => field != 
null); - Debugging.Assert(() => term != null); + Debugging.Assert(field != null); + Debugging.Assert(term != null); } Terms terms = GetTerms(r, field); if (terms != null) @@ -215,8 +215,8 @@ public static DocsAndPositionsEnum GetTermPositionsEnum(IndexReader r, IBits liv { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => field != null); - Debugging.Assert(() => term != null); + Debugging.Assert(field != null); + Debugging.Assert(term != null); } Terms terms = GetTerms(r, field); if (terms != null) diff --git a/src/Lucene.Net/Index/MultiTerms.cs b/src/Lucene.Net/Index/MultiTerms.cs index 4cbf243e98..7ffec6f4b1 100644 --- a/src/Lucene.Net/Index/MultiTerms.cs +++ b/src/Lucene.Net/Index/MultiTerms.cs @@ -54,7 +54,7 @@ public MultiTerms(Terms[] subs, ReaderSlice[] subSlices) this.subSlices = subSlices; IComparer<BytesRef> _termComp = null; - if (Debugging.AssertsEnabled) Debugging.Assert(() => subs.Length > 0, () => "inefficient: don't use MultiTerms over one sub"); + if (Debugging.AssertsEnabled) Debugging.Assert(subs.Length > 0, () => "inefficient: don't use MultiTerms over one sub"); bool _hasFreqs = true; bool _hasOffsets = true; bool _hasPositions = true; diff --git a/src/Lucene.Net/Index/MultiTermsEnum.cs b/src/Lucene.Net/Index/MultiTermsEnum.cs index 4ee98d0871..afe5955273 100644 --- a/src/Lucene.Net/Index/MultiTermsEnum.cs +++ b/src/Lucene.Net/Index/MultiTermsEnum.cs @@ -107,7 +107,7 @@ public MultiTermsEnum(ReaderSlice[] slices) /// public TermsEnum Reset(TermsEnumIndex[] termsEnumsIndex) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnumsIndex.Length <= top.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnumsIndex.Length <= top.Length); numSubs = 0; numTop = 0; termComp = null; @@ -115,7 +115,7 @@ public TermsEnum Reset(TermsEnumIndex[] termsEnumsIndex) for (int i = 0; i < termsEnumsIndex.Length; i++) { TermsEnumIndex termsEnumIndex = termsEnumsIndex[i]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnumIndex != null); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnumIndex != null); // init our term comp if (termComp == null) @@ -213,7 +213,7 @@ public override bool SeekExact(BytesRef term) { top[numTop++] = currentSubs[i]; current = currentSubs[i].Current = currentSubs[i].Terms.Term; - if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Equals(currentSubs[i].Current)); + if (Debugging.AssertsEnabled) Debugging.Assert(term.Equals(currentSubs[i].Current)); } } @@ -285,7 +285,7 @@ public override SeekStatus SeekCeil(BytesRef term) if (status == SeekStatus.NOT_FOUND) { currentSubs[i].Current = currentSubs[i].Terms.Term; - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentSubs[i].Current != null); + if (Debugging.AssertsEnabled) Debugging.Assert(currentSubs[i].Current != null); queue.Add(currentSubs[i]); } else @@ -326,7 +326,7 @@ private void PullTop() { // extract all subs from the queue that have the same // top term - if (Debugging.AssertsEnabled) Debugging.Assert(() => numTop == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numTop == 0); while (true) { top[numTop++] = queue.Pop(); @@ -367,7 +367,7 @@ public override BytesRef Next() // most impls short-circuit if you SeekCeil to term // they are already on.
SeekStatus status = SeekCeil(current); - if (Debugging.AssertsEnabled) Debugging.Assert(() => status == SeekStatus.FOUND); + if (Debugging.AssertsEnabled) Debugging.Assert(status == SeekStatus.FOUND); lastSeekExact = false; } lastSeek = null; @@ -484,7 +484,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) b = null; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => entry.Index < docsEnum.subDocsEnum.Length, () => entry.Index + " vs " + docsEnum.subDocsEnum.Length + "; " + subs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(entry.Index < docsEnum.subDocsEnum.Length, () => entry.Index + " vs " + docsEnum.subDocsEnum.Length + "; " + subs.Length); DocsEnum subDocsEnum = entry.Terms.Docs(b, docsEnum.subDocsEnum[entry.Index], flags); if (subDocsEnum != null) { @@ -496,7 +496,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) else { // should this be an error? - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "One of our subs cannot provide a docsenum"); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "One of our subs cannot provide a docsenum"); } } @@ -576,7 +576,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos b = null; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => entry.Index < docsAndPositionsEnum.subDocsAndPositionsEnum.Length, () => entry.Index + " vs " + docsAndPositionsEnum.subDocsAndPositionsEnum.Length + "; " + subs.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(entry.Index < docsAndPositionsEnum.subDocsAndPositionsEnum.Length, () => entry.Index + " vs " + docsAndPositionsEnum.subDocsAndPositionsEnum.Length + "; " + subs.Length); DocsAndPositionsEnum subPostings = entry.Terms.DocsAndPositions(b, docsAndPositionsEnum.subDocsAndPositionsEnum[entry.Index], flags); if (subPostings != null) @@ -619,7 +619,7 @@ public TermsEnumWithSlice(int index, ReaderSlice subSlice) { this.SubSlice = subSlice; this.Index = index; - if (Debugging.AssertsEnabled) Debugging.Assert(() => subSlice.Length >= 0, () => "length=" + subSlice.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(subSlice.Length >= 0, () => "length=" + subSlice.Length); } public void Reset(TermsEnum terms, BytesRef term) diff --git a/src/Lucene.Net/Index/NormsConsumer.cs b/src/Lucene.Net/Index/NormsConsumer.cs index 5b376409a6..d79506015a 100644 --- a/src/Lucene.Net/Index/NormsConsumer.cs +++ b/src/Lucene.Net/Index/NormsConsumer.cs @@ -50,7 +50,7 @@ internal override void Flush(IDictionary if (state.FieldInfos.HasNorms) { NormsFormat normsFormat = state.SegmentInfo.Codec.NormsFormat; - if (Debugging.AssertsEnabled) Debugging.Assert(() => normsFormat != null); + if (Debugging.AssertsEnabled) Debugging.Assert(normsFormat != null); normsConsumer = normsFormat.NormsConsumer(state); foreach (FieldInfo fi in state.FieldInfos) @@ -63,11 +63,11 @@ internal override void Flush(IDictionary if (toWrite != null && !toWrite.IsEmpty) { toWrite.Flush(state, normsConsumer); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.NormType == DocValuesType.NUMERIC); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.NormType == DocValuesType.NUMERIC); } else if (fi.IsIndexed) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fi.NormType == DocValuesType.NONE, () => "got " + fi.NormType + "; field=" + fi.Name); + if (Debugging.AssertsEnabled) Debugging.Assert(fi.NormType == DocValuesType.NONE, () => "got " + fi.NormType + "; field=" + fi.Name); } } } diff --git 
a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs index 26e717ab84..96add831a7 100644 --- a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs +++ b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs @@ -206,7 +206,7 @@ protected override int Compare(int i, int j) [MethodImpl(MethodImplOptions.NoInlining)] public override void Merge(DocValuesFieldUpdates other) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => other is NumericDocValuesFieldUpdates); + if (Debugging.AssertsEnabled) Debugging.Assert(other is NumericDocValuesFieldUpdates); NumericDocValuesFieldUpdates otherUpdates = (NumericDocValuesFieldUpdates)other; if (size + otherUpdates.size > int.MaxValue) { diff --git a/src/Lucene.Net/Index/OrdTermState.cs b/src/Lucene.Net/Index/OrdTermState.cs index 2966943dc5..4842cbc6dd 100644 --- a/src/Lucene.Net/Index/OrdTermState.cs +++ b/src/Lucene.Net/Index/OrdTermState.cs @@ -40,7 +40,7 @@ public OrdTermState() public override void CopyFrom(TermState other) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => other is OrdTermState, () => "can not copy from " + other.GetType().Name); + if (Debugging.AssertsEnabled) Debugging.Assert(other is OrdTermState, () => "can not copy from " + other.GetType().Name); this.Ord = ((OrdTermState)other).Ord; } diff --git a/src/Lucene.Net/Index/ParallelCompositeReader.cs b/src/Lucene.Net/Index/ParallelCompositeReader.cs index 74740374f2..5679f9f704 100644 --- a/src/Lucene.Net/Index/ParallelCompositeReader.cs +++ b/src/Lucene.Net/Index/ParallelCompositeReader.cs @@ -145,7 +145,7 @@ private static IndexReader[] PrepareSubReaders(CompositeReader[] readers, Compos } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => firstSubReaders[i] is CompositeReader); + if (Debugging.AssertsEnabled) Debugging.Assert(firstSubReaders[i] is CompositeReader); CompositeReader[] compositeSubs = new CompositeReader[readers.Length]; for (int j = 0; j < readers.Length; j++) { diff --git a/src/Lucene.Net/Index/PrefixCodedTerms.cs b/src/Lucene.Net/Index/PrefixCodedTerms.cs index 2b01ab0994..7746d26c38 100644 --- a/src/Lucene.Net/Index/PrefixCodedTerms.cs +++ b/src/Lucene.Net/Index/PrefixCodedTerms.cs @@ -94,7 +94,7 @@ public virtual bool MoveNext() { // LUCENENET specific - Since there is no way to check for a next element // without calling this method in .NET, the assert is redundant and ineffective. 
- //if (Debugging.AssertsEnabled) Debugging.Assert(() => input.GetFilePointer() < input.Length); // Has next + //if (Debugging.AssertsEnabled) Debugging.Assert(input.GetFilePointer() < input.Length); // Has next if (input.GetFilePointer() < input.Length) { try @@ -149,7 +149,7 @@ internal virtual void InitializeInstanceFields() /// add a term public virtual void Add(Term term) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => lastTerm.Equals(new Term("")) || term.CompareTo(lastTerm) > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(lastTerm.Equals(new Term("")) || term.CompareTo(lastTerm) > 0); try { diff --git a/src/Lucene.Net/Index/ReadersAndUpdates.cs b/src/Lucene.Net/Index/ReadersAndUpdates.cs index a56e2241ac..8aae787bac 100644 --- a/src/Lucene.Net/Index/ReadersAndUpdates.cs +++ b/src/Lucene.Net/Index/ReadersAndUpdates.cs @@ -105,19 +105,19 @@ public ReadersAndUpdates(IndexWriter writer, SegmentCommitInfo info) public virtual void IncRef() { int rc = refCount.IncrementAndGet(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => rc > 1); + if (Debugging.AssertsEnabled) Debugging.Assert(rc > 1); } public virtual void DecRef() { int rc = refCount.DecrementAndGet(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => rc >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(rc >= 0); } public virtual int RefCount() { int rc = refCount; - if (Debugging.AssertsEnabled) Debugging.Assert(() => rc >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(rc >= 0); return rc; } @@ -154,7 +154,7 @@ public virtual bool VerifyDocCounts() count = Info.Info.DocCount; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count, () => "info.docCount=" + Info.Info.DocCount + " info.DelCount=" + Info.DelCount + " pendingDeleteCount=" + pendingDeleteCount + " count=" + count); + if (Debugging.AssertsEnabled) Debugging.Assert(Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count, () => "info.docCount=" + Info.Info.DocCount + " info.DelCount=" + Info.DelCount + " pendingDeleteCount=" + pendingDeleteCount + " count=" + count); return true; } } @@ -221,7 +221,7 @@ public virtual void Release(SegmentReader sr) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Info == sr.SegmentInfo); + if (Debugging.AssertsEnabled) Debugging.Assert(Info == sr.SegmentInfo); sr.DecRef(); } } @@ -232,10 +232,10 @@ public virtual bool Delete(int docID) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => liveDocs != null); - Debugging.Assert(() => Monitor.IsEntered(writer)); - Debugging.Assert(() => docID >= 0 && docID < liveDocs.Length, () => "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount); - Debugging.Assert(() => !liveDocsShared); + Debugging.Assert(liveDocs != null); + Debugging.Assert(Monitor.IsEntered(writer)); + Debugging.Assert(docID >= 0 && docID < liveDocs.Length, () => "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount); + Debugging.Assert(!liveDocsShared); } bool didDelete = liveDocs.Get(docID); if (didDelete) @@ -302,7 +302,7 @@ public virtual SegmentReader GetReadOnlyClone(IOContext context) if (reader == null) { GetReader(context).DecRef(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => reader != null); + if (Debugging.AssertsEnabled) Debugging.Assert(reader != null); } liveDocsShared = true; if (liveDocs != null) @@ -311,7 
+311,7 @@ public virtual SegmentReader GetReadOnlyClone(IOContext context) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => reader.LiveDocs == liveDocs); + if (Debugging.AssertsEnabled) Debugging.Assert(reader.LiveDocs == liveDocs); reader.IncRef(); return reader; } @@ -324,8 +324,8 @@ public virtual void InitWritableLiveDocs() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Monitor.IsEntered(writer)); - Debugging.Assert(() => Info.Info.DocCount > 0); + Debugging.Assert(Monitor.IsEntered(writer)); + Debugging.Assert(Info.Info.DocCount > 0); } //System.out.println("initWritableLivedocs seg=" + info + " liveDocs=" + liveDocs + " shared=" + shared); if (liveDocsShared) @@ -355,7 +355,7 @@ public virtual IBits LiveDocs { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(Monitor.IsEntered(writer)); return liveDocs; } } @@ -366,7 +366,7 @@ public virtual IBits GetReadOnlyLiveDocs() lock (this) { //System.out.println("getROLiveDocs seg=" + info); - if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(Monitor.IsEntered(writer)); liveDocsShared = true; //if (liveDocs != null) { //System.out.println(" liveCount=" + liveDocs.count()); @@ -400,7 +400,7 @@ public virtual bool WriteLiveDocs(Directory dir) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(Monitor.IsEntered(writer)); //System.out.println("rld.writeLiveDocs seg=" + info + " pendingDelCount=" + pendingDeleteCount + " numericUpdates=" + numericUpdates); if (pendingDeleteCount == 0) { @@ -408,7 +408,7 @@ public virtual bool WriteLiveDocs(Directory dir) } // We have new deletes - if (Debugging.AssertsEnabled) Debugging.Assert(() => liveDocs.Length == Info.Info.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(liveDocs.Length == Info.Info.DocCount); // Do this so we can delete any created files on // exception; this saves all codecs from having to do @@ -465,10 +465,10 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(Monitor.IsEntered(writer)); //System.out.println("rld.writeFieldUpdates: seg=" + info + " numericFieldUpdates=" + numericFieldUpdates); - if (Debugging.AssertsEnabled) Debugging.Assert(dvUpdates.Any); + if (Debugging.AssertsEnabled) Debugging.Assert(dvUpdates.Any()); // Do this so we can delete any created files on // exception; this saves all codecs from having to do @@ -530,7 +530,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta string field = e.Key; NumericDocValuesFieldUpdates fieldUpdates = e.Value; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfo != null); fieldInfo.DocValuesGen = nextFieldInfosGen; // write the numeric updates to a new gen'd docvalues file @@ -543,7 +543,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta string field = e.Key; BinaryDocValuesFieldUpdates dvFieldUpdates = e.Value; FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fieldInfo != null); + if 
(Debugging.AssertsEnabled) Debugging.Assert(fieldInfo != null); // System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " f=" + dvFieldUpdates + ", updates=" + dvFieldUpdates); @@ -699,7 +699,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta } else { // no update for this document - if (Debugging.AssertsEnabled) Debugging.Assert(() => curDoc < updateDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(curDoc < updateDoc); if (currentValues != null && DocsWithField.Get(curDoc)) { // only read the current value if the document had a value before @@ -732,7 +732,7 @@ private IEnumerable GetBytesRefEnumerable(SegmentReader reader, string } else { // no update for this document - if (Debugging.AssertsEnabled) Debugging.Assert(() => curDoc < updateDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(curDoc < updateDoc); if (currentValues != null && DocsWithField.Get(curDoc)) { // only read the current value if the document had a value before @@ -755,7 +755,7 @@ internal virtual SegmentReader GetReaderForMerge(IOContext context) { lock (this) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Monitor.IsEntered(writer)); + if (Debugging.AssertsEnabled) Debugging.Assert(Monitor.IsEntered(writer)); // must execute these two statements as atomic operation, otherwise we // could lose updates if e.g. another thread calls writeFieldUpdates in // between, or the updates are applied to the obtained reader, but then diff --git a/src/Lucene.Net/Index/SegmentCoreReaders.cs b/src/Lucene.Net/Index/SegmentCoreReaders.cs index 32aa1cd4ee..b4d896d89f 100644 --- a/src/Lucene.Net/Index/SegmentCoreReaders.cs +++ b/src/Lucene.Net/Index/SegmentCoreReaders.cs @@ -106,7 +106,7 @@ internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInf SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.Info, fieldInfos, context, termsIndexDivisor); // Ask codec for its Fields fields = format.FieldsProducer(segmentReadState); - if (Debugging.AssertsEnabled) Debugging.Assert(() => fields != null); + if (Debugging.AssertsEnabled) Debugging.Assert(fields != null); // ask codec for its Norms: // TODO: since we don't write any norms file if there are no norms, // kinda jaky to assume the codec handles the case of no norms file at all gracefully?! @@ -114,7 +114,7 @@ internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInf if (fieldInfos.HasNorms) { normsProducer = codec.NormsFormat.NormsProducer(segmentReadState); - if (Debugging.AssertsEnabled) Debugging.Assert(() => normsProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(normsProducer != null); } else { @@ -160,7 +160,7 @@ internal void IncRef() internal NumericDocValues GetNormValues(FieldInfo fi) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => normsProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(normsProducer != null); IDictionary normFields = normsLocal.Value; diff --git a/src/Lucene.Net/Index/SegmentDocValues.cs b/src/Lucene.Net/Index/SegmentDocValues.cs index 9d6e2743ed..00d99d94d4 100644 --- a/src/Lucene.Net/Index/SegmentDocValues.cs +++ b/src/Lucene.Net/Index/SegmentDocValues.cs @@ -85,7 +85,7 @@ internal DocValuesProducer GetDocValuesProducer(long? 
gen, SegmentCommitInfo si, if (!(genDVProducers.TryGetValue(gen, out dvp))) { dvp = NewDocValuesProducer(si, context, dir, dvFormat, gen, infos, termsIndexDivisor); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dvp != null); + if (Debugging.AssertsEnabled) Debugging.Assert(dvp != null); genDVProducers[gen] = dvp; } else @@ -108,7 +108,7 @@ internal void DecRef(IList dvProducersGens) foreach (long? gen in dvProducersGens) { RefCount dvp = genDVProducers[gen]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => dvp != null, () => "gen=" + gen); + if (Debugging.AssertsEnabled) Debugging.Assert(dvp != null, () => "gen=" + gen); try { dvp.DecRef(); diff --git a/src/Lucene.Net/Index/SegmentInfo.cs b/src/Lucene.Net/Index/SegmentInfo.cs index 9321fdb2dd..b96675e6d7 100644 --- a/src/Lucene.Net/Index/SegmentInfo.cs +++ b/src/Lucene.Net/Index/SegmentInfo.cs @@ -102,7 +102,7 @@ public SegmentInfo(Directory dir, string version, string name, int docCount, boo /// public SegmentInfo(Directory dir, string version, string name, int docCount, bool isCompoundFile, Codec codec, IDictionary diagnostics, IDictionary attributes) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !(dir is TrackingDirectoryWrapper)); + if (Debugging.AssertsEnabled) Debugging.Assert(!(dir is TrackingDirectoryWrapper)); this.Dir = dir; this.version = version; this.Name = name; @@ -137,7 +137,7 @@ public Codec Codec get => codec; set { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.codec == null); + if (Debugging.AssertsEnabled) Debugging.Assert(this.codec == null); if (value == null) { throw new ArgumentException("codec must be non-null"); diff --git a/src/Lucene.Net/Index/SegmentInfos.cs b/src/Lucene.Net/Index/SegmentInfos.cs index 155e09beae..15be276221 100644 --- a/src/Lucene.Net/Index/SegmentInfos.cs +++ b/src/Lucene.Net/Index/SegmentInfos.cs @@ -543,7 +543,7 @@ private void Write(Directory directory) segnOutput.WriteInt64(e.Key); segnOutput.WriteStringSet(e.Value); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => si.Dir == directory); + if (Debugging.AssertsEnabled) Debugging.Assert(si.Dir == directory); // If this segment is pre-4.x, perform a one-time // "ugprade" to write the .si file for it: @@ -710,7 +710,7 @@ public object Clone() sis.segments = new List(Count); foreach (SegmentCommitInfo info in segments) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Codec != null); + if (Debugging.AssertsEnabled) Debugging.Assert(info.Info.Codec != null); // dont directly access segments, use add method!!! 
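All of the hunks above reduce to one converted call-site shape: the condition is now written as a plain bool expression, while the failure message, when present, stays behind a Func<string> so the string concatenation only happens if the assertion actually fires. A minimal sketch of that shape, reusing the dvp/gen locals from the SegmentDocValues hunk above:

    // Guard first: when asserts are disabled, the condition is never evaluated.
    // The message stays a lambda, so "gen=" + gen is only built on failure.
    if (Debugging.AssertsEnabled) Debugging.Assert(dvp != null, () => "gen=" + gen);

Compared with the older Assert(() => dvp != null, ...) form, this avoids allocating a closure over dvp on every call just to evaluate the condition.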
sis.Add((SegmentCommitInfo)(info.Clone())); } @@ -1153,7 +1153,7 @@ public ICollection GetFiles(Directory dir, bool includeSegmentsFile) for (int i = 0; i < size; i++) { var info = Info(i); - if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Dir == dir); + if (Debugging.AssertsEnabled) Debugging.Assert(info.Info.Dir == dir); if (info.Info.Dir == dir) { files.UnionWith(info.GetFiles()); @@ -1341,7 +1341,7 @@ internal void ApplyMergeChanges(MergePolicy.OneMerge merge, bool dropSegment) int newSegIdx = 0; for (int segIdx = 0, cnt = segments.Count; segIdx < cnt; segIdx++) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => segIdx >= newSegIdx); + if (Debugging.AssertsEnabled) Debugging.Assert(segIdx >= newSegIdx); SegmentCommitInfo info = segments[segIdx]; if (mergedAway.Contains(info)) { @@ -1378,7 +1378,7 @@ internal IList CreateBackupSegmentInfos() var list = new List(Count); foreach (var info in segments) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Codec != null); + if (Debugging.AssertsEnabled) Debugging.Assert(info.Info.Codec != null); list.Add((SegmentCommitInfo)(info.Clone())); } return list; diff --git a/src/Lucene.Net/Index/SegmentMerger.cs b/src/Lucene.Net/Index/SegmentMerger.cs index d6518fa7d9..3846f3ac32 100644 --- a/src/Lucene.Net/Index/SegmentMerger.cs +++ b/src/Lucene.Net/Index/SegmentMerger.cs @@ -110,7 +110,7 @@ internal MergeState Merge() long t1 = Time.NanoTime(); mergeState.InfoStream.Message("SM", ((t1 - t0) / 1000000) + " msec to merge stored fields [" + numMerged + " docs]"); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => numMerged == mergeState.SegmentInfo.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(numMerged == mergeState.SegmentInfo.DocCount); SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.InfoStream, directory, mergeState.SegmentInfo, mergeState.FieldInfos, termIndexInterval, null, context); if (mergeState.InfoStream.IsEnabled("SM")) @@ -164,7 +164,7 @@ internal MergeState Merge() long t1 = Time.NanoTime(); mergeState.InfoStream.Message("SM", ((t1 - t0) / 1000000) + " msec to merge vectors [" + numMerged + " docs]"); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => numMerged == mergeState.SegmentInfo.DocCount); + if (Debugging.AssertsEnabled) Debugging.Assert(numMerged == mergeState.SegmentInfo.DocCount); } // write the merged infos diff --git a/src/Lucene.Net/Index/SegmentReader.cs b/src/Lucene.Net/Index/SegmentReader.cs index 32f7392e5a..86e8cb1e54 100644 --- a/src/Lucene.Net/Index/SegmentReader.cs +++ b/src/Lucene.Net/Index/SegmentReader.cs @@ -100,7 +100,7 @@ public SegmentReader(SegmentCommitInfo si, int termInfosIndexDivisor, IOContext } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => si.DelCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(si.DelCount == 0); liveDocs = null; } numDocs = si.Info.DocCount - si.DelCount; @@ -464,7 +464,7 @@ public override NumericDocValues GetNumericDocValues(string field) { DocValuesProducer dvProducer; dvProducersByField.TryGetValue(field, out dvProducer); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); dvs = dvProducer.GetNumeric(fi); dvFields[field] = dvs; } @@ -494,7 +494,7 @@ public override IBits GetDocsWithField(string field) { DocValuesProducer dvProducer; dvProducersByField.TryGetValue(field, out dvProducer); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); + if 
(Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); dvs = dvProducer.GetDocsWithField(fi); dvFields[field] = dvs; } @@ -520,7 +520,7 @@ public override BinaryDocValues GetBinaryDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); dvs = dvProducer.GetBinary(fi); dvFields[field] = dvs; } @@ -546,7 +546,7 @@ public override SortedDocValues GetSortedDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); dvs = dvProducer.GetSorted(fi); dvFields[field] = dvs; } @@ -572,7 +572,7 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) if (dvs == null) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dvProducer != null); + if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); dvs = dvProducer.GetSortedSet(fi); dvFields[field] = dvs; } diff --git a/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs b/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs index 0534ac5142..61a579a7d8 100644 --- a/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs +++ b/src/Lucene.Net/Index/SimpleMergedSegmentWarmer.cs @@ -80,7 +80,7 @@ public override void Warm(AtomicReader reader) break; default: - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); // unknown dv type + if (Debugging.AssertsEnabled) Debugging.Assert(false); // unknown dv type break; } docValuesCount++; diff --git a/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs b/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs index 44c2710c1a..afcd9a9c86 100644 --- a/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs +++ b/src/Lucene.Net/Index/SingletonSortedSetDocValues.cs @@ -39,7 +39,7 @@ internal sealed class SingletonSortedSetDocValues : SortedSetDocValues public SingletonSortedSetDocValues(SortedDocValues @in) { this.@in = @in; - if (Debugging.AssertsEnabled) Debugging.Assert(() => NO_MORE_ORDS == -1); // this allows our nextOrd() to work for missing values without a check + if (Debugging.AssertsEnabled) Debugging.Assert(NO_MORE_ORDS == -1); // this allows our nextOrd() to work for missing values without a check } /// diff --git a/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs b/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs index 9fd3a524de..d720c27a9c 100644 --- a/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs +++ b/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs @@ -60,7 +60,7 @@ public static AtomicReader Wrap(IndexReader reader) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => reader is AtomicReader); + if (Debugging.AssertsEnabled) Debugging.Assert(reader is AtomicReader); return (AtomicReader)reader; } } @@ -174,7 +174,7 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) { return null; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => map != null); + if (Debugging.AssertsEnabled) Debugging.Assert(map != null); int size = @in.Leaves.Count; var values = new SortedSetDocValues[size]; int[] starts = new int[size + 1]; diff --git a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs index b017f0bba4..dc8ea9be08 
100644 --- a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs +++ b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs @@ -130,7 +130,7 @@ protected internal virtual void ReleaseGen(long gen) throw new ArgumentException("commit gen=" + gen + " is not currently snapshotted"); } int refCountInt = (int)refCount; - if (Debugging.AssertsEnabled) Debugging.Assert(() => refCountInt > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(refCountInt > 0); refCountInt--; if (refCountInt == 0) { diff --git a/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs b/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs index fa08b86b74..9bc7742efc 100644 --- a/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs +++ b/src/Lucene.Net/Index/SortedDocValuesTermsEnum.cs @@ -94,7 +94,7 @@ public override bool SeekExact(BytesRef text) public override void SeekExact(long ord) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < values.ValueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0 && ord < values.ValueCount); currentOrd = (int)ord; values.LookupOrd(currentOrd, term); } @@ -132,7 +132,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null && state is OrdTermState); + if (Debugging.AssertsEnabled) Debugging.Assert(state != null && state is OrdTermState); this.SeekExact(((OrdTermState)state).Ord); } diff --git a/src/Lucene.Net/Index/SortedDocValuesWriter.cs b/src/Lucene.Net/Index/SortedDocValuesWriter.cs index 4ee8226353..54842edea3 100644 --- a/src/Lucene.Net/Index/SortedDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedDocValuesWriter.cs @@ -114,7 +114,7 @@ public override void Flush(SegmentWriteState state, DocValuesConsumer dvConsumer { int maxDoc = state.SegmentInfo.DocCount; - if (Debugging.AssertsEnabled) Debugging.Assert(() => pending.Count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(pending.Count == maxDoc); int valueCount = hash.Count; int[] sortedValues = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer); @@ -148,7 +148,7 @@ private IEnumerable GetBytesRefEnumberable(int valueCount, int[] sorte private IEnumerable GetOrdsEnumberable(int maxDoc, int[] ordMap) { AppendingDeltaPackedInt64Buffer.Iterator iter = pending.GetIterator(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => pending.Count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(pending.Count == maxDoc); for (int i = 0; i < maxDoc; ++i) { diff --git a/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs b/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs index 1bf62a1d67..baef17aabc 100644 --- a/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs +++ b/src/Lucene.Net/Index/SortedSetDocValuesTermsEnum.cs @@ -94,7 +94,7 @@ public override bool SeekExact(BytesRef text) public override void SeekExact(long ord) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < values.ValueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0 && ord < values.ValueCount); currentOrd = (int)ord; values.LookupOrd(currentOrd, term); } @@ -132,7 +132,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos public override void SeekExact(BytesRef term, TermState state) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null && state is OrdTermState); + if (Debugging.AssertsEnabled) Debugging.Assert(state != null && state is OrdTermState); 
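Two recurring variants appear in the hunks above: Debugging.Assert(false) marks a branch that should be unreachable (the unknown DocValues type default case in SimpleMergedSegmentWarmer), and a combined null-and-type test guards the downcast to OrdTermState performed on the very next line. Sketched in isolation (the switch and its case labels are stand-ins, not the exact surrounding code):

    switch (type)
    {
        case DocValuesType.NUMERIC:
            // ... known cases handled above ...
            break;
        default:
            // Unreachable by construction; fails loudly when asserts are enabled.
            if (Debugging.AssertsEnabled) Debugging.Assert(false); // unknown dv type
            break;
    }

    // Downcast guard: nullity and runtime type checked in a single condition,
    // immediately before the cast.
    if (Debugging.AssertsEnabled) Debugging.Assert(state != null && state is OrdTermState);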
this.SeekExact(((OrdTermState)state).Ord); } diff --git a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs index c27c230e0d..6a7b1c95f2 100644 --- a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs @@ -164,7 +164,7 @@ public override void Flush(SegmentWriteState state, DocValuesConsumer dvConsumer { int maxDoc = state.SegmentInfo.DocCount; int maxCountPerDoc = maxCount; - if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingCounts.Count == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(pendingCounts.Count == maxDoc); int valueCount = hash.Count; int[] sortedValues = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer); @@ -203,7 +203,7 @@ private IEnumerable GetBytesRefEnumberable(int valueCount, int[] sorte { AppendingDeltaPackedInt64Buffer.Iterator iter = pendingCounts.GetIterator(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingCounts.Count == maxDoc, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count); + if (Debugging.AssertsEnabled) Debugging.Assert(pendingCounts.Count == maxDoc, () => "MaxDoc: " + maxDoc + ", pending.Count: " + pending.Count); for (int i = 0; i < maxDoc; ++i) { diff --git a/src/Lucene.Net/Index/StandardDirectoryReader.cs b/src/Lucene.Net/Index/StandardDirectoryReader.cs index a86a6fb301..ed0ee8be37 100644 --- a/src/Lucene.Net/Index/StandardDirectoryReader.cs +++ b/src/Lucene.Net/Index/StandardDirectoryReader.cs @@ -117,7 +117,7 @@ internal static DirectoryReader Open(IndexWriter writer, SegmentInfos infos, boo // actual instance of SegmentInfoPerCommit in // IndexWriter's segmentInfos: SegmentCommitInfo info = infos.Info(i); - if (Debugging.AssertsEnabled) Debugging.Assert(() => info.Info.Dir == dir); + if (Debugging.AssertsEnabled) Debugging.Assert(info.Info.Dir == dir); ReadersAndUpdates rld = writer.readerPool.Get(info, true); try { @@ -237,8 +237,8 @@ private static DirectoryReader Open(Directory directory, SegmentInfos infos, ILi // Steal the ref returned by SegmentReader ctor: if (Debugging.AssertsEnabled) { - Debugging.Assert(() => infos.Info(i).Info.Dir == newReaders[i].SegmentInfo.Info.Dir); - Debugging.Assert(() => infos.Info(i).HasDeletions || infos.Info(i).HasFieldUpdates); + Debugging.Assert(infos.Info(i).Info.Dir == newReaders[i].SegmentInfo.Info.Dir); + Debugging.Assert(infos.Info(i).HasDeletions || infos.Info(i).HasFieldUpdates); } if (newReaders[i].SegmentInfo.DelGen == infos.Info(i).DelGen) { diff --git a/src/Lucene.Net/Index/StoredFieldsProcessor.cs b/src/Lucene.Net/Index/StoredFieldsProcessor.cs index 52eb040cbf..4cbb401060 100644 --- a/src/Lucene.Net/Index/StoredFieldsProcessor.cs +++ b/src/Lucene.Net/Index/StoredFieldsProcessor.cs @@ -139,7 +139,7 @@ internal void Fill(int docID) [MethodImpl(MethodImplOptions.NoInlining)] internal override void FinishDocument() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument start")); + if (Debugging.AssertsEnabled) Debugging.Assert(docWriter.TestPoint("StoredFieldsWriter.finishDocument start")); InitFieldsWriter(IOContext.DEFAULT); Fill(docState.docID); @@ -156,7 +156,7 @@ internal override void FinishDocument() } Reset(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.TestPoint("StoredFieldsWriter.finishDocument end")); + if (Debugging.AssertsEnabled) Debugging.Assert(docWriter.TestPoint("StoredFieldsWriter.finishDocument end")); } public override void AddField(int docID, 
IIndexableField field, FieldInfo fieldInfo) @@ -179,7 +179,7 @@ public override void AddField(int docID, IIndexableField field, FieldInfo fieldI fieldInfos[numStoredFields] = fieldInfo; numStoredFields++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField")); + if (Debugging.AssertsEnabled) Debugging.Assert(docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField")); } } } diff --git a/src/Lucene.Net/Index/TermContext.cs b/src/Lucene.Net/Index/TermContext.cs index a275042ef1..f8c3b3e24c 100644 --- a/src/Lucene.Net/Index/TermContext.cs +++ b/src/Lucene.Net/Index/TermContext.cs @@ -57,7 +57,7 @@ public sealed class TermContext /// public TermContext(IndexReaderContext context) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => context != null && context.IsTopLevel); + if (Debugging.AssertsEnabled) Debugging.Assert(context != null && context.IsTopLevel); TopReaderContext = context; docFreq = 0; int len; @@ -92,7 +92,7 @@ public TermContext(IndexReaderContext context, TermState state, int ord, int doc /// public static TermContext Build(IndexReaderContext context, Term term) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => context != null && context.IsTopLevel); + if (Debugging.AssertsEnabled) Debugging.Assert(context != null && context.IsTopLevel); string field = term.Field; BytesRef bytes = term.Bytes; TermContext perReaderTermState = new TermContext(context); @@ -137,9 +137,9 @@ public void Register(TermState state, int ord, int docFreq, long totalTermFreq) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => state != null, () => "state must not be null"); - Debugging.Assert(() => ord >= 0 && ord < states.Length); - Debugging.Assert(() => states[ord] == null, () => "state for ord: " + ord + " already registered"); + Debugging.Assert(state != null, () => "state must not be null"); + Debugging.Assert(ord >= 0 && ord < states.Length); + Debugging.Assert(states[ord] == null, () => "state for ord: " + ord + " already registered"); } this.docFreq += docFreq; if (this.totalTermFreq >= 0 && totalTermFreq >= 0) @@ -163,7 +163,7 @@ public void Register(TermState state, int ord, int docFreq, long totalTermFreq) /// for the reader was registered public TermState Get(int ord) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0 && ord < states.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0 && ord < states.Length); return states[ord]; } diff --git a/src/Lucene.Net/Index/TermVectorsConsumer.cs b/src/Lucene.Net/Index/TermVectorsConsumer.cs index f46dbca0b1..b6298f7a20 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumer.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumer.cs @@ -61,12 +61,12 @@ public override void Flush(IDictionary fields if (writer != null) { int numDocs = state.SegmentInfo.DocCount; - if (Debugging.AssertsEnabled) Debugging.Assert(() => numDocs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 0); // At least one doc in this run had term vectors enabled try { Fill(numDocs); - if (Debugging.AssertsEnabled) Debugging.Assert(() => state.SegmentInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(state.SegmentInfo != null); writer.Finish(state.FieldInfos, numDocs); } finally @@ -114,7 +114,7 @@ private void InitTermVectorsWriter() [MethodImpl(MethodImplOptions.NoInlining)] internal override void FinishDocument(TermsHash termsHash) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => 
docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start")); + if (Debugging.AssertsEnabled) Debugging.Assert(docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start")); if (!hasVectors) { @@ -133,13 +133,13 @@ internal override void FinishDocument(TermsHash termsHash) } writer.FinishDocument(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => lastDocID == docState.docID, () => "lastDocID=" + lastDocID + " docState.docID=" + docState.docID); + if (Debugging.AssertsEnabled) Debugging.Assert(lastDocID == docState.docID, () => "lastDocID=" + lastDocID + " docState.docID=" + docState.docID); lastDocID++; termsHash.Reset(); Reset(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => docWriter.TestPoint("TermVectorsTermsWriter.finishDocument end")); + if (Debugging.AssertsEnabled) Debugging.Assert(docWriter.TestPoint("TermVectorsTermsWriter.finishDocument end")); } [MethodImpl(MethodImplOptions.NoInlining)] @@ -184,7 +184,7 @@ internal void AddFieldToFlush(TermVectorsConsumerPerField fieldToFlush) internal override void StartDocument() { - if (Debugging.AssertsEnabled) Debugging.Assert(ClearLastVectorFieldName); + if (Debugging.AssertsEnabled) Debugging.Assert(ClearLastVectorFieldName()); Reset(); } diff --git a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs index 71ebdb0512..f7875353e1 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs @@ -164,13 +164,13 @@ internal override void Finish() [MethodImpl(MethodImplOptions.NoInlining)] internal void FinishDocument() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.finish start")); + if (Debugging.AssertsEnabled) Debugging.Assert(docState.TestPoint("TermVectorsTermsWriterPerField.finish start")); int numPostings = termsHashPerField.bytesHash.Count; BytesRef flushTerm = termsWriter.flushTerm; - if (Debugging.AssertsEnabled) Debugging.Assert(() => numPostings >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numPostings >= 0); if (numPostings > maxNumPostings) { @@ -181,7 +181,7 @@ internal void FinishDocument() // of a given field in the doc. At this point we flush // our hash into the DocWriter. 
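One subtle variant above: bool-returning helpers with side effects, such as ClearLastVectorFieldName and the docState.TestPoint calls, were previously passed to Assert as method groups (satisfying the Func<bool> overload) and are now invoked inline. Because every call site is already wrapped in if (Debugging.AssertsEnabled), the helper still runs only when asserts are on. A plausible shape for such a helper; the body here is an assumption for illustration, not the actual implementation:

    // Hypothetical check-and-reset helper in the style of ClearLastVectorFieldName():
    // it always returns true so it can be folded into an assertion, using the
    // call as an opportunity to reset per-document state.
    private bool ClearLastVectorFieldName()
    {
        lastVectorFieldName = null; // intentional side effect
        return true;
    }

    // Before: Debugging.Assert(ClearLastVectorFieldName);   // method group, invoked inside Assert
    // After:  invoked inline, but only when asserts are enabled:
    if (Debugging.AssertsEnabled) Debugging.Assert(ClearLastVectorFieldName());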
- if (Debugging.AssertsEnabled) Debugging.Assert(() => termsWriter.VectorFieldsInOrder(fieldInfo)); + if (Debugging.AssertsEnabled) Debugging.Assert(termsWriter.VectorFieldsInOrder(fieldInfo)); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; TermVectorsWriter tv = termsWriter.writer; @@ -293,7 +293,7 @@ internal void WriteProx(TermVectorsPostingsArray postings, int termID) internal override void NewTerm(int termID) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start")); + if (Debugging.AssertsEnabled) Debugging.Assert(docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start")); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; postings.freqs[termID] = 1; @@ -305,7 +305,7 @@ internal override void NewTerm(int termID) internal override void AddTerm(int termID) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start")); + if (Debugging.AssertsEnabled) Debugging.Assert(docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start")); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; postings.freqs[termID]++; @@ -344,7 +344,7 @@ internal override ParallelPostingsArray NewInstance(int size) internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => toArray is TermVectorsPostingsArray); + if (Debugging.AssertsEnabled) Debugging.Assert(toArray is TermVectorsPostingsArray); TermVectorsPostingsArray to = (TermVectorsPostingsArray)toArray; base.CopyTo(toArray, numToCopy); diff --git a/src/Lucene.Net/Index/TermsHashPerField.cs b/src/Lucene.Net/Index/TermsHashPerField.cs index 0f135ac74d..fd2ba6cbb6 100644 --- a/src/Lucene.Net/Index/TermsHashPerField.cs +++ b/src/Lucene.Net/Index/TermsHashPerField.cs @@ -112,7 +112,7 @@ public override void Abort() public void InitReader(ByteSliceReader reader, int termID, int stream) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => stream < streamCount); + if (Debugging.AssertsEnabled) Debugging.Assert(stream < streamCount); int intStart = postingsArray.intStarts[termID]; int[] ints = intPool.Buffers[intStart >> Int32BlockPool.INT32_BLOCK_SHIFT]; int upto = intStart & Int32BlockPool.INT32_BLOCK_MASK; @@ -291,7 +291,7 @@ internal void WriteByte(int stream, byte b) { int upto = intUptos[intUptoStart + stream]; var bytes = bytePool.Buffers[upto >> ByteBlockPool.BYTE_BLOCK_SHIFT]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes != null); + if (Debugging.AssertsEnabled) Debugging.Assert(bytes != null); int offset = upto & ByteBlockPool.BYTE_BLOCK_MASK; if (bytes[offset] != 0) { @@ -319,7 +319,7 @@ public void WriteBytes(int stream, byte[] b, int offset, int len) /// internal void WriteVInt32(int stream, int i) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => stream < streamCount); + if (Debugging.AssertsEnabled) Debugging.Assert(stream < streamCount); while ((i & ~0x7F) != 0) { WriteByte(stream, (sbyte)((i & 0x7f) | 0x80)); diff --git a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs index 4f07784fd0..58d4fb790d 100644 --- a/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs +++ b/src/Lucene.Net/Index/ThreadAffinityDocumentsWriterThreadPool.cs @@ -45,7 +45,7 @@ internal class 
ThreadAffinityDocumentsWriterThreadPool : DocumentsWriterPerThrea public ThreadAffinityDocumentsWriterThreadPool(int maxNumPerThreads) : base(maxNumPerThreads) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => MaxThreadStates >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(MaxThreadStates >= 1); } public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter documentsWriter) @@ -65,7 +65,7 @@ we should somehow prevent this. */ ThreadState newState = NewThreadState(); // state is already locked if non-null if (newState != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => newState.IsHeldByCurrentThread); + if (Debugging.AssertsEnabled) Debugging.Assert(newState.IsHeldByCurrentThread); threadBindings[requestingThread] = newState; return newState; } @@ -79,7 +79,7 @@ we should somehow prevent this. */ minThreadState = MinContendedThreadState(); } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => minThreadState != null, () => "ThreadState is null"); + if (Debugging.AssertsEnabled) Debugging.Assert(minThreadState != null, () => "ThreadState is null"); minThreadState.@Lock(); return minThreadState; diff --git a/src/Lucene.Net/Search/CachingWrapperFilter.cs b/src/Lucene.Net/Search/CachingWrapperFilter.cs index b48a8bf592..daf2a0d344 100644 --- a/src/Lucene.Net/Search/CachingWrapperFilter.cs +++ b/src/Lucene.Net/Search/CachingWrapperFilter.cs @@ -120,7 +120,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo { missCount++; docIdSet = DocIdSetToCache(_filter.GetDocIdSet(context, null), reader); - if (Debugging.AssertsEnabled) Debugging.Assert(() => docIdSet.IsCacheable); + if (Debugging.AssertsEnabled) Debugging.Assert(docIdSet.IsCacheable); #if FEATURE_CONDITIONALWEAKTABLE_ADDORUPDATE _cache.AddOrUpdate(key, docIdSet); #else diff --git a/src/Lucene.Net/Search/CollectionStatistics.cs b/src/Lucene.Net/Search/CollectionStatistics.cs index c2940d2abf..02ea613546 100644 --- a/src/Lucene.Net/Search/CollectionStatistics.cs +++ b/src/Lucene.Net/Search/CollectionStatistics.cs @@ -39,10 +39,10 @@ public CollectionStatistics(string field, long maxDoc, long docCount, long sumTo { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => maxDoc >= 0); - Debugging.Assert(() => docCount >= -1 && docCount <= maxDoc); // #docs with field must be <= #docs - Debugging.Assert(() => sumDocFreq == -1 || sumDocFreq >= docCount); // #postings must be >= #docs with field - Debugging.Assert(() => sumTotalTermFreq == -1 || sumTotalTermFreq >= sumDocFreq); // #positions must be >= #postings + Debugging.Assert(maxDoc >= 0); + Debugging.Assert(docCount >= -1 && docCount <= maxDoc); // #docs with field must be <= #docs + Debugging.Assert(sumDocFreq == -1 || sumDocFreq >= docCount); // #postings must be >= #docs with field + Debugging.Assert(sumTotalTermFreq == -1 || sumTotalTermFreq >= sumDocFreq); // #positions must be >= #postings } this.field = field; this.maxDoc = maxDoc; diff --git a/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs b/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs index dfb0ab11d9..b263eeafd7 100644 --- a/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs +++ b/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs @@ -163,7 +163,7 @@ public override bool Collect(BytesRef bytes) } TermState termState = termsEnum.GetTermState(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState != null); + if (Debugging.AssertsEnabled) Debugging.Assert(termState != null); if (pos < 0) { pos = (-pos) - 1; @@ -235,7 +235,7 @@ 
public override int[] Init() { int[] ord = base.Init(); termState = new TermContext[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.Length >= ord.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(termState.Length >= ord.Length); return ord; } @@ -248,7 +248,7 @@ public override int[] Grow() Array.Copy(termState, 0, tmpTermState, 0, termState.Length); termState = tmpTermState; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.Length >= ord.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(termState.Length >= ord.Length); return ord; } diff --git a/src/Lucene.Net/Search/ConstantScoreQuery.cs b/src/Lucene.Net/Search/ConstantScoreQuery.cs index 20dda8a0ec..0bbaa4c7f2 100644 --- a/src/Lucene.Net/Search/ConstantScoreQuery.cs +++ b/src/Lucene.Net/Search/ConstantScoreQuery.cs @@ -84,7 +84,7 @@ public override Query Rewrite(IndexReader reader) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_filter != null); + if (Debugging.AssertsEnabled) Debugging.Assert(m_filter != null); // Fix outdated usage pattern from Lucene 2.x/early-3.x: // because ConstantScoreQuery only accepted filters, // QueryWrapperFilter was used to wrap queries. @@ -154,12 +154,12 @@ public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool score //DocIdSetIterator disi; if (outerInstance.m_filter != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_query == null); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.m_query == null); return base.GetBulkScorer(context, scoreDocsInOrder, acceptDocs); } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_query != null && innerWeight != null); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.m_query != null && innerWeight != null); BulkScorer bulkScorer = innerWeight.GetBulkScorer(context, scoreDocsInOrder, acceptDocs); if (bulkScorer == null) { @@ -174,7 +174,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) DocIdSetIterator disi; if (outerInstance.m_filter != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_query == null); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.m_query == null); DocIdSet dis = outerInstance.m_filter.GetDocIdSet(context, acceptDocs); if (dis == null) { @@ -184,7 +184,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.m_query != null && innerWeight != null); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.m_query != null && innerWeight != null); disi = innerWeight.GetScorer(context, acceptDocs); } @@ -310,7 +310,7 @@ public override int NextDoc() public override float GetScore() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => docIdSetIterator.DocID != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(docIdSetIterator.DocID != NO_MORE_DOCS); return theScore; } diff --git a/src/Lucene.Net/Search/DisjunctionScorer.cs b/src/Lucene.Net/Search/DisjunctionScorer.cs index cafb16207e..2e78917d5e 100644 --- a/src/Lucene.Net/Search/DisjunctionScorer.cs +++ b/src/Lucene.Net/Search/DisjunctionScorer.cs @@ -145,7 +145,7 @@ public override long GetCost() public override int NextDoc() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_doc != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) 
Debugging.Assert(m_doc != NO_MORE_DOCS); while (true) { if (m_subScorers[0].NextDoc() != NO_MORE_DOCS) @@ -170,7 +170,7 @@ public override int NextDoc() public override int Advance(int target) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_doc != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(m_doc != NO_MORE_DOCS); while (true) { if (m_subScorers[0].Advance(target) != NO_MORE_DOCS) diff --git a/src/Lucene.Net/Search/DocIdSetIterator.cs b/src/Lucene.Net/Search/DocIdSetIterator.cs index c7e26b5462..5f3986c188 100644 --- a/src/Lucene.Net/Search/DocIdSetIterator.cs +++ b/src/Lucene.Net/Search/DocIdSetIterator.cs @@ -47,8 +47,8 @@ public override int Advance(int target) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !exhausted); - Debugging.Assert(() => target >= 0); + Debugging.Assert(!exhausted); + Debugging.Assert(target >= 0); } exhausted = true; return NO_MORE_DOCS; @@ -58,7 +58,7 @@ public override int Advance(int target) public override int NextDoc() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !exhausted); + if (Debugging.AssertsEnabled) Debugging.Assert(!exhausted); exhausted = true; return NO_MORE_DOCS; } @@ -142,7 +142,7 @@ public override long GetCost() /// protected internal int SlowAdvance(int target) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => DocID == NO_MORE_DOCS || DocID < target); // can happen when the enum is not positioned yet + if (Debugging.AssertsEnabled) Debugging.Assert(DocID == NO_MORE_DOCS || DocID < target); // can happen when the enum is not positioned yet int doc; do { diff --git a/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs b/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs index 1964fe46c5..7f1343ed62 100644 --- a/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs +++ b/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs @@ -123,7 +123,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); return new FieldCacheDocIdSetAnonymousInnerClassHelper(this, context.AtomicReader.MaxDoc, acceptDocs, docTermOrds, inclusiveLowerPoint, inclusiveUpperPoint); } diff --git a/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs b/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs index fbd9450c35..ef53a4d29b 100644 --- a/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs +++ b/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs @@ -100,7 +100,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo Int64BitSet termSet = new Int64BitSet(docTermOrds.ValueCount); TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousInnerClassHelper(this, docTermOrds)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null); if (termsEnum.Next() != null) { // fill into a bitset diff --git a/src/Lucene.Net/Search/ExactPhraseScorer.cs b/src/Lucene.Net/Search/ExactPhraseScorer.cs index dbcf7c7af8..bcc7aaa600 100644 --- a/src/Lucene.Net/Search/ExactPhraseScorer.cs +++ b/src/Lucene.Net/Search/ExactPhraseScorer.cs @@ -263,7 +263,7 @@ private int PhraseFreq() cs.LastPos = cs.Pos; int posIndex = cs.Pos - chunkStart; counts[posIndex] = 1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => gens[posIndex] != gen); + if (Debugging.AssertsEnabled) 
Debugging.Assert(gens[posIndex] != gen); gens[posIndex] = gen; } diff --git a/src/Lucene.Net/Search/FieldCacheImpl.cs b/src/Lucene.Net/Search/FieldCacheImpl.cs index d973e377a1..3617d29c6d 100644 --- a/src/Lucene.Net/Search/FieldCacheImpl.cs +++ b/src/Lucene.Net/Search/FieldCacheImpl.cs @@ -173,7 +173,7 @@ public ReaderClosedListenerAnonymousInnerClassHelper(FieldCacheImpl outerInstanc public void OnClose(IndexReader owner) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => owner is AtomicReader); + if (Debugging.AssertsEnabled) Debugging.Assert(owner is AtomicReader); outerInstance.PurgeByCacheKey(((AtomicReader)owner).CoreCacheKey); } } @@ -409,7 +409,7 @@ public virtual void DoUninvert(AtomicReader reader, string field, bool setDocsWi if (setDocsWithField) { int termsDocCount = terms.DocCount; - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsDocCount <= maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(termsDocCount <= maxDoc); if (termsDocCount == maxDoc) { // Fast case: all docs have this field: @@ -475,7 +475,7 @@ internal virtual void SetDocsWithField(AtomicReader reader, string field, IBits if (numSet >= maxDoc) { // The cardinality of the BitSet is maxDoc if all documents have a value. - if (Debugging.AssertsEnabled) Debugging.Assert(() => numSet == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(numSet == maxDoc); bits = new Lucene.Net.Util.Bits.MatchAllBits(maxDoc); } else @@ -907,7 +907,7 @@ internal class Int32sFromArray : FieldCache.Int32s public Int32sFromArray(PackedInt32s.Reader values, int minValue) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => values.BitsPerValue <= 32); + if (Debugging.AssertsEnabled) Debugging.Assert(values.BitsPerValue <= 32); this.values = values; this.minValue = minValue; } @@ -1095,7 +1095,7 @@ protected override object CreateValue(AtomicReader reader, CacheKey key, bool se if (terms != null) { int termsDocCount = terms.DocCount; - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsDocCount <= maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(termsDocCount <= maxDoc); if (termsDocCount == maxDoc) { // Fast case: all docs have this field: @@ -1137,7 +1137,7 @@ protected override object CreateValue(AtomicReader reader, CacheKey key, bool se if (numSet >= maxDoc) { // The cardinality of the BitSet is maxDoc if all documents have a value. 
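Where several assertions run back to back, as in the DocIdSetIterator.Advance and CollectionStatistics hunks above and the FieldValueHitQueue hunk below, the patch hoists a single AssertsEnabled check over the whole group instead of repeating the guard per statement. The grouped form, using the CollectionStatistics invariants from the earlier hunk:

    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(maxDoc >= 0);
        Debugging.Assert(docCount >= -1 && docCount <= maxDoc);                     // #docs with field must be <= #docs
        Debugging.Assert(sumDocFreq == -1 || sumDocFreq >= docCount);               // #postings must be >= #docs with field
        Debugging.Assert(sumTotalTermFreq == -1 || sumTotalTermFreq >= sumDocFreq); // #positions must be >= #postings
    }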
- if (Debugging.AssertsEnabled) Debugging.Assert(() => numSet == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(numSet == maxDoc); return new Lucene.Net.Util.Bits.MatchAllBits(maxDoc); } return res; diff --git a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs index d7a309af99..716ba7707b 100644 --- a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs +++ b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs @@ -142,7 +142,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); return new AnonymousClassFieldCacheDocIdSet(fcsi, inclusiveLowerPoint, inclusiveUpperPoint, context.Reader.MaxDoc, acceptDocs); } @@ -230,7 +230,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return null; ; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(inclusiveLowerPoint >= 0 && inclusiveUpperPoint >= 0); return new AnonymousClassFieldCacheDocIdSet(fcsi, inclusiveLowerPoint, inclusiveUpperPoint, context.AtomicReader.MaxDoc, acceptDocs); } diff --git a/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs b/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs index b12412060d..b81be95fb0 100644 --- a/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs +++ b/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs @@ -100,7 +100,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo Int64BitSet termSet = new Int64BitSet(fcsi.ValueCount); TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousInnerClassHelper(this, fcsi)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null); if (termsEnum.Next() != null) { // fill into a bitset diff --git a/src/Lucene.Net/Search/FieldComparator.cs b/src/Lucene.Net/Search/FieldComparator.cs index 92ab322fda..c633d1674e 100644 --- a/src/Lucene.Net/Search/FieldComparator.cs +++ b/src/Lucene.Net/Search/FieldComparator.cs @@ -927,7 +927,7 @@ public override int Compare(int slot1, int slot2) public override int CompareBottom(int doc) { float score = scorer.GetScore(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(!float.IsNaN(score)); // LUCENENET specific special case: // In case of zero, we may have a "positive 0" or "negative 0" @@ -938,7 +938,7 @@ public override int CompareBottom(int doc) public override void Copy(int slot, int doc) { scores[slot] = scorer.GetScore(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(scores[slot])); + if (Debugging.AssertsEnabled) Debugging.Assert(!float.IsNaN(scores[slot])); } public override FieldComparer SetNextReader(AtomicReaderContext context) @@ -988,7 +988,7 @@ public override int CompareValues(float first, float second) public override int CompareTop(int doc) { float docValue = scorer.GetScore(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(docValue)); + if (Debugging.AssertsEnabled) Debugging.Assert(!float.IsNaN(docValue)); // LUCENENET specific special case: // In case of zero, we may have a "positive 0" or "negative 0" @@ -1216,7 +1216,7 @@ public override int 
Compare(int slot1, int slot2) public override int CompareBottom(int doc) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bottomSlot != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(bottomSlot != -1); int docOrd = termsIndex.GetOrd(doc); if (docOrd == -1) { @@ -1250,7 +1250,7 @@ public override void Copy(int slot, int doc) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ord >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0); if (values[slot] == null) { values[slot] = new BytesRef(); @@ -1319,7 +1319,7 @@ public override void SetBottom(int slot) if (bottomValue == null) { // missingOrd is null for all segments - if (Debugging.AssertsEnabled) Debugging.Assert(() => ords[bottomSlot] == missingOrd); + if (Debugging.AssertsEnabled) Debugging.Assert(ords[bottomSlot] == missingOrd); bottomOrd = missingOrd; bottomSameReader = true; readerGen[bottomSlot] = currentReaderGen; diff --git a/src/Lucene.Net/Search/FieldValueHitQueue.cs b/src/Lucene.Net/Search/FieldValueHitQueue.cs index 52d34028f1..310e692ad3 100644 --- a/src/Lucene.Net/Search/FieldValueHitQueue.cs +++ b/src/Lucene.Net/Search/FieldValueHitQueue.cs @@ -74,8 +74,8 @@ protected internal override bool LessThan(T hitA, T hitB) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => hitA != hitB); - Debugging.Assert(() => hitA.Slot != hitB.Slot); + Debugging.Assert(hitA != hitB); + Debugging.Assert(hitA.Slot != hitB.Slot); } int c = oneReverseMul * m_firstComparer.Compare(hitA.Slot, hitB.Slot); @@ -112,8 +112,8 @@ protected internal override bool LessThan(T hitA, T hitB) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => hitA != hitB); - Debugging.Assert(() => hitA.Slot != hitB.Slot); + Debugging.Assert(hitA != hitB); + Debugging.Assert(hitA.Slot != hitB.Slot); } int numComparers = m_comparers.Length; diff --git a/src/Lucene.Net/Search/FilteredQuery.cs b/src/Lucene.Net/Search/FilteredQuery.cs index 281be75e88..226f18ba81 100644 --- a/src/Lucene.Net/Search/FilteredQuery.cs +++ b/src/Lucene.Net/Search/FilteredQuery.cs @@ -139,7 +139,7 @@ public override Explanation Explain(AtomicReaderContext ir, int i) // return a filtering scorer public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.filter != null); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.filter != null); DocIdSet filterDocIdSet = outerInstance.filter.GetDocIdSet(context, acceptDocs); if (filterDocIdSet == null) @@ -154,7 +154,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) // return a filtering top scorer public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, IBits acceptDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.filter != null); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.filter != null); DocIdSet filterDocIdSet = outerInstance.filter.GetDocIdSet(context, acceptDocs); if (filterDocIdSet == null) @@ -449,7 +449,7 @@ public override bool Equals(object o) { return false; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => o is FilteredQuery); + if (Debugging.AssertsEnabled) Debugging.Assert(o is FilteredQuery); FilteredQuery fq = (FilteredQuery)o; return fq.query.Equals(this.query) && fq.filter.Equals(this.filter) && fq.strategy.Equals(this.strategy); } @@ -594,7 +594,7 @@ public override Scorer FilteredScorer(AtomicReaderContext context, Weight weight } else { - if 
(Debugging.AssertsEnabled) Debugging.Assert(() => firstFilterDoc > -1); + if (Debugging.AssertsEnabled) Debugging.Assert(firstFilterDoc > -1); // we are gonna advance() this scorer, so we set inorder=true/toplevel=false // we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice Scorer scorer = weight.GetScorer(context, null); diff --git a/src/Lucene.Net/Search/FuzzyTermsEnum.cs b/src/Lucene.Net/Search/FuzzyTermsEnum.cs index 151b28a277..e07dbd5b78 100644 --- a/src/Lucene.Net/Search/FuzzyTermsEnum.cs +++ b/src/Lucene.Net/Search/FuzzyTermsEnum.cs @@ -251,7 +251,7 @@ protected virtual void MaxEditDistanceChanged(BytesRef lastTerm, int maxEdits, b // assert newEnum != null; if (newEnum == null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE); + if (Debugging.AssertsEnabled) Debugging.Assert(maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE); throw new ArgumentException("maxEdits cannot be > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE"); } SetEnum(newEnum); diff --git a/src/Lucene.Net/Search/IndexSearcher.cs b/src/Lucene.Net/Search/IndexSearcher.cs index 9875ceca3a..69baacf8f3 100644 --- a/src/Lucene.Net/Search/IndexSearcher.cs +++ b/src/Lucene.Net/Search/IndexSearcher.cs @@ -135,7 +135,7 @@ public IndexSearcher(IndexReader r, TaskScheduler executor) /// public IndexSearcher(IndexReaderContext context, TaskScheduler executor) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => context.IsTopLevel, () => "IndexSearcher's ReaderContext must be topLevel for reader" + context.Reader); + if (Debugging.AssertsEnabled) Debugging.Assert(context.IsTopLevel, () => "IndexSearcher's ReaderContext must be topLevel for reader" + context.Reader); reader = context.Reader; this.executor = executor; this.m_readerContext = context; @@ -801,7 +801,7 @@ public SearcherCallableWithSort(ReentrantLock @lock, IndexSearcher searcher, Lea public TopFieldDocs Call() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => slice.Leaves.Length == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(slice.Leaves.Length == 1); TopFieldDocs docs = searcher.Search(slice.Leaves, weight, after, nDocs, sort, true, doDocScores || sort.NeedsScores, doMaxScore); @lock.Lock(); try @@ -966,7 +966,7 @@ public virtual CollectionStatistics CollectionStatistics(string field) long sumTotalTermFreq; long sumDocFreq; - if (Debugging.AssertsEnabled) Debugging.Assert(() => field != null); + if (Debugging.AssertsEnabled) Debugging.Assert(field != null); Terms terms = MultiFields.GetTerms(reader, field); if (terms == null) diff --git a/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs b/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs index cc19c8023e..0bab089e4d 100644 --- a/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs +++ b/src/Lucene.Net/Search/MinShouldMatchSumScorer.cs @@ -116,7 +116,7 @@ public MinShouldMatchSumScorer(Weight weight, IList subScorers, int mini this.subScorers[i] = this.sortedSubScorers[mm - 1 + i]; } MinheapHeapify(); - if (Debugging.AssertsEnabled) Debugging.Assert(MinheapCheck); + if (Debugging.AssertsEnabled) Debugging.Assert(MinheapCheck()); } /// @@ -140,7 +140,7 @@ public override sealed ICollection GetChildren() public override int NextDoc() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => doc != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(doc != NO_MORE_DOCS); while (true) { // to remove current doc, call next() on all subScorers on current doc within heap diff 
--git a/src/Lucene.Net/Search/MultiPhraseQuery.cs b/src/Lucene.Net/Search/MultiPhraseQuery.cs index 80a2c6872c..3d09685361 100644 --- a/src/Lucene.Net/Search/MultiPhraseQuery.cs +++ b/src/Lucene.Net/Search/MultiPhraseQuery.cs @@ -223,7 +223,7 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.termArrays.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.termArrays.Count > 0); AtomicReader reader = (context.AtomicReader); IBits liveDocs = acceptDocs; @@ -286,7 +286,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) if (postingsEnum == null) { // term does exist, but has no positions - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, () => "termstate found but no term exists in reader"); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, () => "termstate found but no term exists in reader"); throw new InvalidOperationException("field \"" + term.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.Text() + ")"); } diff --git a/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs b/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs index ac9b307719..f86f8b6ad0 100644 --- a/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs +++ b/src/Lucene.Net/Search/MultiTermQueryWrapperFilter.cs @@ -109,7 +109,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo } TermsEnum termsEnum = m_query.GetTermsEnum(terms); - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null); if (termsEnum.Next() != null) { // fill into a FixedBitSet diff --git a/src/Lucene.Net/Search/NumericRangeQuery.cs b/src/Lucene.Net/Search/NumericRangeQuery.cs index 5ecd6a9c37..cc58d36b5a 100644 --- a/src/Lucene.Net/Search/NumericRangeQuery.cs +++ b/src/Lucene.Net/Search/NumericRangeQuery.cs @@ -320,7 +320,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.outerInstance.dataType == NumericType.DOUBLE); + if (Debugging.AssertsEnabled) Debugging.Assert(this.outerInstance.dataType == NumericType.DOUBLE); minBound = (this.outerInstance.min == null) ? INT64_NEGATIVE_INFINITY : NumericUtils.DoubleToSortableInt64(Convert.ToDouble(this.outerInstance.min.Value, CultureInfo.InvariantCulture)); } if (!this.outerInstance.minInclusive && this.outerInstance.min != null) @@ -340,7 +340,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.outerInstance.dataType == NumericType.DOUBLE); + if (Debugging.AssertsEnabled) Debugging.Assert(this.outerInstance.dataType == NumericType.DOUBLE); maxBound = (this.outerInstance.max == null) ? 
INT64_POSITIVE_INFINITY : NumericUtils.DoubleToSortableInt64(Convert.ToDouble(this.outerInstance.max, CultureInfo.InvariantCulture)); } if (!this.outerInstance.maxInclusive && this.outerInstance.max != null) @@ -367,7 +367,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.outerInstance.dataType == NumericType.SINGLE); + if (Debugging.AssertsEnabled) Debugging.Assert(this.outerInstance.dataType == NumericType.SINGLE); minBound = (this.outerInstance.min == null) ? INT32_NEGATIVE_INFINITY : NumericUtils.SingleToSortableInt32(Convert.ToSingle(this.outerInstance.min, CultureInfo.InvariantCulture)); } if (!this.outerInstance.minInclusive && this.outerInstance.min != null) @@ -387,7 +387,7 @@ internal NumericRangeTermsEnum(NumericRangeQuery outerInstance, TermsEnum ten } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.outerInstance.dataType == NumericType.SINGLE); + if (Debugging.AssertsEnabled) Debugging.Assert(this.outerInstance.dataType == NumericType.SINGLE); maxBound = (this.outerInstance.max == null) ? INT32_POSITIVE_INFINITY : NumericUtils.SingleToSortableInt32(Convert.ToSingle(this.outerInstance.max, CultureInfo.InvariantCulture)); } if (!this.outerInstance.maxInclusive && this.outerInstance.max != null) @@ -445,10 +445,10 @@ public override sealed void AddRange(BytesRef minPrefixCoded, BytesRef maxPrefix private void NextRange() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => rangeBounds.Count % 2 == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(rangeBounds.Count % 2 == 0); currentLowerBound = rangeBounds.Dequeue(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => currentUpperBound == null || termComp.Compare(currentUpperBound, currentLowerBound) <= 0, () => "The current upper bound must be <= the new lower bound"); + if (Debugging.AssertsEnabled) Debugging.Assert(currentUpperBound == null || termComp.Compare(currentUpperBound, currentLowerBound) <= 0, () => "The current upper bound must be <= the new lower bound"); currentUpperBound = rangeBounds.Dequeue(); } @@ -469,7 +469,7 @@ protected override sealed BytesRef NextSeekTerm(BytesRef term) } // no more sub-range enums available - if (Debugging.AssertsEnabled) Debugging.Assert(() => rangeBounds.Count == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(rangeBounds.Count == 0); currentLowerBound = currentUpperBound = null; return null; } diff --git a/src/Lucene.Net/Search/PhraseQuery.cs b/src/Lucene.Net/Search/PhraseQuery.cs index 3bd6d2e7b6..d5416e35cc 100644 --- a/src/Lucene.Net/Search/PhraseQuery.cs +++ b/src/Lucene.Net/Search/PhraseQuery.cs @@ -337,7 +337,7 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.terms.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.terms.Count > 0); AtomicReader reader = context.AtomicReader; IBits liveDocs = acceptDocs; PostingsAndFreq[] postingsFreqs = new PostingsAndFreq[outerInstance.terms.Count]; @@ -357,7 +357,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) TermState state = states[i].Get(context.Ord); if (state == null) // term doesnt exist in this segment { - if (Debugging.AssertsEnabled) Debugging.Assert(() => TermNotInReader(reader, t), () => "no termstate found but term exists in reader"); + if 
(Debugging.AssertsEnabled) Debugging.Assert(TermNotInReader(reader, t), () => "no termstate found but term exists in reader"); return null; } te.SeekExact(t.Bytes, state); @@ -367,7 +367,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) // positions. if (postingsEnum == null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => te.SeekExact(t.Bytes), () => "termstate found but no term exists in reader"); + if (Debugging.AssertsEnabled) Debugging.Assert(te.SeekExact(t.Bytes), () => "termstate found but no term exists in reader"); // term does exist, but has no positions throw new InvalidOperationException("field \"" + t.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + t.Text() + ")"); } diff --git a/src/Lucene.Net/Search/QueryRescorer.cs b/src/Lucene.Net/Search/QueryRescorer.cs index 3fe94d06f5..e4e7bd0293 100644 --- a/src/Lucene.Net/Search/QueryRescorer.cs +++ b/src/Lucene.Net/Search/QueryRescorer.cs @@ -101,7 +101,7 @@ public override TopDocs Rescore(IndexSearcher searcher, TopDocs firstPassTopDocs else { // Query did not match this doc: - if (Debugging.AssertsEnabled) Debugging.Assert(() => actualDoc > targetDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(actualDoc > targetDoc); hit.Score = Combine(hit.Score, false, 0.0f); } diff --git a/src/Lucene.Net/Search/ReferenceManager.cs b/src/Lucene.Net/Search/ReferenceManager.cs index 1376cb77e1..f8e75c1562 100644 --- a/src/Lucene.Net/Search/ReferenceManager.cs +++ b/src/Lucene.Net/Search/ReferenceManager.cs @@ -117,7 +117,7 @@ public G Acquire() } if (GetRefCount(@ref) == 0 && (object)current == (object)@ref) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => @ref != null); + if (Debugging.AssertsEnabled) Debugging.Assert(@ref != null); /* if we can't increment the reader but we are still the current reference the RM is in a illegal states since we can't make any progress @@ -200,7 +200,7 @@ private void DoMaybeRefresh() G newReference = RefreshIfNeeded(reference); if (newReference != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !ReferenceEquals(newReference, reference), () => "refreshIfNeeded should return null if refresh wasn't needed"); + if (Debugging.AssertsEnabled) Debugging.Assert(!ReferenceEquals(newReference, reference), () => "refreshIfNeeded should return null if refresh wasn't needed"); try { SwapReference(newReference); @@ -311,7 +311,7 @@ protected virtual void AfterMaybeRefresh() /// If the release operation on the given resource throws an public void Release(G reference) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !(reference is null)); + if (Debugging.AssertsEnabled) Debugging.Assert(!(reference is null)); DecRef(reference); } diff --git a/src/Lucene.Net/Search/ReqOptSumScorer.cs b/src/Lucene.Net/Search/ReqOptSumScorer.cs index fd0830513e..fa64e0e093 100644 --- a/src/Lucene.Net/Search/ReqOptSumScorer.cs +++ b/src/Lucene.Net/Search/ReqOptSumScorer.cs @@ -45,8 +45,8 @@ public ReqOptSumScorer(Scorer reqScorer, Scorer optScorer) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => reqScorer != null); - Debugging.Assert(() => optScorer != null); + Debugging.Assert(reqScorer != null); + Debugging.Assert(optScorer != null); } this.reqScorer = reqScorer; this.optScorer = optScorer; diff --git a/src/Lucene.Net/Search/ScoringRewrite.cs b/src/Lucene.Net/Search/ScoringRewrite.cs index b1b5bd7d15..3651a12817 100644 --- a/src/Lucene.Net/Search/ScoringRewrite.cs +++ b/src/Lucene.Net/Search/ScoringRewrite.cs @@ -134,7 +134,7 
@@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) { int pos = sort[i]; Term term = new Term(query.Field, col.terms.Get(pos, new BytesRef())); - if (Debugging.AssertsEnabled) Debugging.Assert(() => reader.DocFreq(term) == termStates[pos].DocFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(reader.DocFreq(term) == termStates[pos].DocFreq); AddClause(result, term, termStates[pos].DocFreq, query.Boost * boost[pos], termStates[pos]); } } @@ -173,13 +173,13 @@ public override bool Collect(BytesRef bytes) { int e = terms.Add(bytes); TermState state = termsEnum.GetTermState(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null); + if (Debugging.AssertsEnabled) Debugging.Assert(state != null); if (e < 0) { // duplicate term: update docFreq int pos = (-e) - 1; array.termState[pos].Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq); - if (Debugging.AssertsEnabled) Debugging.Assert(() => array.boost[pos] == boostAtt.Boost, () => "boost should be equal in all segment TermsEnums"); + if (Debugging.AssertsEnabled) Debugging.Assert(array.boost[pos] == boostAtt.Boost, () => "boost should be equal in all segment TermsEnums"); } else { @@ -209,7 +209,7 @@ public override int[] Init() int[] ord = base.Init(); boost = new float[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_SINGLE)]; termState = new TermContext[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.Length >= ord.Length && boost.Length >= ord.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(termState.Length >= ord.Length && boost.Length >= ord.Length); return ord; } @@ -223,7 +223,7 @@ public override int[] Grow() Array.Copy(termState, 0, tmpTermState, 0, termState.Length); termState = tmpTermState; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => termState.Length >= ord.Length && boost.Length >= ord.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(termState.Length >= ord.Length && boost.Length >= ord.Length); return ord; } diff --git a/src/Lucene.Net/Search/SearcherManager.cs b/src/Lucene.Net/Search/SearcherManager.cs index 7f3a9c6fe1..e49b0a3f0c 100644 --- a/src/Lucene.Net/Search/SearcherManager.cs +++ b/src/Lucene.Net/Search/SearcherManager.cs @@ -120,7 +120,7 @@ protected override void DecRef(IndexSearcher reference) protected override IndexSearcher RefreshIfNeeded(IndexSearcher referenceToRefresh) { IndexReader r = referenceToRefresh.IndexReader; - if (Debugging.AssertsEnabled) Debugging.Assert(() => r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); + if (Debugging.AssertsEnabled) Debugging.Assert(r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); IndexReader newReader = DirectoryReader.OpenIfChanged((DirectoryReader)r); if (newReader == null) { @@ -152,7 +152,7 @@ public bool IsSearcherCurrent() try { IndexReader r = searcher.IndexReader; - if (Debugging.AssertsEnabled) Debugging.Assert(() => r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); + if (Debugging.AssertsEnabled) Debugging.Assert(r is DirectoryReader, () => "searcher's IndexReader should be a DirectoryReader, but got " + r); return ((DirectoryReader)r).IsCurrent(); } finally diff --git a/src/Lucene.Net/Search/Similarities/SimilarityBase.cs b/src/Lucene.Net/Search/Similarities/SimilarityBase.cs index d8f190e13b..2c7b93a9d3 100644 --- 
a/src/Lucene.Net/Search/Similarities/SimilarityBase.cs +++ b/src/Lucene.Net/Search/Similarities/SimilarityBase.cs @@ -101,7 +101,7 @@ protected internal virtual BasicStats NewStats(string field, float queryBoost) protected internal virtual void FillBasicStats(BasicStats stats, CollectionStatistics collectionStats, TermStatistics termStats) { // #positions(field) must be >= #positions(term) - if (Debugging.AssertsEnabled) Debugging.Assert(() => collectionStats.SumTotalTermFreq == -1 || collectionStats.SumTotalTermFreq >= termStats.TotalTermFreq); + if (Debugging.AssertsEnabled) Debugging.Assert(collectionStats.SumTotalTermFreq == -1 || collectionStats.SumTotalTermFreq >= termStats.TotalTermFreq); long numberOfDocuments = collectionStats.MaxDoc; long docFreq = termStats.DocFreq; diff --git a/src/Lucene.Net/Search/SloppyPhraseScorer.cs b/src/Lucene.Net/Search/SloppyPhraseScorer.cs index 2968233207..e526803c69 100644 --- a/src/Lucene.Net/Search/SloppyPhraseScorer.cs +++ b/src/Lucene.Net/Search/SloppyPhraseScorer.cs @@ -506,7 +506,7 @@ private IList<IList<PhrasePositions>> GatherRptGroups(JCG.LinkedDictionary<Term, int?> rptTerms) - if (Debugging.AssertsEnabled) Debugging.Assert(() => pp.rptGroup == -1 || pp.rptGroup == g); + if (Debugging.AssertsEnabled) Debugging.Assert(pp.rptGroup == -1 || pp.rptGroup == g); pp.rptGroup = g; } } @@ -682,7 +682,7 @@ public override float GetScore() public override int Advance(int target) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => target > DocID); + if (Debugging.AssertsEnabled) Debugging.Assert(target > DocID); do { if (!AdvanceMin(target)) diff --git a/src/Lucene.Net/Search/SortField.cs b/src/Lucene.Net/Search/SortField.cs index 277304e795..8dc6829f5f 100644 --- a/src/Lucene.Net/Search/SortField.cs +++ b/src/Lucene.Net/Search/SortField.cs @@ -456,7 +456,7 @@ public virtual FieldComparer GetComparer(int numHits, int sortPos) #pragma warning restore 612, 618 case SortFieldType.CUSTOM: - if (Debugging.AssertsEnabled) Debugging.Assert(() => comparerSource != null); + if (Debugging.AssertsEnabled) Debugging.Assert(comparerSource != null); return comparerSource.NewComparer(field, numHits, sortPos, reverse); case SortFieldType.STRING: diff --git a/src/Lucene.Net/Search/SortRescorer.cs b/src/Lucene.Net/Search/SortRescorer.cs index b18ac5905c..1cceeca3ab 100644 --- a/src/Lucene.Net/Search/SortRescorer.cs +++ b/src/Lucene.Net/Search/SortRescorer.cs @@ -92,7 +92,7 @@ public override Explanation Explain(IndexSearcher searcher, Explanation firstPas { TopDocs oneHit = new TopDocs(1, new ScoreDoc[] { new ScoreDoc(docID, firstPassExplanation.Value) }); TopDocs hits = Rescore(searcher, oneHit, 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => hits.TotalHits == 1); + if (Debugging.AssertsEnabled) Debugging.Assert(hits.TotalHits == 1); // TODO: if we could ask the Sort to explain itself then // we wouldn't need the separate ExpressionRescorer...
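Every call-site change in this patch follows the same mechanical pattern, defined by the Debugging.cs overload swap further below. A minimal sketch of the before/after shapes (hypothetical call site; `queue` stands in for whatever local state an assertion checks):

    // Before: Assert(Func<bool>, Func<string>). Building the two lambdas allocates
    // delegate/closure objects on every call, even when asserts are disabled.
    Debugging.Assert(() => queue.Count > 0, () => "queue must not be empty, got " + queue.Count);

    // After: Assert(bool, Func<string>) behind an AssertsEnabled guard. With asserts
    // disabled the condition is never evaluated and nothing is allocated; the message
    // stays a lambda so the string is only built if the assertion actually fails.
    if (Debugging.AssertsEnabled)
        Debugging.Assert(queue.Count > 0, () => "queue must not be empty, got " + queue.Count);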
diff --git a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs index 4620933571..f510506dda 100644 --- a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs +++ b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs @@ -285,7 +285,7 @@ private bool ToSameDoc() } for (int i = 0; i < subSpansByDoc.Length; i++) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => subSpansByDoc[i].Doc == maxDoc, () => " NearSpansOrdered.toSameDoc() spans " + subSpansByDoc[0] + "\n at doc " + subSpansByDoc[i].Doc + ", but should be at " + maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(subSpansByDoc[i].Doc == maxDoc, () => " NearSpansOrdered.toSameDoc() spans " + subSpansByDoc[0] + "\n at doc " + subSpansByDoc[i].Doc + ", but should be at " + maxDoc); } inSameDoc = true; return true; @@ -298,7 +298,7 @@ private bool ToSameDoc() /// and ends before . internal static bool DocSpansOrdered(Spans spans1, Spans spans2) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => spans1.Doc == spans2.Doc, () => "doc1 " + spans1.Doc + " != doc2 " + spans2.Doc); + if (Debugging.AssertsEnabled) Debugging.Assert(spans1.Doc == spans2.Doc, () => "doc1 " + spans1.Doc + " != doc2 " + spans2.Doc); int start1 = spans1.Start; int start2 = spans2.Start; /* Do not call docSpansOrdered(int,int,int,int) to avoid invoking .end() : */ @@ -409,7 +409,7 @@ private bool ShrinkToAfterShortestMatch() possibleMatchPayloads.UnionWith(possiblePayload); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => prevStart <= matchStart); + if (Debugging.AssertsEnabled) Debugging.Assert(prevStart <= matchStart); if (matchStart > prevEnd) // Only non overlapping spans add to slop. { matchSlop += (matchStart - prevEnd); diff --git a/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs b/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs index bd67ec3ee8..f9158ab509 100644 --- a/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs @@ -41,7 +41,7 @@ public SpanFirstQuery(SpanQuery match, int end) protected override AcceptStatus AcceptPosition(Spans spans) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => spans.Start != spans.End, () => "start equals end: " + spans.Start); + if (Debugging.AssertsEnabled) Debugging.Assert(spans.Start != spans.End, () => "start equals end: " + spans.Start); if (spans.Start >= m_end) { return AcceptStatus.NO_AND_ADVANCE; diff --git a/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs b/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs index 522ed66ba8..c0a7782da9 100644 --- a/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs @@ -40,7 +40,7 @@ public SpanPositionRangeQuery(SpanQuery match, int start, int end) protected override AcceptStatus AcceptPosition(Spans spans) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => spans.Start != spans.End); + if (Debugging.AssertsEnabled) Debugging.Assert(spans.Start != spans.End); if (spans.Start >= m_end) { return AcceptStatus.NO_AND_ADVANCE; diff --git a/src/Lucene.Net/Search/Spans/TermSpans.cs b/src/Lucene.Net/Search/Spans/TermSpans.cs index ebf2bec8cf..bdcc8d600e 100644 --- a/src/Lucene.Net/Search/Spans/TermSpans.cs +++ b/src/Lucene.Net/Search/Spans/TermSpans.cs @@ -76,7 +76,7 @@ public override bool Next() public override bool SkipTo(int target) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => target > m_doc); + if (Debugging.AssertsEnabled) Debugging.Assert(target > m_doc); m_doc = 
m_postings.Advance(target); if (m_doc == DocIdSetIterator.NO_MORE_DOCS) { diff --git a/src/Lucene.Net/Search/TermCollectingRewrite.cs b/src/Lucene.Net/Search/TermCollectingRewrite.cs index b6280ed85f..ca57aa2e43 100644 --- a/src/Lucene.Net/Search/TermCollectingRewrite.cs +++ b/src/Lucene.Net/Search/TermCollectingRewrite.cs @@ -68,7 +68,7 @@ internal void CollectTerms(IndexReader reader, MultiTermQuery query, TermCollect } TermsEnum termsEnum = GetTermsEnum(query, terms, collector.Attributes); - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum != null); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null); if (termsEnum == TermsEnum.EMPTY) { diff --git a/src/Lucene.Net/Search/TermQuery.cs b/src/Lucene.Net/Search/TermQuery.cs index 54f6732515..773db4e865 100644 --- a/src/Lucene.Net/Search/TermQuery.cs +++ b/src/Lucene.Net/Search/TermQuery.cs @@ -60,7 +60,7 @@ internal sealed class TermWeight : Weight public TermWeight(TermQuery outerInstance, IndexSearcher searcher, TermContext termStates) { this.outerInstance = outerInstance; - if (Debugging.AssertsEnabled) Debugging.Assert(() => termStates != null, () => "TermContext must not be null"); + if (Debugging.AssertsEnabled) Debugging.Assert(termStates != null, () => "TermContext must not be null"); this.termStates = termStates; this.similarity = searcher.Similarity; this.stats = similarity.ComputeWeight(outerInstance.Boost, searcher.CollectionStatistics(outerInstance.term.Field), searcher.TermStatistics(outerInstance.term, termStates)); @@ -85,14 +85,14 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context), () => "The top-reader used to create Weight (" + termStates.TopReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.GetTopLevelContext(context)); + if (Debugging.AssertsEnabled) Debugging.Assert(termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context), () => "The top-reader used to create Weight (" + termStates.TopReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.GetTopLevelContext(context)); TermsEnum termsEnum = GetTermsEnum(context); if (termsEnum == null) { return null; } DocsEnum docs = termsEnum.Docs(acceptDocs, null); - if (Debugging.AssertsEnabled) Debugging.Assert(() => docs != null); + if (Debugging.AssertsEnabled) Debugging.Assert(docs != null); return new TermScorer(this, docs, similarity.GetSimScorer(stats, context)); } @@ -105,7 +105,7 @@ private TermsEnum GetTermsEnum(AtomicReaderContext context) TermState state = termStates.Get(context.Ord); if (state == null) // term is not present in that reader { - if (Debugging.AssertsEnabled) Debugging.Assert(() => TermNotInReader(context.AtomicReader, outerInstance.term), () => "no termstate found but term exists in reader term=" + outerInstance.term); + if (Debugging.AssertsEnabled) Debugging.Assert(TermNotInReader(context.AtomicReader, outerInstance.term), () => "no termstate found but term exists in reader term=" + outerInstance.term); return null; } //System.out.println("LD=" + reader.getLiveDocs() + " set?=" + (reader.getLiveDocs() != null ? 
reader.getLiveDocs().get(0) : "null")); @@ -170,7 +170,7 @@ public TermQuery(Term t, int docFreq) /// public TermQuery(Term t, TermContext states) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => states != null); + if (Debugging.AssertsEnabled) Debugging.Assert(states != null); term = t; docFreq = states.DocFreq; perReaderTermState = states; diff --git a/src/Lucene.Net/Search/TermScorer.cs b/src/Lucene.Net/Search/TermScorer.cs index 9af6d46fba..c791bc0d4b 100644 --- a/src/Lucene.Net/Search/TermScorer.cs +++ b/src/Lucene.Net/Search/TermScorer.cs @@ -62,7 +62,7 @@ public override int NextDoc() public override float GetScore() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => DocID != NO_MORE_DOCS); + if (Debugging.AssertsEnabled) Debugging.Assert(DocID != NO_MORE_DOCS); return docScorer.Score(docsEnum.DocID, docsEnum.Freq); } diff --git a/src/Lucene.Net/Search/TermStatistics.cs b/src/Lucene.Net/Search/TermStatistics.cs index 9ad51ba51b..efd872e394 100644 --- a/src/Lucene.Net/Search/TermStatistics.cs +++ b/src/Lucene.Net/Search/TermStatistics.cs @@ -39,8 +39,8 @@ public TermStatistics(BytesRef term, long docFreq, long totalTermFreq) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => docFreq >= 0); - Debugging.Assert(() => totalTermFreq == -1 || totalTermFreq >= docFreq); // #positions must be >= #postings + Debugging.Assert(docFreq >= 0); + Debugging.Assert(totalTermFreq == -1 || totalTermFreq >= docFreq); // #positions must be >= #postings } this.term = term; this.docFreq = docFreq; diff --git a/src/Lucene.Net/Search/TopDocs.cs b/src/Lucene.Net/Search/TopDocs.cs index 37f7c01ef7..c9e99375f9 100644 --- a/src/Lucene.Net/Search/TopDocs.cs +++ b/src/Lucene.Net/Search/TopDocs.cs @@ -107,7 +107,7 @@ public ScoreMergeSortQueue(TopDocs[] shardHits) // Returns true if first is < second protected internal override bool LessThan(ShardRef first, ShardRef second) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => first != second); + if (Debugging.AssertsEnabled) Debugging.Assert(first != second); float firstScore = shardHits[first.ShardIndex][first.HitIndex].Score; float secondScore = shardHits[second.ShardIndex][second.HitIndex].Score; @@ -134,7 +134,7 @@ protected internal override bool LessThan(ShardRef first, ShardRef second) { // Tie break in same shard: resolve however the // shard had resolved it: - if (Debugging.AssertsEnabled) Debugging.Assert(() => first.HitIndex != second.HitIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(first.HitIndex != second.HitIndex); return first.HitIndex < second.HitIndex; } } @@ -191,7 +191,7 @@ public MergeSortQueue(Sort sort, TopDocs[] shardHits) // Returns true if first is < second protected internal override bool LessThan(ShardRef first, ShardRef second) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => first != second); + if (Debugging.AssertsEnabled) Debugging.Assert(first != second); FieldDoc firstFD = (FieldDoc)shardHits[first.ShardIndex][first.HitIndex]; FieldDoc secondFD = (FieldDoc)shardHits[second.ShardIndex][second.HitIndex]; //System.out.println(" lessThan:\n first=" + first + " doc=" + firstFD.doc + " score=" + firstFD.score + "\n second=" + second + " doc=" + secondFD.doc + " score=" + secondFD.score); @@ -226,7 +226,7 @@ protected internal override bool LessThan(ShardRef first, ShardRef second) // Tie break in same shard: resolve however the // shard had resolved it: //System.out.println(" return tb " + (first.hitIndex < second.hitIndex)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => 
first.HitIndex != second.HitIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(first.HitIndex != second.HitIndex); return first.HitIndex < second.HitIndex; } } @@ -305,7 +305,7 @@ public static TopDocs Merge(Sort sort, int start, int size, TopDocs[] shardHits) int hitUpto = 0; while (hitUpto < numIterOnHits) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => queue.Count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(queue.Count > 0); ShardRef @ref = queue.Pop(); ScoreDoc hit = shardHits[@ref.ShardIndex].ScoreDocs[@ref.HitIndex++]; hit.ShardIndex = @ref.ShardIndex; diff --git a/src/Lucene.Net/Search/TopScoreDocCollector.cs b/src/Lucene.Net/Search/TopScoreDocCollector.cs index 95062d7615..73dfbb2623 100644 --- a/src/Lucene.Net/Search/TopScoreDocCollector.cs +++ b/src/Lucene.Net/Search/TopScoreDocCollector.cs @@ -53,8 +53,8 @@ public override void Collect(int doc) // this collector cannot handle these scores: if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !float.IsNegativeInfinity(score)); - Debugging.Assert(() => !float.IsNaN(score)); + Debugging.Assert(!float.IsNegativeInfinity(score)); + Debugging.Assert(!float.IsNaN(score)); } m_totalHits++; @@ -96,8 +96,8 @@ public override void Collect(int doc) if (Debugging.AssertsEnabled) { // this collector cannot handle these scores: - Debugging.Assert(() => !float.IsNegativeInfinity(score)); - Debugging.Assert(() => !float.IsNaN(score)); + Debugging.Assert(!float.IsNegativeInfinity(score)); + Debugging.Assert(!float.IsNaN(score)); } m_totalHits++; @@ -151,7 +151,7 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle NaN - if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(!float.IsNaN(score)); m_totalHits++; if (score < pqTop.Score) @@ -194,7 +194,7 @@ public override void Collect(int doc) float score = scorer.GetScore(); // this collector cannot handle NaN - if (Debugging.AssertsEnabled) Debugging.Assert(() => !float.IsNaN(score)); + if (Debugging.AssertsEnabled) Debugging.Assert(!float.IsNaN(score)); m_totalHits++; if (score > after.Score || (score == after.Score && doc <= afterDoc)) diff --git a/src/Lucene.Net/Search/TopTermsRewrite.cs b/src/Lucene.Net/Search/TopTermsRewrite.cs index 234a52d9aa..3c0784419d 100644 --- a/src/Lucene.Net/Search/TopTermsRewrite.cs +++ b/src/Lucene.Net/Search/TopTermsRewrite.cs @@ -84,7 +84,7 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) foreach (ScoreTerm st in scoreTerms) { Term term = new Term(query.m_field, st.Bytes); - if (Debugging.AssertsEnabled) Debugging.Assert(() => reader.DocFreq(term) == st.TermState.DocFreq, () => "reader DF is " + reader.DocFreq(term) + " vs " + st.TermState.DocFreq + " term=" + term); + if (Debugging.AssertsEnabled) Debugging.Assert(reader.DocFreq(term) == st.TermState.DocFreq, () => "reader DF is " + reader.DocFreq(term) + " vs " + st.TermState.DocFreq + " term=" + term); AddClause(q, term, st.TermState.DocFreq, query.Boost * st.Boost, st.TermState); // add to query } return q; @@ -120,7 +120,7 @@ public override void SetNextEnum(TermsEnum termsEnum) this.termsEnum = termsEnum; this.termComp = termsEnum.Comparer; - if (Debugging.AssertsEnabled) Debugging.Assert(() => CompareToLastTerm(null)); + if (Debugging.AssertsEnabled) Debugging.Assert(CompareToLastTerm(null)); // lazy init the initial ScoreTerm because comparer is not known on ctor: if (st == null) @@ -145,7 +145,7 @@ private bool 
CompareToLastTerm(BytesRef t) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => termsEnum.Comparer.Compare(lastTerm, t) < 0, () => "lastTerm=" + lastTerm + " t=" + t); + if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum.Comparer.Compare(lastTerm, t) < 0, () => "lastTerm=" + lastTerm + " t=" + t); lastTerm.CopyBytes(t); } return true; @@ -157,7 +157,7 @@ public override bool Collect(BytesRef bytes) // make sure within a single seg we always collect // terms in order - if (Debugging.AssertsEnabled) Debugging.Assert(() => CompareToLastTerm(bytes)); + if (Debugging.AssertsEnabled) Debugging.Assert(CompareToLastTerm(bytes)); //System.out.println("TTR.collect term=" + bytes.utf8ToString() + " boost=" + boost + " ord=" + readerContext.ord); // ignore uncompetitive hits @@ -174,11 +174,11 @@ public override bool Collect(BytesRef bytes) } } TermState state = termsEnum.GetTermState(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => state != null); + if (Debugging.AssertsEnabled) Debugging.Assert(state != null); if (visitedTerms.TryGetValue(bytes, out ScoreTerm t2)) { // if the term is already in the PQ, only update docFreq of term in PQ - if (Debugging.AssertsEnabled) Debugging.Assert(() => t2.Boost == boost, () => "boost should be equal in all segment TermsEnums"); + if (Debugging.AssertsEnabled) Debugging.Assert(t2.Boost == boost, () => "boost should be equal in all segment TermsEnums"); t2.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq); } else @@ -187,7 +187,7 @@ public override bool Collect(BytesRef bytes) st.Bytes.CopyBytes(bytes); st.Boost = boost; visitedTerms[st.Bytes] = st; - if (Debugging.AssertsEnabled) Debugging.Assert(() => st.TermState.DocFreq == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(st.TermState.DocFreq == 0); st.TermState.Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq); stQueue.Add(st); // possibly drop entries from queue @@ -201,7 +201,7 @@ public override bool Collect(BytesRef bytes) { st = new ScoreTerm(termComp, new TermContext(m_topReaderContext)); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => stQueue.Count <= maxSize, () => "the PQ size must be limited to maxSize"); + if (Debugging.AssertsEnabled) Debugging.Assert(stQueue.Count <= maxSize, () => "the PQ size must be limited to maxSize"); // set maxBoostAtt with values to help FuzzyTermsEnum to optimize if (stQueue.Count == maxSize) { @@ -247,7 +247,7 @@ public override bool Equals(object obj) private static readonly IComparer scoreTermSortByTermComp = Comparer.Create((st1, st2) => { - if (Debugging.AssertsEnabled) Debugging.Assert(() => st1.TermComp == st2.TermComp, () => "term comparer should not change between segments"); + if (Debugging.AssertsEnabled) Debugging.Assert(st1.TermComp == st2.TermComp, () => "term comparer should not change between segments"); return st1.TermComp.Compare(st1.Bytes, st2.Bytes); }); diff --git a/src/Lucene.Net/Store/BaseDirectory.cs b/src/Lucene.Net/Store/BaseDirectory.cs index 37286f2e56..2502d3c2ed 100644 --- a/src/Lucene.Net/Store/BaseDirectory.cs +++ b/src/Lucene.Net/Store/BaseDirectory.cs @@ -66,7 +66,7 @@ public override void ClearLock(string name) public override void SetLockFactory(LockFactory lockFactory) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => lockFactory != null); + if (Debugging.AssertsEnabled) Debugging.Assert(lockFactory != null); this.m_lockFactory = lockFactory; lockFactory.LockPrefix = this.GetLockID(); } diff --git 
a/src/Lucene.Net/Store/BufferedIndexInput.cs b/src/Lucene.Net/Store/BufferedIndexInput.cs index 142f6173f0..99fee9039e 100644 --- a/src/Lucene.Net/Store/BufferedIndexInput.cs +++ b/src/Lucene.Net/Store/BufferedIndexInput.cs @@ -80,7 +80,7 @@ public BufferedIndexInput(string resourceDesc, int bufferSize) /// Change the buffer size used by this public void SetBufferSize(int newSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_buffer == null || bufferSize == m_buffer.Length, () => "buffer=" + m_buffer + " bufferSize=" + bufferSize + " buffer.length=" + (m_buffer != null ? m_buffer.Length : 0)); + if (Debugging.AssertsEnabled) Debugging.Assert(m_buffer == null || bufferSize == m_buffer.Length, () => "buffer=" + m_buffer + " bufferSize=" + bufferSize + " buffer.length=" + (m_buffer != null ? m_buffer.Length : 0)); if (newSize != bufferSize) { CheckBufferSize(newSize); diff --git a/src/Lucene.Net/Store/ByteArrayDataOutput.cs b/src/Lucene.Net/Store/ByteArrayDataOutput.cs index c2a7dcfcb4..cbdf7af6a9 100644 --- a/src/Lucene.Net/Store/ByteArrayDataOutput.cs +++ b/src/Lucene.Net/Store/ByteArrayDataOutput.cs @@ -66,13 +66,13 @@ public virtual void Reset(byte[] bytes, int offset, int len) public override void WriteByte(byte b) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos < limit); + if (Debugging.AssertsEnabled) Debugging.Assert(pos < limit); bytes[pos++] = b; } public override void WriteBytes(byte[] b, int offset, int length) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => pos + length <= limit); + if (Debugging.AssertsEnabled) Debugging.Assert(pos + length <= limit); System.Buffer.BlockCopy(b, offset, bytes, pos, length); pos += length; } diff --git a/src/Lucene.Net/Store/ByteBufferIndexInput.cs b/src/Lucene.Net/Store/ByteBufferIndexInput.cs index 21575c8670..d5aaeff2ad 100644 --- a/src/Lucene.Net/Store/ByteBufferIndexInput.cs +++ b/src/Lucene.Net/Store/ByteBufferIndexInput.cs @@ -88,8 +88,8 @@ internal ByteBufferIndexInput(string resourceDescription, ByteBuffer[] buffers, if (Debugging.AssertsEnabled) { - Debugging.Assert(() => chunkSizePower >= 0 && chunkSizePower <= 30); - Debugging.Assert(() => ((long)((ulong)length >> chunkSizePower)) < int.MaxValue); + Debugging.Assert(chunkSizePower >= 0 && chunkSizePower <= 30); + Debugging.Assert(((long)((ulong)length >> chunkSizePower)) < int.MaxValue); } // LUCENENET specific: MMapIndexInput calls SetBuffers() to populate @@ -304,7 +304,7 @@ private ByteBufferIndexInput BuildSlice(long offset, long length) ByteBufferIndexInput clone = (ByteBufferIndexInput)base.Clone(); clone.isClone = true; // we keep clone.clones, so it shares the same map with original and we have no additional cost on clones - if (Debugging.AssertsEnabled) Debugging.Assert(() => clone.clones == this.clones); + if (Debugging.AssertsEnabled) Debugging.Assert(clone.clones == this.clones); clone.buffers = BuildSlice(buffers, offset, length); clone.offset = (int)(offset & chunkSizeMask); clone.length = length; @@ -394,7 +394,7 @@ protected override void Dispose(bool disposing) #if FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR foreach (var pair in clones) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => pair.Key.isClone); + if (Debugging.AssertsEnabled) Debugging.Assert(pair.Key.isClone); pair.Key.UnsetBuffers(); } this.clones.Clear(); diff --git a/src/Lucene.Net/Store/CompoundFileDirectory.cs b/src/Lucene.Net/Store/CompoundFileDirectory.cs index bf7b4bf061..c5598c43f7 100644 --- a/src/Lucene.Net/Store/CompoundFileDirectory.cs +++ 
b/src/Lucene.Net/Store/CompoundFileDirectory.cs @@ -120,7 +120,7 @@ public CompoundFileDirectory(Directory directory, string fileName, IOContext con } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !(directory is CompoundFileDirectory), () => "compound file inside of compound file: " + fileName); + if (Debugging.AssertsEnabled) Debugging.Assert(!(directory is CompoundFileDirectory), () => "compound file inside of compound file: " + fileName); this.entries = SENTINEL; this.IsOpen = true; writer = new CompoundFileWriter(directory, fileName); @@ -295,7 +295,7 @@ protected override void Dispose(bool disposing) IsOpen = false; if (writer != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => openForWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(openForWrite); writer.Dispose(); } else @@ -311,7 +311,7 @@ public override IndexInput OpenInput(string name, IOContext context) lock (this) { EnsureOpen(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !openForWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(!openForWrite); string id = IndexFileNames.StripSegmentName(name); if (!entries.TryGetValue(id, out FileEntry entry) || entry == null) { @@ -415,7 +415,7 @@ public override Lock MakeLock(string name) public override IndexInputSlicer CreateSlicer(string name, IOContext context) { EnsureOpen(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !openForWrite); + if (Debugging.AssertsEnabled) Debugging.Assert(!openForWrite); string id = IndexFileNames.StripSegmentName(name); if (!entries.TryGetValue(id, out FileEntry entry) || entry == null) { diff --git a/src/Lucene.Net/Store/CompoundFileWriter.cs b/src/Lucene.Net/Store/CompoundFileWriter.cs index 5250880e33..a3b7614417 100644 --- a/src/Lucene.Net/Store/CompoundFileWriter.cs +++ b/src/Lucene.Net/Store/CompoundFileWriter.cs @@ -160,7 +160,7 @@ public void Dispose() closed = true; // open the compound stream GetOutput(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dataOut != null); + if (Debugging.AssertsEnabled) Debugging.Assert(dataOut != null); CodecUtil.WriteFooter(dataOut); } catch (IOException e) @@ -253,7 +253,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) bool outputLocked = false; try { - if (Debugging.AssertsEnabled) Debugging.Assert(() => name != null, () => "name must not be null"); + if (Debugging.AssertsEnabled) Debugging.Assert(name != null, () => "name must not be null"); if (entries.ContainsKey(name)) { throw new ArgumentException("File " + name + " already exists"); @@ -262,7 +262,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) entry.File = name; entries[name] = entry; string id = IndexFileNames.StripSegmentName(name); - if (Debugging.AssertsEnabled) Debugging.Assert(() => !seenIDs.Contains(id), () => "file=\"" + name + "\" maps to id=\"" + id + "\", which was already written"); + if (Debugging.AssertsEnabled) Debugging.Assert(!seenIDs.Contains(id), () => "file=\"" + name + "\" maps to id=\"" + id + "\", which was already written"); seenIDs.Add(id); DirectCFSIndexOutput @out; @@ -285,7 +285,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) entries.Remove(name); if (outputLocked) // release the output lock if not successful { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outputTaken); + if (Debugging.AssertsEnabled) Debugging.Assert(outputTaken); ReleaseOutputLock(); } } @@ -315,7 +315,7 @@ private void PrunePendingEntries() finally { bool compareAndSet = outputTaken.CompareAndSet(true, false); 
- if (Debugging.AssertsEnabled) Debugging.Assert(() => compareAndSet); + if (Debugging.AssertsEnabled) Debugging.Assert(compareAndSet); } } } @@ -397,7 +397,7 @@ public override long GetFilePointer() [Obsolete("(4.1) this method will be removed in Lucene 5.0")] public override void Seek(long pos) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !closed); + if (Debugging.AssertsEnabled) Debugging.Assert(!closed); @delegate.Seek(offset + pos); } @@ -405,21 +405,21 @@ public override long Length { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !closed); + if (Debugging.AssertsEnabled) Debugging.Assert(!closed); return @delegate.Length - offset; } } public override void WriteByte(byte b) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !closed); + if (Debugging.AssertsEnabled) Debugging.Assert(!closed); writtenBytes++; @delegate.WriteByte(b); } public override void WriteBytes(byte[] b, int offset, int length) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !closed); + if (Debugging.AssertsEnabled) Debugging.Assert(!closed); writtenBytes += length; @delegate.WriteBytes(b, offset, length); } diff --git a/src/Lucene.Net/Store/DataInput.cs b/src/Lucene.Net/Store/DataInput.cs index fb21389e42..bec94e896a 100644 --- a/src/Lucene.Net/Store/DataInput.cs +++ b/src/Lucene.Net/Store/DataInput.cs @@ -324,7 +324,7 @@ public virtual void SkipBytes(long numBytes) { skipBuffer = new byte[SKIP_BUFFER_SIZE]; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => skipBuffer.Length == SKIP_BUFFER_SIZE); + if (Debugging.AssertsEnabled) Debugging.Assert(skipBuffer.Length == SKIP_BUFFER_SIZE); for (long skipped = 0; skipped < numBytes; ) { var step = (int)Math.Min(SKIP_BUFFER_SIZE, numBytes - skipped); diff --git a/src/Lucene.Net/Store/DataOutput.cs b/src/Lucene.Net/Store/DataOutput.cs index a4a27aa64f..ecedc61ae5 100644 --- a/src/Lucene.Net/Store/DataOutput.cs +++ b/src/Lucene.Net/Store/DataOutput.cs @@ -231,7 +231,7 @@ public virtual void WriteInt64(long i) /// public void WriteVInt64(long i) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => i >= 0L); + if (Debugging.AssertsEnabled) Debugging.Assert(i >= 0L); while ((i & ~0x7FL) != 0L) { WriteByte((byte)unchecked((sbyte)((i & 0x7FL) | 0x80L))); @@ -262,7 +262,7 @@ public virtual void WriteString(string s) /// Copy numBytes bytes from input to ourself.
public virtual void CopyBytes(DataInput input, long numBytes) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numBytes >= 0, () => "numBytes=" + numBytes); + if (Debugging.AssertsEnabled) Debugging.Assert(numBytes >= 0, () => "numBytes=" + numBytes); long left = numBytes; if (copyBuffer == null) { diff --git a/src/Lucene.Net/Store/IOContext.cs b/src/Lucene.Net/Store/IOContext.cs index 7a5240e39e..857ad6052f 100644 --- a/src/Lucene.Net/Store/IOContext.cs +++ b/src/Lucene.Net/Store/IOContext.cs @@ -67,7 +67,7 @@ public IOContext() public IOContext(FlushInfo flushInfo) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => flushInfo != null); + if (Debugging.AssertsEnabled) Debugging.Assert(flushInfo != null); this.Context = UsageContext.FLUSH; this.MergeInfo = null; this.ReadOnce = false; @@ -96,8 +96,8 @@ private IOContext(UsageContext context, MergeInfo mergeInfo) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => context != UsageContext.MERGE || mergeInfo != null, () => "MergeInfo must not be null if context is MERGE"); - Debugging.Assert(() => context != UsageContext.FLUSH, () => "Use IOContext(FlushInfo) to create a FLUSH IOContext"); + Debugging.Assert(context != UsageContext.MERGE || mergeInfo != null, () => "MergeInfo must not be null if context is MERGE"); + Debugging.Assert(context != UsageContext.FLUSH, () => "Use IOContext(FlushInfo) to create a FLUSH IOContext"); } this.Context = context; this.ReadOnce = false; diff --git a/src/Lucene.Net/Store/MMapDirectory.cs b/src/Lucene.Net/Store/MMapDirectory.cs index a469d7234a..3bd6f74b0a 100644 --- a/src/Lucene.Net/Store/MMapDirectory.cs +++ b/src/Lucene.Net/Store/MMapDirectory.cs @@ -111,7 +111,7 @@ public MMapDirectory(DirectoryInfo path, LockFactory lockFactory, int maxChunkSi throw new ArgumentException("Maximum chunk size for mmap must be >0"); } this.chunkSizePower = 31 - maxChunkSize.LeadingZeroCount(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.chunkSizePower >= 0 && this.chunkSizePower <= 30); + if (Debugging.AssertsEnabled) Debugging.Assert(this.chunkSizePower >= 0 && this.chunkSizePower <= 30); } /// diff --git a/src/Lucene.Net/Store/NIOFSDirectory.cs b/src/Lucene.Net/Store/NIOFSDirectory.cs index f0658c44b2..2c9e589d8c 100644 --- a/src/Lucene.Net/Store/NIOFSDirectory.cs +++ b/src/Lucene.Net/Store/NIOFSDirectory.cs @@ -234,7 +234,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) if (b == m_buffer && 0 == offset) { // Use our own pre-wrapped byteBuf: - if (Debugging.AssertsEnabled) Debugging.Assert(() => byteBuf != null); + if (Debugging.AssertsEnabled) Debugging.Assert(byteBuf != null); byteBuf.Clear(); byteBuf.Limit = len; bb = byteBuf; @@ -259,7 +259,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) { int toRead = Math.Min(CHUNK_SIZE, readLength); bb.Limit = readOffset + toRead; - if (Debugging.AssertsEnabled) Debugging.Assert(() => bb.Remaining == toRead); + if (Debugging.AssertsEnabled) Debugging.Assert(bb.Remaining == toRead); int i = m_channel.Read(bb, pos); if (i <= 0) // be defensive here, even though we checked before hand, something could have changed { @@ -269,7 +269,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) readOffset += i; readLength -= i; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => readLength == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(readLength == 0); } catch (IOException ioe) { diff --git a/src/Lucene.Net/Store/RAMOutputStream.cs b/src/Lucene.Net/Store/RAMOutputStream.cs 
index fec7838db0..38c475e1d8 100644 --- a/src/Lucene.Net/Store/RAMOutputStream.cs +++ b/src/Lucene.Net/Store/RAMOutputStream.cs @@ -162,7 +162,7 @@ public override void WriteByte(byte b) public override void WriteBytes(byte[] b, int offset, int len) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => b != null); + if (Debugging.AssertsEnabled) Debugging.Assert(b != null); crc.Update(b, offset, len); while (len > 0) { diff --git a/src/Lucene.Net/Store/SimpleFSDirectory.cs b/src/Lucene.Net/Store/SimpleFSDirectory.cs index 5e80121224..e0b33297df 100644 --- a/src/Lucene.Net/Store/SimpleFSDirectory.cs +++ b/src/Lucene.Net/Store/SimpleFSDirectory.cs @@ -235,7 +235,7 @@ protected override void ReadInternal(byte[] b, int offset, int len) // all we need to do is Read(). total = m_file.Read(b, offset, len); - if (Debugging.AssertsEnabled) Debugging.Assert(() => total == len); + if (Debugging.AssertsEnabled) Debugging.Assert(total == len); } catch (IOException ioe) { diff --git a/src/Lucene.Net/Support/Collections.cs b/src/Lucene.Net/Support/Collections.cs index 468bc5a93a..a5a4d4c5fc 100644 --- a/src/Lucene.Net/Support/Collections.cs +++ b/src/Lucene.Net/Support/Collections.cs @@ -270,7 +270,7 @@ private class ReverseComparer2 : IComparer public ReverseComparer2(IComparer cmp) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => cmp != null); + if (Debugging.AssertsEnabled) Debugging.Assert(cmp != null); this.cmp = cmp; } diff --git a/src/Lucene.Net/Support/Diagnostics/Debugging.cs b/src/Lucene.Net/Support/Diagnostics/Debugging.cs index 32ea1eaa0c..d90842039f 100644 --- a/src/Lucene.Net/Support/Diagnostics/Debugging.cs +++ b/src/Lucene.Net/Support/Diagnostics/Debugging.cs @@ -36,53 +36,54 @@ internal static class Debugging /// </summary> public static bool AssertsEnabled = SystemProperties.GetPropertyAsBoolean("assert", false); - ///// <summary> - ///// Checks for a condition; if the condition is false, throws an <see cref="AssertionException"/>. - ///// </summary> - ///// <param name="condition">The conditional expression to evaluate. If the condition is true, no exception is thrown.</param> - //[MethodImpl(MethodImplOptions.AggressiveInlining)] - //public static void Assert(bool condition) - //{ - // if (AssertsEnabled && !condition) - // throw new AssertionException(); - //} - - ///// <summary> - ///// Checks for a condition; if the <paramref name="condition"/> is false, throws an <see cref="AssertionException"/> with the specified <paramref name="messageFactory"/>. - ///// </summary> - ///// <param name="condition">The conditional expression to evaluate. If the condition is true, no exception is thrown.</param> - ///// <param name="messageFactory">A delegate to build the message to use.</param> - //[MethodImpl(MethodImplOptions.AggressiveInlining)] - //public static void Assert(bool condition, Func<string> messageFactory) - //{ - // if (AssertsEnabled && !condition) - // throw new AssertionException(messageFactory()); - //} - /// <summary> /// Checks for a condition; if the condition is false, throws an <see cref="AssertionException"/>. /// </summary> - /// <param name="conditionFactory">A delegate that returns the conditional expression to evaluate. If the condition is true, no exception is thrown.</param> - + /// <param name="condition">The conditional expression to evaluate. If the condition is true, no exception is thrown.</param> [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static void Assert(Func<bool> conditionFactory) + public static void Assert(bool condition) { - if (AssertsEnabled && !conditionFactory()) + if (AssertsEnabled && !condition) throw new AssertionException(); } /// <summary> - /// Checks for a condition if asserts are enabled; if the <paramref name="conditionFactory"/> - /// returns false, throws an <see cref="AssertionException"/> with the message returned + /// Checks for a condition; if the <paramref name="condition"/> is false, throws an <see cref="AssertionException"/> with the message returned /// from the specified <paramref name="messageFactory"/>. /// </summary> - /// <param name="conditionFactory">A delegate that returns the conditional expression to evaluate. If the condition returned from the factory is true, no exception is thrown.</param> + /// <param name="condition">The conditional expression to evaluate. If the condition is true, no exception is thrown.</param> /// <param name="messageFactory">A delegate to build the message to use.</param> [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static void Assert(Func<bool> conditionFactory, Func<string> messageFactory) + public static void Assert(bool condition, Func<string> messageFactory) { - if (AssertsEnabled && !conditionFactory()) + if (AssertsEnabled && !condition) throw new AssertionException(messageFactory()); } + + ///// <summary> + ///// Checks for a condition; if the condition is false, throws an <see cref="AssertionException"/>. + ///// </summary> + ///// <param name="conditionFactory">A delegate that returns the conditional expression to evaluate. If the condition is true, no exception is thrown.</param> + + //[MethodImpl(MethodImplOptions.AggressiveInlining)] + //public static void Assert(Func<bool> conditionFactory) + //{ + // if (AssertsEnabled && !conditionFactory()) + // throw new AssertionException(); + //} + + ///// <summary> + ///// Checks for a condition if asserts are enabled; if the <paramref name="conditionFactory"/> + ///// returns false, throws an <see cref="AssertionException"/> with the message returned + ///// from the specified <paramref name="messageFactory"/>. + ///// </summary> + ///// <param name="conditionFactory">A delegate that returns the conditional expression to evaluate. If the condition returned from the factory is true, no exception is thrown.</param> + ///// <param name="messageFactory">A delegate to build the message to use.</param> + //[MethodImpl(MethodImplOptions.AggressiveInlining)] + //public static void Assert(Func<bool> conditionFactory, Func<string> messageFactory) + //{ + // if (AssertsEnabled && !conditionFactory()) + // throw new AssertionException(messageFactory()); + //} } } diff --git a/src/Lucene.Net/Util/ArrayUtil.cs b/src/Lucene.Net/Util/ArrayUtil.cs index 238c0c6c5d..d762764536 100644 --- a/src/Lucene.Net/Util/ArrayUtil.cs +++ b/src/Lucene.Net/Util/ArrayUtil.cs @@ -270,7 +270,7 @@ public static int GetShrinkSize(int currentSize, int targetSize, int bytesPerEle public static short[] Grow(short[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { short[] newArray = new short[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT16)]; @@ -290,7 +290,7 @@ public static short[] Grow(short[] array) public static float[] Grow(float[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { float[] newArray = new float[Oversize(minSize, RamUsageEstimator.NUM_BYTES_SINGLE)]; @@ -310,7 +310,7 @@ public static float[] Grow(float[] array) public static double[] Grow(double[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { double[] newArray = new double[Oversize(minSize, RamUsageEstimator.NUM_BYTES_DOUBLE)]; @@ -330,7 +330,7 @@ public static double[] Grow(double[] array) public static short[] Shrink(short[] array, int targetSize) { - if (Debugging.AssertsEnabled)
Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT16); if (newSize != array.Length) { @@ -346,7 +346,7 @@ public static short[] Shrink(short[] array, int targetSize) public static int[] Grow(int[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { int[] newArray = new int[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT32)]; @@ -366,7 +366,7 @@ public static int[] Grow(int[] array) public static int[] Shrink(int[] array, int targetSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT32); if (newSize != array.Length) { @@ -382,7 +382,7 @@ public static int[] Shrink(int[] array, int targetSize) public static long[] Grow(long[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { long[] newArray = new long[Oversize(minSize, RamUsageEstimator.NUM_BYTES_INT64)]; @@ -402,7 +402,7 @@ public static long[] Grow(long[] array) public static long[] Shrink(long[] array, int targetSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_INT64); if (newSize != array.Length) { @@ -419,7 +419,7 @@ public static long[] Shrink(long[] array, int targetSize) [CLSCompliant(false)] public static sbyte[] Grow(sbyte[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { var newArray = new sbyte[Oversize(minSize, 1)]; @@ -434,7 +434,7 @@ public static sbyte[] Grow(sbyte[] array, int minSize) public static byte[] Grow(byte[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { byte[] newArray = new 
byte[Oversize(minSize, 1)]; @@ -454,7 +454,7 @@ public static byte[] Grow(byte[] array) public static byte[] Shrink(byte[] array, int targetSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, 1); if (newSize != array.Length) { @@ -470,7 +470,7 @@ public static byte[] Shrink(byte[] array, int targetSize) public static bool[] Grow(bool[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { bool[] newArray = new bool[Oversize(minSize, 1)]; @@ -490,7 +490,7 @@ public static bool[] Grow(bool[] array) public static bool[] Shrink(bool[] array, int targetSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, 1); if (newSize != array.Length) { @@ -506,7 +506,7 @@ public static bool[] Shrink(bool[] array, int targetSize) public static char[] Grow(char[] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { char[] newArray = new char[Oversize(minSize, RamUsageEstimator.NUM_BYTES_CHAR)]; @@ -526,7 +526,7 @@ public static char[] Grow(char[] array) public static char[] Shrink(char[] array, int targetSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_CHAR); if (newSize != array.Length) { @@ -543,7 +543,7 @@ public static char[] Shrink(char[] array, int targetSize) [CLSCompliant(false)] public static int[][] Grow(int[][] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { var newArray = new int[Oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][]; @@ -565,7 +565,7 @@ public static int[][] Grow(int[][] array) [CLSCompliant(false)] public static int[][] Shrink(int[][] array, int targetSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) 
Debugging.Assert(targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF); if (newSize != array.Length) { @@ -582,7 +582,7 @@ public static int[][] Shrink(int[][] array, int targetSize) [CLSCompliant(false)] public static float[][] Grow(float[][] array, int minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0, () => "size must be positive (got " + minSize + "): likely integer overflow?"); if (array.Length < minSize) { float[][] newArray = new float[Oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][]; @@ -604,7 +604,7 @@ public static float[][] Grow(float[][] array) [CLSCompliant(false)] public static float[][] Shrink(float[][] array, int targetSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); + if (Debugging.AssertsEnabled) Debugging.Assert(targetSize >= 0, () => "size must be positive (got " + targetSize + "): likely integer overflow?"); int newSize = GetShrinkSize(array.Length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF); if (newSize != array.Length) { @@ -779,7 +779,7 @@ public static int[] ToInt32Array(ICollection ints) } // paranoia: - if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == result.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(upto == result.Length); return result; } diff --git a/src/Lucene.Net/Util/AttributeSource.cs b/src/Lucene.Net/Util/AttributeSource.cs index 3971574589..e4cf9b1304 100644 --- a/src/Lucene.Net/Util/AttributeSource.cs +++ b/src/Lucene.Net/Util/AttributeSource.cs @@ -370,7 +370,7 @@ public void AddAttributeImpl(Attribute att) foreach (var curInterfaceRef in foundInterfaces) { curInterfaceRef.TryGetTarget(out Type curInterface); - if (Debugging.AssertsEnabled) Debugging.Assert(() => curInterface != null, () => "We have a strong reference on the class holding the interfaces, so they should never get evicted"); + if (Debugging.AssertsEnabled) Debugging.Assert(curInterface != null, () => "We have a strong reference on the class holding the interfaces, so they should never get evicted"); // Attribute is a superclass of this interface if (!attributes.ContainsKey(curInterface)) { diff --git a/src/Lucene.Net/Util/Automaton/Automaton.cs b/src/Lucene.Net/Util/Automaton/Automaton.cs index a6b20c1fb0..23e7e5ec91 100644 --- a/src/Lucene.Net/Util/Automaton/Automaton.cs +++ b/src/Lucene.Net/Util/Automaton/Automaton.cs @@ -299,7 +299,7 @@ public virtual void SetNumberedStates(State[] states) public virtual void SetNumberedStates(State[] states, int count) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => count <= states.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(count <= states.Length); // TODO: maybe we can eventually allow for oversizing here... 
if (count < states.Length) { @@ -550,7 +550,7 @@ public virtual Transition[][] GetSortedTransitions() s.SortTransitions(Transition.COMPARE_BY_MIN_MAX_THEN_DEST); s.TrimTransitionsArray(); transitions[s.number] = s.TransitionsArray; - if (Debugging.AssertsEnabled) Debugging.Assert(() => s.TransitionsArray != null); + if (Debugging.AssertsEnabled) Debugging.Assert(s.TransitionsArray != null); } return transitions; } diff --git a/src/Lucene.Net/Util/Automaton/BasicOperations.cs b/src/Lucene.Net/Util/Automaton/BasicOperations.cs index e5485b96f2..07f5650c29 100644 --- a/src/Lucene.Net/Util/Automaton/BasicOperations.cs +++ b/src/Lucene.Net/Util/Automaton/BasicOperations.cs @@ -721,7 +721,7 @@ private PointTransitions Find(int point) if (count == HASHMAP_CUTOVER) { // switch to HashMap on the fly - if (Debugging.AssertsEnabled) Debugging.Assert(() => map.Count == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(map.Count == 0); for (int i = 0; i < count; i++) { map[points[i].point] = points[i]; @@ -845,7 +845,7 @@ public static void Determinize(Automaton a) if (statesSet.upto > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => lastPoint != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(lastPoint != -1); statesSet.ComputeHash(); @@ -868,7 +868,7 @@ public static void Determinize(Automaton a) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => (accCount > 0) == q.accept, () => "accCount=" + accCount + " vs existing accept=" + q.accept + " states=" + statesSet); + if (Debugging.AssertsEnabled) Debugging.Assert((accCount > 0) == q.accept, () => "accCount=" + accCount + " vs existing accept=" + q.accept + " states=" + statesSet); } r.AddTransition(new Transition(lastPoint, point - 1, q)); @@ -902,7 +902,7 @@ public static void Determinize(Automaton a) points.points[i].starts.count = 0; } points.Reset(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => statesSet.upto == 0, () => "upto=" + statesSet.upto); + if (Debugging.AssertsEnabled) Debugging.Assert(statesSet.upto == 0, () => "upto=" + statesSet.upto); } a.deterministic = true; a.SetNumberedStates(newStatesArray, newStateUpto); diff --git a/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs b/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs index d20418c773..eec2d69050 100644 --- a/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs +++ b/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs @@ -228,7 +228,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => maxTransition != null); + if (Debugging.AssertsEnabled) Debugging.Assert(maxTransition != null); // Append floorLabel int floorLabel; @@ -256,7 +256,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) Transition[] transitions = sortedTransitions[state]; if (transitions.Length == 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => RunAutomaton.IsAccept(state)); + if (Debugging.AssertsEnabled) Debugging.Assert(RunAutomaton.IsAccept(state)); term.Length = idx; //if (DEBUG) System.out.println(" return " + term.utf8ToString()); return term; @@ -265,7 +265,7 @@ private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) { // We are pushing "top" -- so get last label of // last transition: - if (Debugging.AssertsEnabled) Debugging.Assert(() => transitions.Length != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(transitions.Length != 0); Transition lastTransition = transitions[transitions.Length - 1]; if (idx >= 
term.Bytes.Length) { @@ -364,7 +364,7 @@ public virtual BytesRef Floor(BytesRef input, BytesRef output) Transition[] transitions = sortedTransitions[state]; if (transitions.Length == 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => RunAutomaton.IsAccept(state)); + if (Debugging.AssertsEnabled) Debugging.Assert(RunAutomaton.IsAccept(state)); output.Length = idx; //if (DEBUG) System.out.println(" return " + output.utf8ToString()); return output; diff --git a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs index 5a96cddde9..90990fd06f 100644 --- a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs +++ b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs @@ -131,7 +131,7 @@ public override int GetHashCode() /// internal State NewState(int label) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Array.BinarySearch(labels, label) < 0, () => "State already has transition labeled: " + label); + if (Debugging.AssertsEnabled) Debugging.Assert(Array.BinarySearch(labels, label) < 0, () => "State already has transition labeled: " + label); labels = Arrays.CopyOf(labels, labels.Length + 1); states = Arrays.CopyOf(states, states.Length + 1); @@ -145,7 +145,7 @@ internal State NewState(int label) /// internal State LastChild() // LUCENENET NOTE: Kept this a method because there is another overload { - if (Debugging.AssertsEnabled) Debugging.Assert(() => HasChildren, () => "No outgoing transitions."); + if (Debugging.AssertsEnabled) Debugging.Assert(HasChildren, () => "No outgoing transitions."); return states[states.Length - 1]; } @@ -161,7 +161,7 @@ internal State LastChild(int label) { s = states[index]; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => s == GetState(label)); + if (Debugging.AssertsEnabled) Debugging.Assert(s == GetState(label)); return s; } @@ -171,7 +171,7 @@ internal State LastChild(int label) /// internal void ReplaceLastChild(State state) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => HasChildren, () => "No outgoing transitions."); + if (Debugging.AssertsEnabled) Debugging.Assert(HasChildren, () => "No outgoing transitions."); states[states.Length - 1] = state; } @@ -229,9 +229,9 @@ public void Add(CharsRef current) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => stateRegistry != null, () => "Automaton already built."); - Debugging.Assert(() => previous == null || comparer.Compare(previous, current) <= 0, () => "Input must be in sorted UTF-8 order: " + previous + " >= " + current); - Debugging.Assert(() => SetPrevious(current)); + Debugging.Assert(stateRegistry != null, () => "Automaton already built."); + Debugging.Assert(previous == null || comparer.Compare(previous, current) <= 0, () => "Input must be in sorted UTF-8 order: " + previous + " >= " + current); + Debugging.Assert(SetPrevious(current)); } // Descend in the automaton (find matching prefix). 
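
[The hunks in this part of the series all apply the same mechanical rewrite, so a minimal self-contained sketch of the target shape may help while reviewing. The types below are hypothetical stand-ins, not the shipped Lucene.Net.Diagnostics.Debugging class, and the exception type is a placeholder:]

    using System;

    internal static class DebuggingSketch // hypothetical stand-in, not the shipped class
    {
        public static bool AssertsEnabled { get; set; } = true;

        public static void Assert(bool condition, Func<string> messageFactory)
        {
            // Message construction is deferred: the factory only runs on failure.
            if (AssertsEnabled && !condition)
                throw new InvalidOperationException(messageFactory()); // placeholder exception type
        }
    }

    internal static class CallSiteDemo
    {
        public static void Grow(int minSize)
        {
            // Old shape (the removed lines): the condition was itself a lambda, so a
            // Func<bool> closure capturing minSize was allocated on every call,
            // whether or not asserts were enabled:
            //     Debugging.Assert(() => minSize >= 0, () => "size must be positive ...");
            //
            // New shape (the added lines): the condition is a plain bool evaluated
            // inline, and the AssertsEnabled guard skips the call entirely in release
            // runs, so no delegate is created unless asserts are turned on.
            if (DebuggingSketch.AssertsEnabled)
                DebuggingSketch.Assert(minSize >= 0,
                    () => "size must be positive (got " + minSize + "): likely integer overflow?");
        }
    }
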
diff --git a/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs index d0e79358f4..b41ea66c96 100644 --- a/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev1ParametricDescription.cs @@ -32,12 +32,12 @@ internal class Lev1ParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - if (Debugging.AssertsEnabled) Debugging.Assert(() => absState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs index 68095e6d4a..9e6f4138f7 100644 --- a/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev1TParametricDescription.cs @@ -34,12 +34,12 @@ internal class Lev1TParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - if (Debugging.AssertsEnabled) Debugging.Assert(() => absState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs index c5d74ce894..dc6919c5ff 100644 --- a/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev2ParametricDescription.cs @@ -32,12 +32,12 @@ internal class Lev2ParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - if (Debugging.AssertsEnabled) Debugging.Assert(() => absState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs b/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs index 08325b7e98..637c107792 100644 --- a/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs +++ b/src/Lucene.Net/Util/Automaton/Lev2TParametricDescription.cs @@ -34,12 +34,12 @@ internal class Lev2TParametricDescription : ParametricDescription internal override int Transition(int absState, int position, int vector) { // null absState should never be passed in - if (Debugging.AssertsEnabled) Debugging.Assert(() => absState != -1); + if (Debugging.AssertsEnabled) Debugging.Assert(absState != -1); // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); + if 
(Debugging.AssertsEnabled) Debugging.Assert(offset >= 0); if (position == m_w) { diff --git a/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs b/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs index e360036321..1edcf5976a 100644 --- a/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs +++ b/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs @@ -276,7 +276,7 @@ internal virtual bool IsAccept(int absState) // decode absState -> state, offset int state = absState / (m_w + 1); int offset = absState % (m_w + 1); - if (Debugging.AssertsEnabled) Debugging.Assert(() => offset >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(offset >= 0); return m_w - offset + minErrors[state] <= m_n; } diff --git a/src/Lucene.Net/Util/Automaton/SortedIntSet.cs b/src/Lucene.Net/Util/Automaton/SortedIntSet.cs index bd0f40ea4c..a3b836009b 100644 --- a/src/Lucene.Net/Util/Automaton/SortedIntSet.cs +++ b/src/Lucene.Net/Util/Automaton/SortedIntSet.cs @@ -157,7 +157,7 @@ public void Decr(int num) return; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(false); } public void ComputeHash() diff --git a/src/Lucene.Net/Util/Automaton/State.cs b/src/Lucene.Net/Util/Automaton/State.cs index 1e49816d68..916703cca9 100644 --- a/src/Lucene.Net/Util/Automaton/State.cs +++ b/src/Lucene.Net/Util/Automaton/State.cs @@ -184,7 +184,7 @@ public virtual bool Accept /// public virtual State Step(int c) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => c >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(c >= 0); for (int i = 0; i < numTransitions; i++) { Transition t = transitionsArray[i]; diff --git a/src/Lucene.Net/Util/Automaton/Transition.cs b/src/Lucene.Net/Util/Automaton/Transition.cs index 84ed6d7f83..0d08537587 100644 --- a/src/Lucene.Net/Util/Automaton/Transition.cs +++ b/src/Lucene.Net/Util/Automaton/Transition.cs @@ -63,7 +63,7 @@ public class Transition /// Destination state. 
public Transition(int c, State to) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => c >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(c >= 0); min = max = c; this.to = to; } @@ -78,8 +78,8 @@ public Transition(int min, int max, State to) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => min >= 0); - Debugging.Assert(() => max >= 0); + Debugging.Assert(min >= 0); + Debugging.Assert(max >= 0); } if (max < min) { diff --git a/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs b/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs index 319a8482f3..a0b8867820 100644 --- a/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs +++ b/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs @@ -179,8 +179,8 @@ private void Build(State start, State end, UTF8Sequence startUTF8, UTF8Sequence { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => startUTF8.len > upto + 1); - Debugging.Assert(() => endUTF8.len > upto + 1); + Debugging.Assert(startUTF8.len > upto + 1); + Debugging.Assert(endUTF8.len > upto + 1); } State n = NewUTF8State(); diff --git a/src/Lucene.Net/Util/BroadWord.cs b/src/Lucene.Net/Util/BroadWord.cs index 6515c67ca0..62a9267a69 100644 --- a/src/Lucene.Net/Util/BroadWord.cs +++ b/src/Lucene.Net/Util/BroadWord.cs @@ -71,7 +71,7 @@ public static int Select(long x, int r) long b = (long)((ulong)(((long)((ulong)SmallerUpTo7_8(s, (r * L8_L)) >> 7)) * L8_L) >> 53); // & (~7L); // Step 3, side ways addition for byte number times 8 long l = r - (((long)((ulong)(s << 8) >> (int)b)) & 0xFFL); // Step 4, byte wise rank, subtract the rank with byte at b-8, or zero for b=0; - if (Debugging.AssertsEnabled) Debugging.Assert(() => 0L <= 1, () => l.ToString(CultureInfo.InvariantCulture)); + if (Debugging.AssertsEnabled) Debugging.Assert(0L <= 1, () => l.ToString(CultureInfo.InvariantCulture)); //assert l < 8 : l; //fails when bit r is not available. // Select bit l from byte (x >>> b): @@ -150,7 +150,7 @@ public static long SmallerUpto15_16(long x, long y) /// The index of the r-th 1 bit in x, or if no such bit exists, 72. public static int SelectNaive(long x, int r) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => r >= 1); + if (Debugging.AssertsEnabled) Debugging.Assert(r >= 1); int s = -1; while ((x != 0L) && (r > 0)) { diff --git a/src/Lucene.Net/Util/ByteBlockPool.cs b/src/Lucene.Net/Util/ByteBlockPool.cs index 94474258bd..093e651414 100644 --- a/src/Lucene.Net/Util/ByteBlockPool.cs +++ b/src/Lucene.Net/Util/ByteBlockPool.cs @@ -354,7 +354,7 @@ public void SetBytesRef(BytesRef term, int textStart) term.Length = (bytes[pos] & 0x7f) + ((bytes[pos + 1] & 0xff) << 7); term.Offset = pos + 2; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => term.Length >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(term.Length >= 0); } /// diff --git a/src/Lucene.Net/Util/BytesRef.cs b/src/Lucene.Net/Util/BytesRef.cs index f12ad79a30..75cfa6d6ec 100644 --- a/src/Lucene.Net/Util/BytesRef.cs +++ b/src/Lucene.Net/Util/BytesRef.cs @@ -88,7 +88,7 @@ public BytesRef(byte[] bytes, int offset, int length) this.bytes = bytes; this.Offset = offset; this.Length = length; - if (Debugging.AssertsEnabled) Debugging.Assert(IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid()); } /// @@ -140,7 +140,7 @@ public BytesRef(string text) /// unpaired surrogates or invalid UTF16 code units. 
public void CopyChars(ICharSequence text) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); // TODO broken if offset != 0 + if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); // TODO broken if offset != 0 UnicodeUtil.UTF16toUTF8(text, 0, text.Length, this); } @@ -151,7 +151,7 @@ public void CopyChars(ICharSequence text) /// unpaired surrogates or invalid UTF16 code units. public void CopyChars(string text) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); // TODO broken if offset != 0 + if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); // TODO broken if offset != 0 UnicodeUtil.UTF16toUTF8(text, 0, text.Length, this); } @@ -164,7 +164,7 @@ public void CopyChars(string text) /// Another , should not be null. public bool BytesEquals(BytesRef other) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => other != null); + if (Debugging.AssertsEnabled) Debugging.Assert(other != null); if (Length == other.Length) { var otherUpto = other.Offset; @@ -298,7 +298,7 @@ public void Append(BytesRef other) /// public void Grow(int newLength) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); // NOTE: senseless if offset != 0 + if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); // NOTE: senseless if offset != 0 bytes = ArrayUtil.Grow(bytes, newLength); } @@ -307,7 +307,7 @@ public void Grow(int newLength) public int CompareTo(object other) // LUCENENET specific: Implemented IComparable for FieldComparer { BytesRef br = other as BytesRef; - if (Debugging.AssertsEnabled) Debugging.Assert(() => br != null); + if (Debugging.AssertsEnabled) Debugging.Assert(br != null); return utf8SortedAsUnicodeSortOrder.Compare(this, br); } diff --git a/src/Lucene.Net/Util/BytesRefArray.cs b/src/Lucene.Net/Util/BytesRefArray.cs index e8c7db7345..323f331279 100644 --- a/src/Lucene.Net/Util/BytesRefArray.cs +++ b/src/Lucene.Net/Util/BytesRefArray.cs @@ -97,7 +97,7 @@ public BytesRef Get(BytesRef spare, int index) { int offset = offsets[index]; int length = index == lastElement - 1 ? 
currentOffset - offset : offsets[index + 1] - offset; - if (Debugging.AssertsEnabled) Debugging.Assert(() => spare.Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(spare.Offset == 0); spare.Grow(length); spare.Length = length; pool.ReadBytes(offset, spare.Bytes, spare.Offset, spare.Length); diff --git a/src/Lucene.Net/Util/BytesRefHash.cs b/src/Lucene.Net/Util/BytesRefHash.cs index 1e45f4ebae..b7d097522a 100644 --- a/src/Lucene.Net/Util/BytesRefHash.cs +++ b/src/Lucene.Net/Util/BytesRefHash.cs @@ -123,8 +123,8 @@ public BytesRef Get(int bytesID, BytesRef @ref) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); - Debugging.Assert(() => bytesID < bytesStart.Length, () => "bytesID exceeds byteStart len: " + bytesStart.Length); + Debugging.Assert(bytesStart != null, () => "bytesStart is null - not initialized"); + Debugging.Assert(bytesID < bytesStart.Length, () => "bytesID exceeds byteStart len: " + bytesStart.Length); } pool.SetBytesRef(@ref, bytesStart[bytesID]); return @ref; @@ -140,7 +140,7 @@ public BytesRef Get(int bytesID, BytesRef @ref) /// public int[] Compact() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); + if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, () => "bytesStart is null - not initialized"); int upto = 0; for (int i = 0; i < hashSize; i++) { @@ -155,7 +155,7 @@ public int[] Compact() } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => upto == count); + if (Debugging.AssertsEnabled) Debugging.Assert(upto == count); lastCount = count; return ids; } @@ -201,7 +201,7 @@ protected override void Swap(int i, int j) protected override int Compare(int i, int j) { int id1 = compact[i], id2 = compact[j]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.bytesStart.Length > id1 && outerInstance.bytesStart.Length > id2); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.bytesStart.Length > id1 && outerInstance.bytesStart.Length > id2); outerInstance.pool.SetBytesRef(outerInstance.scratch1, outerInstance.bytesStart[id1]); outerInstance.pool.SetBytesRef(scratch2, outerInstance.bytesStart[id2]); return comp.Compare(outerInstance.scratch1, scratch2); @@ -210,14 +210,14 @@ protected override int Compare(int i, int j) protected override void SetPivot(int i) { int id = compact[i]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.bytesStart.Length > id); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.bytesStart.Length > id); outerInstance.pool.SetBytesRef(pivot, outerInstance.bytesStart[id]); } protected override int ComparePivot(int j) { int id = compact[j]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => outerInstance.bytesStart.Length > id); + if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.bytesStart.Length > id); outerInstance.pool.SetBytesRef(scratch2, outerInstance.bytesStart[id]); return comp.Compare(pivot, scratch2); } @@ -304,7 +304,7 @@ public void Dispose() /// public int Add(BytesRef bytes) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null, () => "Bytesstart is null - not initialized"); + if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, () => "Bytesstart is null - not initialized"); int length = bytes.Length; // final position int hashPos = FindHash(bytes); @@ -327,7 +327,7 @@ public int Add(BytesRef bytes) if (count >= bytesStart.Length) { bytesStart = 
bytesStartArray.Grow(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); } e = count++; @@ -342,7 +342,7 @@ public int Add(BytesRef bytes) // 1 byte to store length buffer[bufferUpto] = (byte)length; pool.ByteUpto += length + 1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "Length must be positive: " + length); + if (Debugging.AssertsEnabled) Debugging.Assert(length >= 0, () => "Length must be positive: " + length); System.Buffer.BlockCopy(bytes.Bytes, bytes.Offset, buffer, bufferUpto + 1, length); } else @@ -353,7 +353,7 @@ public int Add(BytesRef bytes) pool.ByteUpto += length + 2; System.Buffer.BlockCopy(bytes.Bytes, bytes.Offset, buffer, bufferUpto + 2, length); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => ids[hashPos] == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(ids[hashPos] == -1); ids[hashPos] = e; if (count == hashHalfSize) @@ -380,7 +380,7 @@ public int Find(BytesRef bytes) private int FindHash(BytesRef bytes) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); + if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, () => "bytesStart is null - not initialized"); int code = DoHash(bytes.Bytes, bytes.Offset, bytes.Length); @@ -412,7 +412,7 @@ private int FindHash(BytesRef bytes) /// public int AddByPoolOffset(int offset) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null, () => "Bytesstart is null - not initialized"); + if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null, () => "Bytesstart is null - not initialized"); // final position int code = offset; int hashPos = offset & hashMask; @@ -434,11 +434,11 @@ public int AddByPoolOffset(int offset) if (count >= bytesStart.Length) { bytesStart = bytesStartArray.Grow(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(count < bytesStart.Length + 1, () => "count: " + count + " len: " + bytesStart.Length); } e = count++; bytesStart[e] = offset; - if (Debugging.AssertsEnabled) Debugging.Assert(() => ids[hashPos] == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(ids[hashPos] == -1); ids[hashPos] = e; if (count == hashHalfSize) @@ -492,7 +492,7 @@ private void Rehash(int newSize, bool hashOnData) } int hashPos = code & newMask; - if (Debugging.AssertsEnabled) Debugging.Assert(() => hashPos >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(hashPos >= 0); if (newHash[hashPos] != -1) { // Conflict; use linear probe to find an open slot @@ -551,8 +551,8 @@ public int ByteStart(int bytesID) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => bytesStart != null, () => "bytesStart is null - not initialized"); - Debugging.Assert(() => bytesID >= 0 && bytesID < count, () => bytesID.ToString()); + Debugging.Assert(bytesStart != null, () => "bytesStart is null - not initialized"); + Debugging.Assert(bytesID >= 0 && bytesID < count, () => bytesID.ToString()); } return bytesStart[bytesID]; } @@ -651,7 +651,7 @@ public override int[] Clear() public override int[] Grow() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesStart != null); + if (Debugging.AssertsEnabled) 
Debugging.Assert(bytesStart != null); return bytesStart = ArrayUtil.Grow(bytesStart, bytesStart.Length + 1); } diff --git a/src/Lucene.Net/Util/CharsRef.cs b/src/Lucene.Net/Util/CharsRef.cs index a604720498..722727da31 100644 --- a/src/Lucene.Net/Util/CharsRef.cs +++ b/src/Lucene.Net/Util/CharsRef.cs @@ -99,7 +99,7 @@ public CharsRef(char[] chars, int offset, int length) this.chars = chars; this.Offset = offset; this.Length = length; - if (Debugging.AssertsEnabled) Debugging.Assert(IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid()); } /// @@ -228,7 +228,7 @@ public void CopyChars(CharsRef other) /// public void Grow(int newLength) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); if (chars.Length < newLength) { chars = ArrayUtil.Grow(chars, newLength); diff --git a/src/Lucene.Net/Util/FilterIterator.cs b/src/Lucene.Net/Util/FilterIterator.cs index 2b16464d9e..c8d9578481 100644 --- a/src/Lucene.Net/Util/FilterIterator.cs +++ b/src/Lucene.Net/Util/FilterIterator.cs @@ -47,7 +47,7 @@ public bool MoveNext() return false; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => nextIsSet); + if (Debugging.AssertsEnabled) Debugging.Assert(nextIsSet); try { current = next; diff --git a/src/Lucene.Net/Util/FixedBitSet.cs b/src/Lucene.Net/Util/FixedBitSet.cs index f0ba370b61..dc8f2700c0 100644 --- a/src/Lucene.Net/Util/FixedBitSet.cs +++ b/src/Lucene.Net/Util/FixedBitSet.cs @@ -259,7 +259,7 @@ public int Cardinality() public bool Get(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int i = index >> 6; // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. 
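
[For context on the index asserts in these FixedBitSet hunks: they guard the word/bit addressing shared by Get/Set/Clear, sketched standalone below as a simplified extract, assuming a long[] bits field as in the file:]

    // Each long word holds 64 bits, so bit `index` is located by:
    int wordNum = index >> 6;   // index / 64: which word
    int bit = index & 0x3f;     // index % 64: offset within that word
    long bitmask = 1L << bit;
    // e.g. index = 70 -> wordNum = 1, bit = 6, bitmask = 0x40L
    bool isSet = (bits[wordNum] & bitmask) != 0L;  // Get
    bits[wordNum] |= bitmask;                      // Set
    bits[wordNum] &= ~bitmask;                     // Clear

[Without the up-front assert, a negative index would only surface as a raw IndexOutOfRangeException from the signed shift, so the converted asserts keep the descriptive "index=..., numBits=..." message while being skipped entirely when asserts are off.]
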
@@ -270,7 +270,7 @@ public bool Get(int index) public void Set(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -279,7 +279,7 @@ public void Set(int index) public bool GetAndSet(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -290,7 +290,7 @@ public bool GetAndSet(int index) public void Clear(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = index >> 6; int bit = index & 0x03f; long bitmask = 1L << bit; @@ -299,7 +299,7 @@ public void Clear(int index) public bool GetAndClear(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -314,7 +314,7 @@ public bool GetAndClear(int index) /// public int NextSetBit(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, () => "index=" + index + ", numBits=" + numBits); int i = index >> 6; int subIndex = index & 0x3f; // index within the word long word = bits[i] >> subIndex; // skip all the bits to the right of index @@ -342,7 +342,7 @@ public int NextSetBit(int index) /// public int PrevSetBit(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int i = index >> 6; int subIndex = index & 0x3f; // index within the word long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index @@ -405,7 +405,7 @@ public void Or(FixedBitSet other) private void Or(long[] otherArr, int otherNumWords) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => otherNumWords <= numWords, () => "numWords=" + numWords + ", otherNumWords=" + otherNumWords); + if (Debugging.AssertsEnabled) Debugging.Assert(otherNumWords <= numWords, () => "numWords=" + numWords + ", otherNumWords=" + otherNumWords); long[] thisArr = this.bits; int pos = Math.Min(numWords, otherNumWords); while (--pos >= 0) @@ -418,7 +418,7 @@ private void Or(long[] otherArr, int otherNumWords) /// this = this XOR other public void Xor(FixedBitSet other) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); + if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); long[] thisBits = this.bits; long[] otherBits = other.bits; int pos = Math.Min(numWords, other.numWords); @@ -579,8 +579,8 @@ public void 
Flip(int startIndex, int endIndex) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(startIndex >= 0 && startIndex < numBits); + Debugging.Assert(endIndex >= 0 && endIndex <= numBits); } if (endIndex <= startIndex) { @@ -628,8 +628,8 @@ public void Set(int startIndex, int endIndex) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(startIndex >= 0 && startIndex < numBits); + Debugging.Assert(endIndex >= 0 && endIndex <= numBits); } if (endIndex <= startIndex) { @@ -663,8 +663,8 @@ public void Clear(int startIndex, int endIndex) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits, () => "startIndex=" + startIndex + ", numBits=" + numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits, () => "endIndex=" + endIndex + ", numBits=" + numBits); + Debugging.Assert(startIndex >= 0 && startIndex < numBits, () => "startIndex=" + startIndex + ", numBits=" + numBits); + Debugging.Assert(endIndex >= 0 && endIndex <= numBits, () => "endIndex=" + endIndex + ", numBits=" + numBits); } if (endIndex <= startIndex) { diff --git a/src/Lucene.Net/Util/Fst/Builder.cs b/src/Lucene.Net/Util/Fst/Builder.cs index 37b14d3f42..e363666c7f 100644 --- a/src/Lucene.Net/Util/Fst/Builder.cs +++ b/src/Lucene.Net/Util/Fst/Builder.cs @@ -206,7 +206,7 @@ private CompiledNode CompileNode(UnCompiledNode nodeIn, int tailLength) { node = fst.AddNode(nodeIn); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => node != -2); + if (Debugging.AssertsEnabled) Debugging.Assert(node != -2); nodeIn.Clear(); @@ -372,8 +372,8 @@ public virtual void Add(Int32sRef input, T output) if (Debugging.AssertsEnabled) { - Debugging.Assert(() => lastInput.Length == 0 || input.CompareTo(lastInput) >= 0, () => "inputs are added out of order lastInput=" + lastInput + " vs input=" + input); - Debugging.Assert(() => ValidOutput(output)); + Debugging.Assert(lastInput.Length == 0 || input.CompareTo(lastInput) >= 0, () => "inputs are added out of order lastInput=" + lastInput + " vs input=" + input); + Debugging.Assert(ValidOutput(output)); } //System.out.println("\nadd: " + input); @@ -444,7 +444,7 @@ public virtual void Add(Int32sRef input, T output) UnCompiledNode parentNode = frontier[idx - 1]; T lastOutput = parentNode.GetLastOutput(input.Int32s[input.Offset + idx - 1]); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ValidOutput(lastOutput)); + if (Debugging.AssertsEnabled) Debugging.Assert(ValidOutput(lastOutput)); T commonOutputPrefix; T wordSuffix; @@ -452,9 +452,9 @@ public virtual void Add(Int32sRef input, T output) if (!lastOutput.Equals(NO_OUTPUT)) { commonOutputPrefix = fst.Outputs.Common(output, lastOutput); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ValidOutput(commonOutputPrefix)); + if (Debugging.AssertsEnabled) Debugging.Assert(ValidOutput(commonOutputPrefix)); wordSuffix = fst.Outputs.Subtract(lastOutput, commonOutputPrefix); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ValidOutput(wordSuffix)); + if (Debugging.AssertsEnabled) Debugging.Assert(ValidOutput(wordSuffix)); parentNode.SetLastOutput(input.Int32s[input.Offset + idx - 1], commonOutputPrefix); node.PrependOutput(wordSuffix); } @@ -464,7 +464,7 @@ public virtual void Add(Int32sRef input, T output) } output = 
fst.Outputs.Subtract(output, commonOutputPrefix); - if (Debugging.AssertsEnabled) Debugging.Assert(() => ValidOutput(output)); + if (Debugging.AssertsEnabled) Debugging.Assert(ValidOutput(output)); } if (lastInput.Length == input.Length && prefixLenPlus1 == 1 + input.Length) @@ -662,18 +662,18 @@ public S GetLastOutput(int labelToMatch) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => NumArcs > 0); - Debugging.Assert(() => Arcs[NumArcs - 1].Label == labelToMatch); + Debugging.Assert(NumArcs > 0); + Debugging.Assert(Arcs[NumArcs - 1].Label == labelToMatch); } return Arcs[NumArcs - 1].Output; } public void AddArc(int label, INode target) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => label >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(label >= 0); if (NumArcs != 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => label > Arcs[NumArcs - 1].Label, () => "arc[-1].Label=" + Arcs[NumArcs - 1].Label + " new label=" + label + " numArcs=" + NumArcs); + if (Debugging.AssertsEnabled) Debugging.Assert(label > Arcs[NumArcs - 1].Label, () => "arc[-1].Label=" + Arcs[NumArcs - 1].Label + " new label=" + label + " numArcs=" + NumArcs); } if (NumArcs == Arcs.Length) { @@ -694,9 +694,9 @@ public void AddArc(int label, INode target) public void ReplaceLast(int labelToMatch, INode target, S nextFinalOutput, bool isFinal) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => NumArcs > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(NumArcs > 0); Arc arc = Arcs[NumArcs - 1]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label == labelToMatch, () => "arc.Label=" + arc.Label + " vs " + labelToMatch); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label == labelToMatch, () => "arc.Label=" + arc.Label + " vs " + labelToMatch); arc.Target = target; //assert target.Node != -2; arc.NextFinalOutput = nextFinalOutput; @@ -707,9 +707,9 @@ public void DeleteLast(int label, INode target) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => NumArcs > 0); - Debugging.Assert(() => label == Arcs[NumArcs - 1].Label); - Debugging.Assert(() => target == Arcs[NumArcs - 1].Target); + Debugging.Assert(NumArcs > 0); + Debugging.Assert(label == Arcs[NumArcs - 1].Label); + Debugging.Assert(target == Arcs[NumArcs - 1].Target); } NumArcs--; } @@ -718,29 +718,29 @@ public void SetLastOutput(int labelToMatch, S newOutput) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Owner.ValidOutput(newOutput)); - Debugging.Assert(() => NumArcs > 0); + Debugging.Assert(Owner.ValidOutput(newOutput)); + Debugging.Assert(NumArcs > 0); } Arc arc = Arcs[NumArcs - 1]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label == labelToMatch); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label == labelToMatch); arc.Output = newOutput; } // pushes an output prefix forward onto all arcs public void PrependOutput(S outputPrefix) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Owner.ValidOutput(outputPrefix)); + if (Debugging.AssertsEnabled) Debugging.Assert(Owner.ValidOutput(outputPrefix)); for (int arcIdx = 0; arcIdx < NumArcs; arcIdx++) { Arcs[arcIdx].Output = Owner.Fst.Outputs.Add(outputPrefix, Arcs[arcIdx].Output); - if (Debugging.AssertsEnabled) Debugging.Assert(() => Owner.ValidOutput(Arcs[arcIdx].Output)); + if (Debugging.AssertsEnabled) Debugging.Assert(Owner.ValidOutput(Arcs[arcIdx].Output)); } if (IsFinal) { Output = Owner.Fst.Outputs.Add(outputPrefix, Output); - if (Debugging.AssertsEnabled) Debugging.Assert(() => Owner.ValidOutput(Output)); + if 
(Debugging.AssertsEnabled) Debugging.Assert(Owner.ValidOutput(Output)); } } } diff --git a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs index e203edff5b..373055c4a5 100644 --- a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs @@ -44,8 +44,8 @@ public override BytesRef Common(BytesRef output1, BytesRef output2) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output1 != null); - Debugging.Assert(() => output2 != null); + Debugging.Assert(output1 != null); + Debugging.Assert(output2 != null); } int pos1 = output1.Offset; @@ -86,8 +86,8 @@ public override BytesRef Subtract(BytesRef output, BytesRef inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output != null); - Debugging.Assert(() => inc != null); + Debugging.Assert(output != null); + Debugging.Assert(inc != null); } if (inc == NO_OUTPUT) { @@ -103,8 +103,8 @@ public override BytesRef Subtract(BytesRef output, BytesRef inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); - Debugging.Assert(() => inc.Length > 0); + Debugging.Assert(inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); + Debugging.Assert(inc.Length > 0); } return new BytesRef(output.Bytes, output.Offset + inc.Length, output.Length - inc.Length); } @@ -114,8 +114,8 @@ public override BytesRef Add(BytesRef prefix, BytesRef output) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => prefix != null); - Debugging.Assert(() => output != null); + Debugging.Assert(prefix != null); + Debugging.Assert(output != null); } if (prefix == NO_OUTPUT) { @@ -127,8 +127,8 @@ public override BytesRef Add(BytesRef prefix, BytesRef output) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => prefix.Length > 0); - if (Debugging.AssertsEnabled) Debugging.Assert(() => output.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(prefix.Length > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(output.Length > 0); BytesRef result = new BytesRef(prefix.Length + output.Length); Array.Copy(prefix.Bytes, prefix.Offset, result.Bytes, 0, prefix.Length); Array.Copy(output.Bytes, output.Offset, result.Bytes, prefix.Length, output.Length); @@ -139,7 +139,7 @@ public override BytesRef Add(BytesRef prefix, BytesRef output) public override void Write(BytesRef prefix, DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => prefix != null); + if (Debugging.AssertsEnabled) Debugging.Assert(prefix != null); @out.WriteVInt32(prefix.Length); @out.WriteBytes(prefix.Bytes, prefix.Offset, prefix.Length); } diff --git a/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs b/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs index ad1813fac4..d28ba82779 100644 --- a/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs @@ -86,7 +86,7 @@ public BytesRefFSTEnum.InputOutput SeekExact(BytesRef target) m_targetLength = target.Length; if (base.DoSeekExact()) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_upto == 1 + target.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(m_upto == 1 + target.Length); return SetResult(); } else diff --git a/src/Lucene.Net/Util/Fst/BytesStore.cs b/src/Lucene.Net/Util/Fst/BytesStore.cs index 2a8813aaa8..95cf11f03f 100644 --- a/src/Lucene.Net/Util/Fst/BytesStore.cs +++ b/src/Lucene.Net/Util/Fst/BytesStore.cs @@ -131,7 +131,7 @@ 
public override void WriteBytes(byte[] b, int offset, int len) internal virtual void WriteBytes(long dest, byte[] b, int offset, int len) { //System.out.println(" BS.writeBytes dest=" + dest + " offset=" + offset + " len=" + len); - if (Debugging.AssertsEnabled) Debugging.Assert(() => dest + len <= Position, () => "dest=" + dest + " pos=" + Position + " len=" + len); + if (Debugging.AssertsEnabled) Debugging.Assert(dest + len <= Position, () => "dest=" + dest + " pos=" + Position + " len=" + len); // Note: weird: must go "backwards" because copyBytes // calls us with overlapping src/dest. If we @@ -198,7 +198,7 @@ internal virtual void WriteBytes(long dest, byte[] b, int offset, int len) public virtual void CopyBytes(long src, long dest, int len) { //System.out.println("BS.copyBytes src=" + src + " dest=" + dest + " len=" + len); - if (Debugging.AssertsEnabled) Debugging.Assert(() => src < dest); + if (Debugging.AssertsEnabled) Debugging.Assert(src < dest); // Note: weird: must go "backwards" because copyBytes // calls us with overlapping src/dest. If we @@ -289,8 +289,8 @@ public virtual void Reverse(long srcPos, long destPos) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => srcPos < destPos); - Debugging.Assert(() => destPos < Position); + Debugging.Assert(srcPos < destPos); + Debugging.Assert(destPos < Position); } //System.out.println("reverse src=" + srcPos + " dest=" + destPos); @@ -360,8 +360,8 @@ public virtual void Truncate(long newLen) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => newLen <= Position); - Debugging.Assert(() => newLen >= 0); + Debugging.Assert(newLen <= Position); + Debugging.Assert(newLen >= 0); } int blockIndex = (int)(newLen >> blockBits); nextWrite = (int)(newLen & blockMask); @@ -379,7 +379,7 @@ public virtual void Truncate(long newLen) { current = blocks[blockIndex]; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => newLen == Position); + if (Debugging.AssertsEnabled) Debugging.Assert(newLen == Position); } public virtual void Finish() @@ -475,7 +475,7 @@ public override long Position nextBuffer = bufferIndex + 1; current = outerInstance.blocks[bufferIndex]; nextRead = (int)(value & outerInstance.blockMask); - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.Position == value, () => "pos=" + value + " Position=" + this.Position); + if (Debugging.AssertsEnabled) Debugging.Assert(this.Position == value, () => "pos=" + value + " Position=" + this.Position); } } @@ -548,7 +548,7 @@ public override long Position nextBuffer = bufferIndex - 1; current = outerInstance.blocks[bufferIndex]; nextRead = (int)(value & outerInstance.blockMask); - if (Debugging.AssertsEnabled) Debugging.Assert(() => this.Position == value, () => "value=" + value + " this.Position=" + this.Position); + if (Debugging.AssertsEnabled) Debugging.Assert(this.Position == value, () => "value=" + value + " this.Position=" + this.Position); } } diff --git a/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs index ceedf894ba..f3685a0aca 100644 --- a/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs @@ -44,8 +44,8 @@ public override CharsRef Common(CharsRef output1, CharsRef output2) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output1 != null); - Debugging.Assert(() => output2 != null); + Debugging.Assert(output1 != null); + Debugging.Assert(output2 != null); } int pos1 = output1.Offset; @@ -86,8 +86,8 @@ public override CharsRef Subtract(CharsRef output, 
CharsRef inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output != null); - Debugging.Assert(() => inc != null); + Debugging.Assert(output != null); + Debugging.Assert(inc != null); } if (inc == NO_OUTPUT) { @@ -103,8 +103,8 @@ public override CharsRef Subtract(CharsRef output, CharsRef inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => inc.Length < output.Length, () => "inc.Length=" + inc.Length + " vs output.Length=" + output.Length); - Debugging.Assert(() => inc.Length > 0); + Debugging.Assert(inc.Length < output.Length, () => "inc.Length=" + inc.Length + " vs output.Length=" + output.Length); + Debugging.Assert(inc.Length > 0); } return new CharsRef(output.Chars, output.Offset + inc.Length, output.Length - inc.Length); } @@ -114,8 +114,8 @@ public override CharsRef Add(CharsRef prefix, CharsRef output) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => prefix != null); - Debugging.Assert(() => output != null); + Debugging.Assert(prefix != null); + Debugging.Assert(output != null); } if (prefix == NO_OUTPUT) { @@ -129,8 +129,8 @@ public override CharsRef Add(CharsRef prefix, CharsRef output) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => prefix.Length > 0); - Debugging.Assert(() => output.Length > 0); + Debugging.Assert(prefix.Length > 0); + Debugging.Assert(output.Length > 0); } var result = new CharsRef(prefix.Length + output.Length); Array.Copy(prefix.Chars, prefix.Offset, result.Chars, 0, prefix.Length); @@ -142,7 +142,7 @@ public override CharsRef Add(CharsRef prefix, CharsRef output) public override void Write(CharsRef prefix, DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => prefix != null); + if (Debugging.AssertsEnabled) Debugging.Assert(prefix != null); @out.WriteVInt32(prefix.Length); // TODO: maybe UTF8? 
for (int idx = 0; idx < prefix.Length; idx++) diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs index bee159ecf5..d0a73ea2ad 100644 --- a/src/Lucene.Net/Util/Fst/FST.cs +++ b/src/Lucene.Net/Util/Fst/FST.cs @@ -378,8 +378,8 @@ private void CacheRootArcs() if (Debugging.AssertsEnabled) { - Debugging.Assert(() => SetAssertingRootArcs(cachedRootArcs)); - Debugging.Assert(AssertRootArcs); + Debugging.Assert(SetAssertingRootArcs(cachedRootArcs)); + Debugging.Assert(AssertRootArcs()); } } @@ -393,7 +393,7 @@ public void ReadRootArcs(FST.Arc[] arcs) ReadFirstRealTargetArc(arc.Target, arc, @in); while (true) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label != FST.END_LABEL); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label != FST.END_LABEL); if (arc.Label < cachedRootArcs.Length) { arcs[arc.Label] = (new FST.Arc()).CopyFrom(arc); @@ -420,37 +420,37 @@ private bool SetAssertingRootArcs(FST.Arc[] arcs) // Only called from assert private bool AssertRootArcs() { - Debugging.Assert(() => cachedRootArcs != null); - Debugging.Assert(() => assertingCachedRootArcs != null); + Debugging.Assert(cachedRootArcs != null); + Debugging.Assert(assertingCachedRootArcs != null); for (int i = 0; i < cachedRootArcs.Length; i++) { FST.Arc root = cachedRootArcs[i]; FST.Arc asserting = assertingCachedRootArcs[i]; if (root != null) { - Debugging.Assert(() => root.ArcIdx == asserting.ArcIdx); - Debugging.Assert(() => root.BytesPerArc == asserting.BytesPerArc); - Debugging.Assert(() => root.Flags == asserting.Flags); - Debugging.Assert(() => root.Label == asserting.Label); - Debugging.Assert(() => root.NextArc == asserting.NextArc); + Debugging.Assert(root.ArcIdx == asserting.ArcIdx); + Debugging.Assert(root.BytesPerArc == asserting.BytesPerArc); + Debugging.Assert(root.Flags == asserting.Flags); + Debugging.Assert(root.Label == asserting.Label); + Debugging.Assert(root.NextArc == asserting.NextArc); // LUCENENET NOTE: In .NET, IEnumerable will not equal another identical IEnumerable // because it checks for reference equality, not that the list contents // are the same. StructuralEqualityComparer.Default.Equals() will make that check. - Debugging.Assert(() => typeof(T).IsValueType + Debugging.Assert(typeof(T).IsValueType ? JCG.EqualityComparer.Default.Equals(root.NextFinalOutput, asserting.NextFinalOutput) : StructuralEqualityComparer.Default.Equals(root.NextFinalOutput, asserting.NextFinalOutput)); - Debugging.Assert(() => root.Node == asserting.Node); - Debugging.Assert(() => root.NumArcs == asserting.NumArcs); - Debugging.Assert(() => typeof(T).IsValueType + Debugging.Assert(root.Node == asserting.Node); + Debugging.Assert(root.NumArcs == asserting.NumArcs); + Debugging.Assert(typeof(T).IsValueType ? 
JCG.EqualityComparer.Default.Equals(root.Output, asserting.Output) : StructuralEqualityComparer.Default.Equals(root.Output, asserting.Output)); - Debugging.Assert(() => root.PosArcsStart == asserting.PosArcsStart); - Debugging.Assert(() => root.Target == asserting.Target); + Debugging.Assert(root.PosArcsStart == asserting.PosArcsStart); + Debugging.Assert(root.Target == asserting.Target); } else { - Debugging.Assert(() => root == null && asserting == null); + Debugging.Assert(root == null && asserting == null); } } return true; @@ -584,15 +584,15 @@ public void Save(FileInfo file) // LUCENENET NOTE: static Read() was moved into the FST class private void WriteLabel(DataOutput @out, int v) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => v >= 0, () => "v=" + v); + if (Debugging.AssertsEnabled) Debugging.Assert(v >= 0, () => "v=" + v); if (inputType == FST.INPUT_TYPE.BYTE1) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => v <= 255, () => "v=" + v); + if (Debugging.AssertsEnabled) Debugging.Assert(v <= 255, () => "v=" + v); @out.WriteByte((byte)(sbyte)v); } else if (inputType == FST.INPUT_TYPE.BYTE2) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => v <= 65535, () => "v=" + v); + if (Debugging.AssertsEnabled) Debugging.Assert(v <= 65535, () => "v=" + v); @out.WriteInt16((short)v); } else @@ -696,7 +696,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.NextFinalOutput.Equals(NO_OUTPUT)); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.NextFinalOutput.Equals(NO_OUTPUT)); } bool targetHasArcs = target.Node > 0; @@ -735,7 +735,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (targetHasArcs && (flags & FST.BIT_TARGET_NEXT) == 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => target.Node > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(target.Node > 0); //System.out.println(" write target"); bytes.WriteVInt64(target.Node); } @@ -775,7 +775,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (doFixedArray) { const int MAX_HEADER_SIZE = 11; // header(byte) + numArcs(vint) + numBytes(vint) - if (Debugging.AssertsEnabled) Debugging.Assert(() => maxBytesPerArc > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(maxBytesPerArc > 0); // 2nd pass just "expands" all arcs to take up a fixed // byte size @@ -795,7 +795,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) // expand the arcs in place, backwards long srcPos = bytes.Position; long destPos = fixedArrayStart + nodeIn.NumArcs * maxBytesPerArc; - if (Debugging.AssertsEnabled) Debugging.Assert(() => destPos >= srcPos); + if (Debugging.AssertsEnabled) Debugging.Assert(destPos >= srcPos); if (destPos > srcPos) { bytes.SkipBytes((int)(destPos - srcPos)); @@ -807,7 +807,7 @@ internal long AddNode(Builder.UnCompiledNode nodeIn) if (srcPos != destPos) { //System.out.println(" copy len=" + bytesPerArc[arcIdx]); - if (Debugging.AssertsEnabled) Debugging.Assert(() => destPos > srcPos, () => "destPos=" + destPos + " srcPos=" + srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " bytesPerArc[arcIdx]=" + bytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.NumArcs); + if (Debugging.AssertsEnabled) Debugging.Assert(destPos > srcPos, () => "destPos=" + destPos + " srcPos=" + srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " bytesPerArc[arcIdx]=" + bytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.NumArcs); bytes.CopyBytes(srcPos, destPos, bytesPerArc[arcIdx]); } } @@ -893,7 +893,7 @@ 
public FST.Arc ReadLastTargetArc(FST.Arc follow, FST.Arc arc, FST.Bytes if (!TargetHasArcs(follow)) { //System.out.println(" end node"); - if (Debugging.AssertsEnabled) Debugging.Assert(() => follow.IsFinal); + if (Debugging.AssertsEnabled) Debugging.Assert(follow.IsFinal); arc.Label = FST.END_LABEL; arc.Target = FST.FINAL_END_NODE; arc.Output = follow.NextFinalOutput; @@ -960,7 +960,7 @@ public FST.Arc ReadLastTargetArc(FST.Arc follow, FST.Arc arc, FST.Bytes arc.NextArc = @in.Position; } ReadNextRealArc(arc, @in); - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsLast); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsLast); return arc; } } @@ -1094,7 +1094,7 @@ public FST.Arc ReadNextArc(FST.Arc arc, FST.BytesReader @in) /// public int ReadNextArcLabel(FST.Arc arc, FST.BytesReader @in) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => !arc.IsLast); + if (Debugging.AssertsEnabled) Debugging.Assert(!arc.IsLast); if (arc.Label == FST.END_LABEL) { @@ -1159,7 +1159,7 @@ public FST.Arc ReadNextRealArc(FST.Arc arc, FST.BytesReader @in) { // arcs are at fixed entries arc.ArcIdx++; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.ArcIdx < arc.NumArcs); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.ArcIdx < arc.NumArcs); @in.Position = arc.PosArcsStart; @in.SkipBytes(arc.ArcIdx * arc.BytesPerArc); } @@ -1226,7 +1226,7 @@ public FST.Arc ReadNextRealArc(FST.Arc arc, FST.BytesReader @in) else { arc.Target = arc.Node - 1; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Target > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Target > 0); } } else @@ -1302,7 +1302,7 @@ public FST.Arc FindTargetArc(int labelToMatch, FST.Arc follow, FST.Arc { // LUCENE-5152: detect tricky cases where caller // modified previously returned cached root-arcs: - if (Debugging.AssertsEnabled) Debugging.Assert(AssertRootArcs); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertRootArcs()); FST.Arc result = cachedRootArcs[labelToMatch]; if (result == null) { @@ -1829,7 +1829,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve } else { - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.NextFinalOutput.Equals(NO_OUTPUT)); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.NextFinalOutput.Equals(NO_OUTPUT)); } if (!TargetHasArcs(arc)) { @@ -1872,7 +1872,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve absPtr = 0; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => flags != FST.ARCS_AS_FIXED_ARRAY); + if (Debugging.AssertsEnabled) Debugging.Assert(flags != FST.ARCS_AS_FIXED_ARRAY); writer.WriteByte((byte)(sbyte)flags); fst.WriteLabel(writer, arc.Label); @@ -1994,7 +1994,7 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve // order) so nodes should only point forward to // other nodes because we only produce acyclic FSTs // w/ nodes only pointing "forwards": - if (Debugging.AssertsEnabled) Debugging.Assert(() => !negDelta); + if (Debugging.AssertsEnabled) Debugging.Assert(!negDelta); //System.out.println("TOT wasted=" + totWasted); // Converged! 
break; @@ -2025,9 +2025,9 @@ internal FST Pack(int minInCountDeref, int maxDerefNodes, float acceptableOve if (Debugging.AssertsEnabled) { - Debugging.Assert(() => fst.nodeCount == nodeCount, () => "fst.nodeCount=" + fst.nodeCount + " nodeCount=" + nodeCount); - Debugging.Assert(() => fst.arcCount == arcCount); - Debugging.Assert(() => fst.arcWithOutputCount == arcWithOutputCount, () => "fst.arcWithOutputCount=" + fst.arcWithOutputCount + " arcWithOutputCount=" + arcWithOutputCount); + Debugging.Assert(fst.nodeCount == nodeCount, () => "fst.nodeCount=" + fst.nodeCount + " nodeCount=" + nodeCount); + Debugging.Assert(fst.arcCount == arcCount); + Debugging.Assert(fst.arcWithOutputCount == arcWithOutputCount, () => "fst.arcWithOutputCount=" + fst.arcWithOutputCount + " arcWithOutputCount=" + arcWithOutputCount); } fst.bytes.Finish(); @@ -2343,7 +2343,7 @@ public NodeQueue(int topN) protected internal override bool LessThan(NodeAndInCount a, NodeAndInCount b) { int cmp = a.CompareTo(b); - if (Debugging.AssertsEnabled) Debugging.Assert(() => cmp != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(cmp != 0); return cmp < 0; } } diff --git a/src/Lucene.Net/Util/Fst/FSTEnum.cs b/src/Lucene.Net/Util/Fst/FSTEnum.cs index ae43d3bf57..cecd46e00b 100644 --- a/src/Lucene.Net/Util/Fst/FSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/FSTEnum.cs @@ -201,8 +201,8 @@ protected virtual void DoSeekCeil() m_fst.ReadNextRealArc(arc, @in); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => arc.ArcIdx == mid); - Debugging.Assert(() => arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); + Debugging.Assert(arc.ArcIdx == mid); + Debugging.Assert(arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); } m_output[m_upto] = m_fst.Outputs.Add(m_output[m_upto - 1], arc.Output); if (targetLabel == FST.END_LABEL) @@ -220,7 +220,7 @@ protected virtual void DoSeekCeil() // Dead end arc.ArcIdx = arc.NumArcs - 2; m_fst.ReadNextRealArc(arc, @in); - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.IsLast); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsLast); // Dead end (target is after the last arc); // rollback to last fork then push m_upto--; @@ -245,7 +245,7 @@ protected virtual void DoSeekCeil() { arc.ArcIdx = (low > high ? 
low : high) - 1; m_fst.ReadNextRealArc(arc, @in); - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label > targetLabel); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label > targetLabel); PushFirst(); return; } @@ -375,8 +375,8 @@ protected virtual void DoSeekFloor() m_fst.ReadNextRealArc(arc, @in); if (Debugging.AssertsEnabled) { - Debugging.Assert(() => arc.ArcIdx == mid); - Debugging.Assert(() => arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); + Debugging.Assert(arc.ArcIdx == mid); + Debugging.Assert(arc.Label == targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel + " mid=" + mid); } m_output[m_upto] = m_fst.Outputs.Add(m_output[m_upto - 1], arc.Output); if (targetLabel == FST.END_LABEL) @@ -434,8 +434,8 @@ protected virtual void DoSeekFloor() bool check = arc.IsLast || m_fst.ReadNextArcLabel(arc, @in) > targetLabel; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => check); - Debugging.Assert(() => arc.Label < targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel); + Debugging.Assert(check); + Debugging.Assert(arc.Label < targetLabel, () => "arc.label=" + arc.Label + " vs targetLabel=" + targetLabel); } PushLast(); return; @@ -583,7 +583,7 @@ private void Incr() private void PushFirst() { FST.Arc arc = m_arcs[m_upto]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(arc != null); while (true) { @@ -610,7 +610,7 @@ private void PushFirst() private void PushLast() { FST.Arc arc = m_arcs[m_upto]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc != null); + if (Debugging.AssertsEnabled) Debugging.Assert(arc != null); while (true) { diff --git a/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs index 2486563f02..59ad8605d3 100644 --- a/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs @@ -46,8 +46,8 @@ public override Int32sRef Common(Int32sRef output1, Int32sRef output2) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output1 != null); - Debugging.Assert(() => output2 != null); + Debugging.Assert(output1 != null); + Debugging.Assert(output2 != null); } int pos1 = output1.Offset; @@ -88,8 +88,8 @@ public override Int32sRef Subtract(Int32sRef output, Int32sRef inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output != null); - Debugging.Assert(() => inc != null); + Debugging.Assert(output != null); + Debugging.Assert(inc != null); } if (inc == NO_OUTPUT) { @@ -105,8 +105,8 @@ public override Int32sRef Subtract(Int32sRef output, Int32sRef inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); - Debugging.Assert(() => inc.Length > 0); + Debugging.Assert(inc.Length < output.Length, () => "inc.length=" + inc.Length + " vs output.length=" + output.Length); + Debugging.Assert(inc.Length > 0); } return new Int32sRef(output.Int32s, output.Offset + inc.Length, output.Length - inc.Length); } @@ -116,8 +116,8 @@ public override Int32sRef Add(Int32sRef prefix, Int32sRef output) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => prefix != null); - Debugging.Assert(() => output != null); + Debugging.Assert(prefix != null); + Debugging.Assert(output != null); } if (prefix == NO_OUTPUT) { @@ -131,8 +131,8 @@ public override Int32sRef Add(Int32sRef prefix, 
Int32sRef output) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => prefix.Length > 0); - Debugging.Assert(() => output.Length > 0); + Debugging.Assert(prefix.Length > 0); + Debugging.Assert(output.Length > 0); } Int32sRef result = new Int32sRef(prefix.Length + output.Length); Array.Copy(prefix.Int32s, prefix.Offset, result.Int32s, 0, prefix.Length); @@ -144,7 +144,7 @@ public override Int32sRef Add(Int32sRef prefix, Int32sRef output) public override void Write(Int32sRef prefix, DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => prefix != null); + if (Debugging.AssertsEnabled) Debugging.Assert(prefix != null); @out.WriteVInt32(prefix.Length); for (int idx = 0; idx < prefix.Length; idx++) { diff --git a/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs b/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs index 11fb3da316..206831bd33 100644 --- a/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs @@ -88,7 +88,7 @@ public Int32sRefFSTEnum.InputOutput SeekExact(Int32sRef target) m_targetLength = target.Length; if (base.DoSeekExact()) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_upto == 1 + target.Length); + if (Debugging.AssertsEnabled) Debugging.Assert(m_upto == 1 + target.Length); return SetResult(); } else diff --git a/src/Lucene.Net/Util/Fst/NoOutputs.cs b/src/Lucene.Net/Util/Fst/NoOutputs.cs index f96864461f..2a871d76cb 100644 --- a/src/Lucene.Net/Util/Fst/NoOutputs.cs +++ b/src/Lucene.Net/Util/Fst/NoOutputs.cs @@ -66,8 +66,8 @@ public override object Common(object output1, object output2) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output1 == NO_OUTPUT); - Debugging.Assert(() => output2 == NO_OUTPUT); + Debugging.Assert(output1 == NO_OUTPUT); + Debugging.Assert(output2 == NO_OUTPUT); } return NO_OUTPUT; } @@ -76,8 +76,8 @@ public override object Subtract(object output, object inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output == NO_OUTPUT); - Debugging.Assert(() => inc == NO_OUTPUT); + Debugging.Assert(output == NO_OUTPUT); + Debugging.Assert(inc == NO_OUTPUT); } return NO_OUTPUT; } @@ -86,8 +86,8 @@ public override object Add(object prefix, object output) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => prefix == NO_OUTPUT, () => "got " + prefix); - Debugging.Assert(() => output == NO_OUTPUT); + Debugging.Assert(prefix == NO_OUTPUT, () => "got " + prefix); + Debugging.Assert(output == NO_OUTPUT); } return NO_OUTPUT; } @@ -97,8 +97,8 @@ public override object Merge(object first, object second) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => first == NO_OUTPUT); - Debugging.Assert(() => second == NO_OUTPUT); + Debugging.Assert(first == NO_OUTPUT); + Debugging.Assert(second == NO_OUTPUT); } return NO_OUTPUT; } diff --git a/src/Lucene.Net/Util/Fst/NodeHash.cs b/src/Lucene.Net/Util/Fst/NodeHash.cs index 8b05c88278..c54685067d 100644 --- a/src/Lucene.Net/Util/Fst/NodeHash.cs +++ b/src/Lucene.Net/Util/Fst/NodeHash.cs @@ -162,7 +162,7 @@ public long Add(Builder.UnCompiledNode nodeIn) // freeze & add long node = fst.AddNode(nodeIn); //System.out.println(" now freeze node=" + node); - if (Debugging.AssertsEnabled) Debugging.Assert(() => Hash(node) == h, () => "frozenHash=" + Hash(node) + " vs h=" + h); + if (Debugging.AssertsEnabled) Debugging.Assert(Hash(node) == h, () => "frozenHash=" + Hash(node) + " vs h=" + h); count++; table.Set(pos, node); // Rehash at 2/3 occupancy: diff --git a/src/Lucene.Net/Util/Fst/PairOutputs.cs b/src/Lucene.Net/Util/Fst/PairOutputs.cs index 
ca85eaa279..664dd88aa3 100644 --- a/src/Lucene.Net/Util/Fst/PairOutputs.cs +++ b/src/Lucene.Net/Util/Fst/PairOutputs.cs @@ -86,7 +86,7 @@ public virtual Pair NewPair(A a, B b) else { var p = new Pair(a, b); - if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(p)); + if (Debugging.AssertsEnabled) Debugging.Assert(Valid(p)); return p; } } @@ -128,8 +128,8 @@ public override Pair Common(Pair pair1, Pair pair2) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(pair1)); - Debugging.Assert(() => Valid(pair2)); + Debugging.Assert(Valid(pair1)); + Debugging.Assert(Valid(pair2)); } return NewPair(outputs1.Common(pair1.Output1, pair2.Output1), outputs2.Common(pair1.Output2, pair2.Output2)); } @@ -138,8 +138,8 @@ public override Pair Subtract(Pair output, Pair inc) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(output)); - Debugging.Assert(() => Valid(inc)); + Debugging.Assert(Valid(output)); + Debugging.Assert(Valid(inc)); } return NewPair(outputs1.Subtract(output.Output1, inc.Output1), outputs2.Subtract(output.Output2, inc.Output2)); } @@ -148,15 +148,15 @@ public override Pair Add(Pair prefix, Pair output) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(prefix)); - Debugging.Assert(() => Valid(output)); + Debugging.Assert(Valid(prefix)); + Debugging.Assert(Valid(output)); } return NewPair(outputs1.Add(prefix.Output1, output.Output1), outputs2.Add(prefix.Output2, output.Output2)); } public override void Write(Pair output, DataOutput writer) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output)); + if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); outputs1.Write(output.Output1, writer); outputs2.Write(output.Output2, writer); } @@ -172,7 +172,7 @@ public override Pair Read(DataInput @in) public override string OutputToString(Pair output) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output)); + if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); return ""; } diff --git a/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs b/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs index 0825c97a40..caeb9ca557 100644 --- a/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs +++ b/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs @@ -47,8 +47,8 @@ private PositiveInt32Outputs() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(output1)); - Debugging.Assert(() => Valid(output2)); + Debugging.Assert(Valid(output1)); + Debugging.Assert(Valid(output2)); } if (output1 == NO_OUTPUT || output2 == NO_OUTPUT) { @@ -58,8 +58,8 @@ private PositiveInt32Outputs() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => output1 > 0); - Debugging.Assert(() => output2 > 0); + Debugging.Assert(output1 > 0); + Debugging.Assert(output2 > 0); } return Math.Min(output1.Value, output2.Value); } @@ -69,9 +69,9 @@ private PositiveInt32Outputs() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(output)); - Debugging.Assert(() => Valid(inc)); - Debugging.Assert(() => output >= inc); + Debugging.Assert(Valid(output)); + Debugging.Assert(Valid(inc)); + Debugging.Assert(output >= inc); } if (inc == NO_OUTPUT) @@ -92,8 +92,8 @@ private PositiveInt32Outputs() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => Valid(prefix)); - Debugging.Assert(() => Valid(output)); + Debugging.Assert(Valid(prefix)); + Debugging.Assert(Valid(output)); } if (prefix == NO_OUTPUT) { @@ -111,7 +111,7 @@ private PositiveInt32Outputs() public override void Write(long? 
output, DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Valid(output)); + if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); @out.WriteVInt64(output.Value); } @@ -130,8 +130,8 @@ public override void Write(long? output, DataOutput @out) private bool Valid(long? o) { - Debugging.Assert(() => o != null, () => "PositiveIntOutput precondition fail"); - Debugging.Assert(() => o == NO_OUTPUT || o > 0, () => "o=" + o); + Debugging.Assert(o != null, () => "PositiveIntOutput precondition fail"); + Debugging.Assert(o == NO_OUTPUT || o > 0, () => "o=" + o); return true; } diff --git a/src/Lucene.Net/Util/Fst/Util.cs b/src/Lucene.Net/Util/Fst/Util.cs index ed59751da8..e440f0ee17 100644 --- a/src/Lucene.Net/Util/Fst/Util.cs +++ b/src/Lucene.Net/Util/Fst/Util.cs @@ -74,7 +74,7 @@ public static T Get(FST fst, Int32sRef input) /// public static T Get(FST fst, BytesRef input) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => fst.InputType == FST.INPUT_TYPE.BYTE1); + if (Debugging.AssertsEnabled) Debugging.Assert(fst.InputType == FST.INPUT_TYPE.BYTE1); var fstReader = fst.GetBytesReader(); @@ -385,7 +385,7 @@ public TopNSearcher(FST fst, int topN, int maxQueueDepth, IComparer compar /// protected virtual void AddIfCompetitive(FSTPath path) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => queue != null); + if (Debugging.AssertsEnabled) Debugging.Assert(queue != null); T cost = fst.Outputs.Add(path.Cost, path.Arc.Output); //System.out.println(" addIfCompetitive queue.size()=" + queue.size() + " path=" + path + " + label=" + path.arc.label); @@ -408,7 +408,7 @@ protected virtual void AddIfCompetitive(FSTPath path) path.Input.Length--; // We should never see dups: - if (Debugging.AssertsEnabled) Debugging.Assert(() => cmp != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(cmp != 0); if (cmp < 0) { @@ -596,7 +596,7 @@ public virtual TopResults Search() fst.ReadNextArc(path.Arc, fstReader); } - if (Debugging.AssertsEnabled) Debugging.Assert(() => foundZero); + if (Debugging.AssertsEnabled) Debugging.Assert(foundZero); if (queue != null) { @@ -917,7 +917,7 @@ public static void ToDot(FST fst, TextWriter @out, bool sameRank, bool lab arcColor = "black"; } - if (Debugging.AssertsEnabled) Debugging.Assert(() => arc.Label != FST.END_LABEL); + if (Debugging.AssertsEnabled) Debugging.Assert(arc.Label != FST.END_LABEL); @out.Write(" " + node + " -> " + arc.Target + " [label=\"" + PrintableLabel(arc.Label) + outs + "\"" + (arc.IsFinal ? " style=\"bold\"" : "") + " color=\"" + arcColor + "\"]\n"); // Break the loop if we're on the last arc of this state. 
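// LUCENENET editorial note (sketch, not part of the patch): the outer
// "if (Debugging.AssertsEnabled)" guard is what keeps these checks free in
// release runs. C# evaluates arguments eagerly, so an unguarded
// Debugging.Assert(Valid(output)) would still execute Valid() on every call
// regardless of whether asserts are on; the guard reduces the disabled case to
// a single branch. Minimal illustration (names hypothetical):
static long ReadAt(long[] words, long index)
{
    // Without the guard, both comparisons would run in release builds too.
    if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < words.Length, () => "index=" + index);
    return words[(int)index];
}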
@@ -1068,7 +1068,7 @@ public static BytesRef ToBytesRef(Int32sRef input, BytesRef scratch) { int value = input.Int32s[i + input.Offset]; // NOTE: we allow -128 to 255 - if (Debugging.AssertsEnabled) Debugging.Assert(() => value >= sbyte.MinValue && value <= 255, () => "value " + value + " doesn't fit into byte"); + if (Debugging.AssertsEnabled) Debugging.Assert(value >= sbyte.MinValue && value <= 255, () => "value " + value + " doesn't fit into byte"); scratch.Bytes[i] = (byte)value; } scratch.Length = input.Length; diff --git a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs index 51a877d676..19d876a354 100644 --- a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs +++ b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs @@ -137,7 +137,7 @@ public static void Encode(byte[] inputArray, int inputOffset, int inputLength, c [CLSCompliant(false)] public static void Encode(sbyte[] inputArray, int inputOffset, int inputLength, char[] outputArray, int outputOffset, int outputLength) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outputLength == GetEncodedLength(inputArray, inputOffset, inputLength)); + if (Debugging.AssertsEnabled) Debugging.Assert(outputLength == GetEncodedLength(inputArray, inputOffset, inputLength)); if (inputLength > 0) { int inputByteNum = inputOffset; @@ -217,7 +217,7 @@ public static void Decode(char[] inputArray, int inputOffset, int inputLength, b [CLSCompliant(false)] public static void Decode(char[] inputArray, int inputOffset, int inputLength, sbyte[] outputArray, int outputOffset, int outputLength) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => outputLength == GetDecodedLength(inputArray, inputOffset, inputLength)); + if (Debugging.AssertsEnabled) Debugging.Assert(outputLength == GetDecodedLength(inputArray, inputOffset, inputLength)); int numInputChars = inputLength - 1; int numOutputBytes = outputLength; diff --git a/src/Lucene.Net/Util/InfoStream.cs b/src/Lucene.Net/Util/InfoStream.cs index 020a072463..3899476e1c 100644 --- a/src/Lucene.Net/Util/InfoStream.cs +++ b/src/Lucene.Net/Util/InfoStream.cs @@ -42,7 +42,7 @@ private sealed class NoOutput : InfoStream { public override void Message(string component, string message) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false, () => "message() should not be called when isEnabled returns false"); + if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "message() should not be called when isEnabled returns false"); } public override bool IsEnabled(string component) diff --git a/src/Lucene.Net/Util/IntBlockPool.cs b/src/Lucene.Net/Util/IntBlockPool.cs index a1a05dfde7..b26582b6dc 100644 --- a/src/Lucene.Net/Util/IntBlockPool.cs +++ b/src/Lucene.Net/Util/IntBlockPool.cs @@ -243,7 +243,7 @@ private int NewSlice(int size) if (Int32Upto > INT32_BLOCK_SIZE - size) { NextBuffer(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertSliceBuffer(buffer)); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertSliceBuffer(buffer)); } int upto = Int32Upto; @@ -292,7 +292,7 @@ private int AllocSlice(int[] slice, int sliceOffset) if (Int32Upto > INT32_BLOCK_SIZE - newSize) { NextBuffer(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => AssertSliceBuffer(buffer)); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertSliceBuffer(buffer)); } int newUpto = Int32Upto; @@ -337,7 +337,7 @@ public virtual void Reset(int sliceOffset) public virtual void WriteInt32(int value) { int[] ints = pool.buffers[offset >> INT32_BLOCK_SHIFT]; - 
if (Debugging.AssertsEnabled) Debugging.Assert(() => ints != null); + if (Debugging.AssertsEnabled) Debugging.Assert(ints != null); int relativeOffset = offset & INT32_BLOCK_MASK; if (ints[relativeOffset] != 0) { @@ -427,7 +427,7 @@ public bool IsEndOfSlice { get { - if (Debugging.AssertsEnabled) Debugging.Assert(() => upto + bufferOffset <= end); + if (Debugging.AssertsEnabled) Debugging.Assert(upto + bufferOffset <= end); return upto + bufferOffset == end; } } @@ -442,8 +442,8 @@ public int ReadInt32() { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => !IsEndOfSlice); - Debugging.Assert(() => upto <= limit); + Debugging.Assert(!IsEndOfSlice); + Debugging.Assert(upto <= limit); } if (upto == limit) { @@ -468,7 +468,7 @@ private void NextSlice() if (nextIndex + newSize >= end) { // We are advancing to the final slice - if (Debugging.AssertsEnabled) Debugging.Assert(() => end - nextIndex > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(end - nextIndex > 0); limit = end - bufferOffset; } else diff --git a/src/Lucene.Net/Util/IntsRef.cs b/src/Lucene.Net/Util/IntsRef.cs index 0ee2174bb2..f5effcf575 100644 --- a/src/Lucene.Net/Util/IntsRef.cs +++ b/src/Lucene.Net/Util/IntsRef.cs @@ -101,7 +101,7 @@ public Int32sRef(int[] ints, int offset, int length) this.ints = ints; this.Offset = offset; this.Length = length; - if (Debugging.AssertsEnabled) Debugging.Assert(IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid()); } /// @@ -222,7 +222,7 @@ public void CopyInt32s(Int32sRef other) /// public void Grow(int newLength) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); if (ints.Length < newLength) { ints = ArrayUtil.Grow(ints, newLength); diff --git a/src/Lucene.Net/Util/LongBitSet.cs b/src/Lucene.Net/Util/LongBitSet.cs index 8d2d8508a2..a2ecf93d00 100644 --- a/src/Lucene.Net/Util/LongBitSet.cs +++ b/src/Lucene.Net/Util/LongBitSet.cs @@ -122,7 +122,7 @@ public long Cardinality() public bool Get(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, () => "index=" + index); int i = (int)(index >> 6); // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. 
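// LUCENENET editorial note (sketch, not part of the patch): the "div 64" /
// "mod 64" comments in these Int64BitSet hunks describe the usual packed-bit
// addressing: a right shift selects the 64-bit word and a mask selects the bit
// inside it. Worked example, not library code:
static (int wordNum, long bitmask) Locate(long index)
{
    int wordNum = (int)(index >> 6);  // index / 64: which long holds the bit
    int bit = (int)(index & 0x3f);    // index % 64: offset inside that long
    return (wordNum, 1L << bit);      // e.g. index 131 -> word 2, mask 1L << 3
}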
@@ -133,7 +133,7 @@ public bool Get(long index) public void Set(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -142,7 +142,7 @@ public void Set(long index) public bool GetAndSet(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -153,7 +153,7 @@ public bool GetAndSet(long index) public void Clear(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = (int)(index >> 6); int bit = (int)(index & 0x03f); long bitmask = 1L << bit; @@ -162,7 +162,7 @@ public void Clear(long index) public bool GetAndClear(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)(index & 0x3f); // mod 64 long bitmask = 1L << bit; @@ -177,7 +177,7 @@ public bool GetAndClear(long index) /// public long NextSetBit(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int i = (int)(index >> 6); int subIndex = (int)(index & 0x3f); // index within the word long word = bits[i] >> subIndex; // skip all the bits to the right of index @@ -205,7 +205,7 @@ public long NextSetBit(long index) /// public long PrevSetBit(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits, () => "index=" + index + " numBits=" + numBits); int i = (int)(index >> 6); int subIndex = (int)(index & 0x3f); // index within the word long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index @@ -231,7 +231,7 @@ public long PrevSetBit(long index) /// this = this OR other public void Or(Int64BitSet other) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); + if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); int pos = Math.Min(numWords, other.numWords); while (--pos >= 0) { @@ -243,7 +243,7 @@ public void Or(Int64BitSet other) /// this = this XOR other public void Xor(Int64BitSet other) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); + if (Debugging.AssertsEnabled) Debugging.Assert(other.numWords <= numWords, () => "numWords=" + numWords + ", other.numWords=" + other.numWords); int pos = Math.Min(numWords, other.numWords); while (--pos >= 0) { @@ -305,8 +305,8 @@ public void Flip(long startIndex, long endIndex) { if (Debugging.AssertsEnabled) 
{ - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(startIndex >= 0 && startIndex < numBits); + Debugging.Assert(endIndex >= 0 && endIndex <= numBits); } if (endIndex <= startIndex) { @@ -353,8 +353,8 @@ public void Set(long startIndex, long endIndex) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(startIndex >= 0 && startIndex < numBits); + Debugging.Assert(endIndex >= 0 && endIndex <= numBits); } if (endIndex <= startIndex) { @@ -387,8 +387,8 @@ public void Clear(long startIndex, long endIndex) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => startIndex >= 0 && startIndex < numBits); - Debugging.Assert(() => endIndex >= 0 && endIndex <= numBits); + Debugging.Assert(startIndex >= 0 && startIndex < numBits); + Debugging.Assert(endIndex >= 0 && endIndex <= numBits); } if (endIndex <= startIndex) { diff --git a/src/Lucene.Net/Util/LongsRef.cs b/src/Lucene.Net/Util/LongsRef.cs index efc1896703..2776788ecf 100644 --- a/src/Lucene.Net/Util/LongsRef.cs +++ b/src/Lucene.Net/Util/LongsRef.cs @@ -101,7 +101,7 @@ public Int64sRef(long[] longs, int offset, int length) this.longs = longs; this.Offset = offset; this.Length = length; - if (Debugging.AssertsEnabled) Debugging.Assert(IsValid); + if (Debugging.AssertsEnabled) Debugging.Assert(IsValid()); } /// @@ -222,7 +222,7 @@ public void CopyInt64s(Int64sRef other) /// public void Grow(int newLength) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => Offset == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); if (longs.Length < newLength) { longs = ArrayUtil.Grow(longs, newLength); diff --git a/src/Lucene.Net/Util/MergedIterator.cs b/src/Lucene.Net/Util/MergedIterator.cs index 5ff2d09871..f5530bc909 100644 --- a/src/Lucene.Net/Util/MergedIterator.cs +++ b/src/Lucene.Net/Util/MergedIterator.cs @@ -113,7 +113,7 @@ public void Dispose() private void PullTop() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => numTop == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(numTop == 0); top[numTop++] = queue.Pop(); if (removeDuplicates) { diff --git a/src/Lucene.Net/Util/OfflineSorter.cs b/src/Lucene.Net/Util/OfflineSorter.cs index c9ec9f38e3..0df01e6eb3 100644 --- a/src/Lucene.Net/Util/OfflineSorter.cs +++ b/src/Lucene.Net/Util/OfflineSorter.cs @@ -369,7 +369,7 @@ private FileInfo SortPartition(/*int len*/) // LUCENENET NOTE: made private, sin IBytesRefIterator iter = buffer.GetIterator(comparer); while ((spare = iter.Next()) != null) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => spare.Length <= ushort.MaxValue); + if (Debugging.AssertsEnabled) Debugging.Assert(spare.Length <= ushort.MaxValue); @out.Write(spare); } } @@ -534,7 +534,7 @@ private static BinaryWriterDataOutput NewBinaryWriterDataOutput(FileInfo file) /// public virtual void Write(BytesRef @ref) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => @ref != null); + if (Debugging.AssertsEnabled) Debugging.Assert(@ref != null); Write(@ref.Bytes, @ref.Offset, @ref.Length); } @@ -556,9 +556,9 @@ public virtual void Write(byte[] bytes, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => bytes != null); - Debugging.Assert(() => off >= 0 && off + len <= bytes.Length); - Debugging.Assert(() => len >= 0); + Debugging.Assert(bytes != null); + Debugging.Assert(off >= 0 && off + len <= 
bytes.Length); + Debugging.Assert(len >= 0); } os.WriteInt16((short)len); os.WriteBytes(bytes, off, len); // LUCENENET NOTE: We call WriteBytes, since there is no Write() on Lucene's version of DataOutput @@ -654,7 +654,7 @@ public virtual byte[] Read() } #pragma warning restore CA1031 // Do not catch general exception types - if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= 0, () => "Sanity: sequence length < 0: " + length); + if (Debugging.AssertsEnabled) Debugging.Assert(length >= 0, () => "Sanity: sequence length < 0: " + length); byte[] result = new byte[length]; inputStream.ReadBytes(result, 0, length); return result; diff --git a/src/Lucene.Net/Util/OpenBitSet.cs b/src/Lucene.Net/Util/OpenBitSet.cs index e0d65c69ec..e31fed41d8 100644 --- a/src/Lucene.Net/Util/OpenBitSet.cs +++ b/src/Lucene.Net/Util/OpenBitSet.cs @@ -200,7 +200,7 @@ public virtual bool Get(int index) /// public virtual bool FastGet(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int i = index >> 6; // div 64 // signed shift will keep a negative index and force an // array-index-out-of-bounds-exception, removing the need for an explicit check. @@ -230,7 +230,7 @@ public virtual bool Get(long index) /// public virtual bool FastGet(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int i = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -255,7 +255,7 @@ public boolean get1(int index) { /// public virtual int GetBit(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int i = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 return ((int)((long)((ulong)m_bits[i] >> bit))) & 0x01; @@ -286,7 +286,7 @@ public virtual void Set(long index) /// public virtual void FastSet(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -299,7 +299,7 @@ public virtual void FastSet(int index) /// public virtual void FastSet(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = (int)(index >> 6); int bit = (int)index & 0x3f; long bitmask = 1L << bit; @@ -354,7 +354,7 @@ protected virtual int ExpandingWordNum(long index) /// public virtual void FastClear(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = index >> 6; int bit = index & 0x03f; long bitmask = 1L << bit; @@ -374,7 +374,7 @@ public virtual void FastClear(int index) /// public virtual void FastClear(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -493,7 +493,7 @@ public virtual void 
Clear(long startIndex, long endIndex) /// public virtual bool GetAndSet(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -508,7 +508,7 @@ public virtual bool GetAndSet(int index) /// public virtual bool GetAndSet(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -523,7 +523,7 @@ public virtual bool GetAndSet(long index) /// public virtual void FastFlip(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -536,7 +536,7 @@ public virtual void FastFlip(int index) /// public virtual void FastFlip(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -559,7 +559,7 @@ public virtual void Flip(long index) /// public virtual bool FlipAndGet(int index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = index >> 6; // div 64 int bit = index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -573,7 +573,7 @@ public virtual bool FlipAndGet(int index) /// public virtual bool FlipAndGet(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < numBits); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits); int wordNum = (int)(index >> 6); // div 64 int bit = (int)index & 0x3f; // mod 64 long bitmask = 1L << bit; @@ -913,7 +913,7 @@ public virtual void Union(OpenBitSet other) // https://github.com/apache/lucenenet/pull/154 int oldLen = m_wlen; EnsureCapacityWords(newLen); - if (Debugging.AssertsEnabled) Debugging.Assert(() => (numBits = Math.Max(other.numBits, numBits)) >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert((numBits = Math.Max(other.numBits, numBits)) >= 0); long[] thisArr = this.m_bits; long[] otherArr = other.m_bits; @@ -953,7 +953,7 @@ public virtual void Xor(OpenBitSet other) // https://github.com/apache/lucenenet/pull/154 int oldLen = m_wlen; EnsureCapacityWords(newLen); - if (Debugging.AssertsEnabled) Debugging.Assert(() => (numBits = Math.Max(other.numBits, numBits)) >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert((numBits = Math.Max(other.numBits, numBits)) >= 0); long[] thisArr = this.m_bits; long[] otherArr = other.m_bits; @@ -1011,7 +1011,7 @@ public virtual void EnsureCapacityWords(int numWords) { m_bits = ArrayUtil.Grow(m_bits, numWords); m_wlen = numWords; - if (Debugging.AssertsEnabled) Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numWords << 6)) >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert((this.numBits = Math.Max(this.numBits, numWords << 6)) >= 0); } /// @@ -1023,7 +1023,7 @@ public virtual void 
EnsureCapacity(long numBits) EnsureCapacityWords(Bits2words(numBits)); // ensureCapacityWords sets numBits to a multiple of 64, but we want to set // it to exactly what the app asked. - if (Debugging.AssertsEnabled) Debugging.Assert(() => (this.numBits = Math.Max(this.numBits, numBits)) >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert((this.numBits = Math.Max(this.numBits, numBits)) >= 0); } /// diff --git a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs index 6f5479a981..23150f34d2 100644 --- a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs +++ b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs @@ -63,7 +63,7 @@ static PForDeltaDocIdSet() for (int i = 1; i < ITERATIONS.Length; ++i) { DECODERS[i] = PackedInt32s.GetDecoder(PackedInt32s.Format.PACKED, PackedInt32s.VERSION_CURRENT, i); - if (Debugging.AssertsEnabled) Debugging.Assert(() => BLOCK_SIZE % DECODERS[i].ByteValueCount == 0); + if (Debugging.AssertsEnabled) Debugging.Assert(BLOCK_SIZE % DECODERS[i].ByteValueCount == 0); ITERATIONS[i] = BLOCK_SIZE / DECODERS[i].ByteValueCount; BYTE_BLOCK_COUNTS[i] = ITERATIONS[i] * DECODERS[i].ByteBlockCount; maxByteBLockCount = Math.Max(maxByteBLockCount, DECODERS[i].ByteBlockCount); @@ -212,7 +212,7 @@ internal virtual int ComputeOptimalNumberOfBits() } } this.bitsPerException = actualBitsPerValue - bitsPerValue; - if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferSize < BLOCK_SIZE || numExceptions < bufferSize); + if (Debugging.AssertsEnabled) Debugging.Assert(bufferSize < BLOCK_SIZE || numExceptions < bufferSize); return blockSize; } @@ -231,7 +231,7 @@ internal virtual void PforEncode() buffer[i] &= mask; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => ex == numExceptions); + if (Debugging.AssertsEnabled) Debugging.Assert(ex == numExceptions); Arrays.Fill(exceptions, numExceptions, BLOCK_SIZE, 0); } @@ -245,7 +245,7 @@ internal virtual void PforEncode() if (numExceptions > 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerException > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerException > 0); data.WriteByte((byte)(sbyte)numExceptions); data.WriteByte((byte)(sbyte)bitsPerException); PackedInt32s.IEncoder encoder = PackedInt32s.GetEncoder(PackedInt32s.Format.PACKED, PackedInt32s.VERSION_CURRENT, bitsPerException); @@ -316,18 +316,18 @@ internal virtual void EncodeBlock() ++numBlocks; - if (Debugging.AssertsEnabled) Debugging.Assert(() => data.Length - originalLength == blockSize, () => (data.Length - originalLength) + " <> " + blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(data.Length - originalLength == blockSize, () => (data.Length - originalLength) + " <> " + blockSize); } /// /// Build the instance. 
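// LUCENENET editorial note (sketch, not part of the patch): the OpenBitSet
// hunks above assert on expressions with side effects, e.g.
// (numBits = Math.Max(other.numBits, numBits)) >= 0. Because of the
// surrounding AssertsEnabled guard, that assignment only happens when asserts
// are on, matching Java's semantics where an assert expression is skipped
// entirely unless -ea is set. Minimal repro of the quirk (hypothetical names):
static int touched = 0;
static void Touch()
{
    // "touched" changes only in assert-enabled runs; keep real state changes
    // out of assertion expressions unless that is deliberate.
    if (Debugging.AssertsEnabled) Debugging.Assert(++touched > 0);
}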
public virtual PForDeltaDocIdSet Build() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => bufferSize < BLOCK_SIZE); + if (Debugging.AssertsEnabled) Debugging.Assert(bufferSize < BLOCK_SIZE); if (cardinality == 0) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => previousDoc == -1); + if (Debugging.AssertsEnabled) Debugging.Assert(previousDoc == -1); return EMPTY; } @@ -469,7 +469,7 @@ internal virtual void PforDecompress(byte token) internal virtual void UnaryDecompress(byte token) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => (token & HAS_EXCEPTIONS) == 0); + if (Debugging.AssertsEnabled) Debugging.Assert((token & HAS_EXCEPTIONS) == 0); int docID = this.docID; for (int i = 0; i < BLOCK_SIZE; ) { @@ -505,7 +505,7 @@ internal virtual void DecompressBlock() internal virtual void SkipBlock() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => i == BLOCK_SIZE); + if (Debugging.AssertsEnabled) Debugging.Assert(i == BLOCK_SIZE); DecompressBlock(); docID = nextDocs[BLOCK_SIZE - 1]; } @@ -527,8 +527,8 @@ internal virtual int ForwardBinarySearch(int target) int lo = Math.Max(blockIdx / indexInterval, 0), hi = lo + 1; if (Debugging.AssertsEnabled) { - Debugging.Assert(() => blockIdx == -1 || docIDs.Get(lo) <= docID); - Debugging.Assert(() => lo + 1 == docIDs.Count || docIDs.Get(lo + 1) > docID); + Debugging.Assert(blockIdx == -1 || docIDs.Get(lo) <= docID); + Debugging.Assert(lo + 1 == docIDs.Count || docIDs.Get(lo + 1) > docID); } while (true) { @@ -562,15 +562,15 @@ internal virtual int ForwardBinarySearch(int target) } if (Debugging.AssertsEnabled) { - Debugging.Assert(() => docIDs.Get(hi) <= target); - Debugging.Assert(() => hi + 1 == docIDs.Count || docIDs.Get(hi + 1) > target); + Debugging.Assert(docIDs.Get(hi) <= target); + Debugging.Assert(hi + 1 == docIDs.Count || docIDs.Get(hi + 1) > target); } return hi; } public override int Advance(int target) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => target > docID); + if (Debugging.AssertsEnabled) Debugging.Assert(target > docID); if (nextDocs[BLOCK_SIZE - 1] < target) { // not in the next block, now use the index diff --git a/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs index e0c22dac68..d2f993a814 100644 --- a/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs +++ b/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs @@ -110,7 +110,7 @@ internal virtual void Grow(int newBlockCount) public override sealed long Get(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < Count); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < Count); int block = (int)(index >> pageShift); int element = (int)(index & pageMask); return Get(block, element); @@ -125,9 +125,9 @@ public int Get(long index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < Count); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < Count); + Debugging.Assert(off + len <= arr.Length); } int block = (int)(index >> pageShift); @@ -196,7 +196,7 @@ internal void FillValues() /// Return the next long in the buffer. 
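// LUCENENET editorial note (sketch, not part of the patch): two member kinds
// appear in these conversions. HasNext (just below) is a bool property, so only
// the lambda wrapper is dropped; IsValid in the Int32sRef/Int64sRef hunks is a
// method, so the old method-group form Assert(IsValid), implicitly a
// Func<bool>, becomes an explicit call Assert(IsValid()). Hypothetical shape:
class CursorSketch
{
    private int pos, count;
    public bool HasNext => pos < count;   // property: Assert(HasNext)
    public bool IsValid() => pos >= 0;    // method:   Assert(IsValid())
    public void Check()
    {
        if (Debugging.AssertsEnabled) Debugging.Assert(HasNext && IsValid());
    }
}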
public long Next() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => HasNext); + if (Debugging.AssertsEnabled) Debugging.Assert(HasNext); long result = currentValues[pOff++]; if (pOff == currentCount) { diff --git a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs index 264ea28d64..388bc4de39 100644 --- a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs @@ -72,7 +72,7 @@ protected AbstractBlockPackedWriter(DataOutput @out, int blockSize) // LUCENENET /// Reset this writer to wrap . The block size remains unchanged. public virtual void Reset(DataOutput @out) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => @out != null); + if (Debugging.AssertsEnabled) Debugging.Assert(@out != null); this.m_out = @out; m_off = 0; m_ord = 0L; diff --git a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs index f1df1bfa56..874ff9a6c9 100644 --- a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs +++ b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs @@ -86,7 +86,7 @@ internal int IndexInPage(long index) public override sealed long Get(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < size); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < size); int pageIndex = PageIndex(index); int indexInPage = IndexInPage(index); return subMutables[pageIndex].Get(indexInPage); @@ -96,7 +96,7 @@ public override sealed long Get(long index) /// Set value at . public void Set(long index, long value) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < size); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < size); int pageIndex = PageIndex(index); int indexInPage = IndexInPage(index); subMutables[pageIndex].Set(indexInPage, value); @@ -150,7 +150,7 @@ public T Resize(long newSize) /// Similar to . public T Grow(long minSize) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => minSize >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(minSize >= 0); if (minSize <= Count) { T result = (T)this; diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReader.cs b/src/Lucene.Net/Util/Packed/BlockPackedReader.cs index d4750a57c4..f32c4ca9df 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedReader.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedReader.cs @@ -83,7 +83,7 @@ public BlockPackedReader(IndexInput @in, int packedIntsVersion, int blockSize, l public override long Get(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < valueCount); int block = (int)((long)((ulong)index >> blockShift)); int idx = (int)(index & blockMask); return (minValues == null ? 
0 : minValues[block]) + subReaders[block].Get(idx); diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs index c2e25bca95..2e3803bd87 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs @@ -129,7 +129,7 @@ public BlockPackedReaderIterator(DataInput @in, int packedIntsVersion, int block public void Reset(DataInput @in, long valueCount) { this.@in = @in; - if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(valueCount >= 0); this.valueCount = valueCount; off = blockSize; ord = 0; @@ -139,7 +139,7 @@ public void Reset(DataInput @in, long valueCount) /// Skip exactly values. public void Skip(long count) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => count >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(count >= 0); if (ord + count > valueCount || ord + count < 0) { throw new EndOfStreamException(); @@ -156,7 +156,7 @@ public void Skip(long count) } // 2. skip as many blocks as necessary - if (Debugging.AssertsEnabled) Debugging.Assert(() => off == blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(off == blockSize); while (count >= blockSize) { int token = @in.ReadByte() & 0xFF; @@ -180,7 +180,7 @@ public void Skip(long count) } // 3. skip last values - if (Debugging.AssertsEnabled) Debugging.Assert(() => count < blockSize); + if (Debugging.AssertsEnabled) Debugging.Assert(count < blockSize); Refill(); ord += count; off += (int)count; @@ -229,7 +229,7 @@ public long Next() /// Read between 1 and values. public Int64sRef Next(int count) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => count > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(count > 0); if (ord == valueCount) { throw new EndOfStreamException(); @@ -259,7 +259,7 @@ private void Refill() throw new IOException("Corrupted"); } long minValue = minEquals0 ? 
0L : ZigZagDecode(1L + ReadVInt64(@in)); - if (Debugging.AssertsEnabled) Debugging.Assert(() => minEquals0 || minValue != 0); + if (Debugging.AssertsEnabled) Debugging.Assert(minEquals0 || minValue != 0); if (bitsPerValue == 0) { diff --git a/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs index 30a13be05a..c6f579a2cf 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs @@ -71,7 +71,7 @@ public BlockPackedWriter(DataOutput @out, int blockSize) [MethodImpl(MethodImplOptions.NoInlining)] protected override void Flush() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => m_off > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(m_off > 0); long min = long.MaxValue, max = long.MinValue; for (int i = 0; i < m_off; ++i) { diff --git a/src/Lucene.Net/Util/Packed/BulkOperation.cs b/src/Lucene.Net/Util/Packed/BulkOperation.cs index a49d896efa..63281d4b33 100644 --- a/src/Lucene.Net/Util/Packed/BulkOperation.cs +++ b/src/Lucene.Net/Util/Packed/BulkOperation.cs @@ -164,12 +164,12 @@ public static BulkOperation Of(PackedInt32s.Format format, int bitsPerValue) { if (format == PackedInt32s.Format.PACKED) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => packedBulkOps[bitsPerValue - 1] != null); + if (Debugging.AssertsEnabled) Debugging.Assert(packedBulkOps[bitsPerValue - 1] != null); return packedBulkOps[bitsPerValue - 1]; } else if (format == PackedInt32s.Format.PACKED_SINGLE_BLOCK) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => packedSingleBlockBulkOps[bitsPerValue - 1] != null); + if (Debugging.AssertsEnabled) Debugging.Assert(packedSingleBlockBulkOps[bitsPerValue - 1] != null); return packedSingleBlockBulkOps[bitsPerValue - 1]; } else diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs index f39c87e140..a887472197 100644 --- a/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs +++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs @@ -36,7 +36,7 @@ internal class BulkOperationPacked : BulkOperation public BulkOperationPacked(int bitsPerValue) { this.bitsPerValue = bitsPerValue; - if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64); + if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64); int blocks = bitsPerValue; while ((blocks & 1) == 0) { @@ -62,7 +62,7 @@ public BulkOperationPacked(int bitsPerValue) this.mask = (1L << bitsPerValue) - 1; } this.intMask = (int)mask; - if (Debugging.AssertsEnabled) Debugging.Assert(() => longValueCount * bitsPerValue == 64 * longBlockCount); + if (Debugging.AssertsEnabled) Debugging.Assert(longValueCount * bitsPerValue == 64 * longBlockCount); } /// @@ -125,7 +125,7 @@ public override void Decode(byte[] blocks, int blocksOffset, long[] values, int nextValue = (bytes & ((1L << bits) - 1)) << bitsLeft; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsLeft == bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(bitsLeft == bitsPerValue); } public override void Decode(long[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations) @@ -178,7 +178,7 @@ public override void Decode(byte[] blocks, int blocksOffset, int[] values, int v nextValue = (bytes & ((1 << bits) - 1)) << bitsLeft; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsLeft == bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(bitsLeft == bitsPerValue); } public override 
void Encode(long[] values, int valuesOffset, long[] blocks, int blocksOffset, int iterations) @@ -244,7 +244,7 @@ public override void Encode(long[] values, int valuesOffset, byte[] blocks, int for (int i = 0; i < byteValueCount * iterations; ++i) { long v = values[valuesOffset++]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue == 64 || PackedInt32s.BitsRequired(v) <= bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue == 64 || PackedInt32s.BitsRequired(v) <= bitsPerValue); if (bitsPerValue < bitsLeft) { // just buffer @@ -266,7 +266,7 @@ public override void Encode(long[] values, int valuesOffset, byte[] blocks, int nextBlock = (int)((v & ((1L << bits) - 1)) << bitsLeft); } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsLeft == 8); + if (Debugging.AssertsEnabled) Debugging.Assert(bitsLeft == 8); } public override void Encode(int[] values, int valuesOffset, byte[] blocks, int blocksOffset, int iterations) @@ -276,7 +276,7 @@ public override void Encode(int[] values, int valuesOffset, byte[] blocks, int b for (int i = 0; i < byteValueCount * iterations; ++i) { int v = values[valuesOffset++]; - if (Debugging.AssertsEnabled) Debugging.Assert(() => PackedInt32s.BitsRequired(v & 0xFFFFFFFFL) <= bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(PackedInt32s.BitsRequired(v & 0xFFFFFFFFL) <= bitsPerValue); if (bitsPerValue < bitsLeft) { // just buffer @@ -298,7 +298,7 @@ public override void Encode(int[] values, int valuesOffset, byte[] blocks, int b nextBlock = (v & ((1 << bits) - 1)) << bitsLeft; } } - if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsLeft == 8); + if (Debugging.AssertsEnabled) Debugging.Assert(bitsLeft == 8); } } } \ No newline at end of file diff --git a/src/Lucene.Net/Util/Packed/Direct16.cs b/src/Lucene.Net/Util/Packed/Direct16.cs index ad72a39a09..f39d8eba92 100644 --- a/src/Lucene.Net/Util/Packed/Direct16.cs +++ b/src/Lucene.Net/Util/Packed/Direct16.cs @@ -90,9 +90,9 @@ public override int Get(int index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(off + len <= arr.Length); } int gets = Math.Min(m_valueCount - index, len); @@ -107,9 +107,9 @@ public override int Set(int index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(off + len <= arr.Length); } int sets = Math.Min(m_valueCount - index, len); @@ -122,7 +122,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => val == (val & 0xFFFFL)); + if (Debugging.AssertsEnabled) Debugging.Assert(val == (val & 0xFFFFL)); Arrays.Fill(values, fromIndex, toIndex, (short)val); } } diff --git a/src/Lucene.Net/Util/Packed/Direct32.cs b/src/Lucene.Net/Util/Packed/Direct32.cs index d2d1379d78..96b6206b71 100644 --- 
a/src/Lucene.Net/Util/Packed/Direct32.cs +++ b/src/Lucene.Net/Util/Packed/Direct32.cs @@ -90,9 +90,9 @@ public override int Get(int index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(off + len <= arr.Length); } int gets = Math.Min(m_valueCount - index, len); @@ -107,9 +107,9 @@ public override int Set(int index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(off + len <= arr.Length); } int sets = Math.Min(m_valueCount - index, len); @@ -122,7 +122,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => val == (val & 0xFFFFFFFFL)); + if (Debugging.AssertsEnabled) Debugging.Assert(val == (val & 0xFFFFFFFFL)); Arrays.Fill(values, fromIndex, toIndex, (int)val); } } diff --git a/src/Lucene.Net/Util/Packed/Direct64.cs b/src/Lucene.Net/Util/Packed/Direct64.cs index 1635302b08..add39f8ec3 100644 --- a/src/Lucene.Net/Util/Packed/Direct64.cs +++ b/src/Lucene.Net/Util/Packed/Direct64.cs @@ -84,9 +84,9 @@ public override int Get(int index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(off + len <= arr.Length); } int gets = Math.Min(m_valueCount - index, len); @@ -98,9 +98,9 @@ public override int Set(int index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(off + len <= arr.Length); } int sets = Math.Min(m_valueCount - index, len); diff --git a/src/Lucene.Net/Util/Packed/Direct8.cs b/src/Lucene.Net/Util/Packed/Direct8.cs index 67b1bce068..404afaaa3c 100644 --- a/src/Lucene.Net/Util/Packed/Direct8.cs +++ b/src/Lucene.Net/Util/Packed/Direct8.cs @@ -87,9 +87,9 @@ public override int Get(int index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(off + len <= arr.Length); } int gets = Math.Min(m_valueCount - index, len); @@ -104,9 
+104,9 @@ public override int Set(int index, long[] arr, int off, int len) { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")"); - Debugging.Assert(() => index >= 0 && index < m_valueCount); - Debugging.Assert(() => off + len <= arr.Length); + Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")"); + Debugging.Assert(index >= 0 && index < m_valueCount); + Debugging.Assert(off + len <= arr.Length); } int sets = Math.Min(m_valueCount - index, len); @@ -119,7 +119,7 @@ public override int Set(int index, long[] arr, int off, int len) public override void Fill(int fromIndex, int toIndex, long val) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => val == (val & 0xFFL)); + if (Debugging.AssertsEnabled) Debugging.Assert(val == (val & 0xFFL)); Arrays.Fill(values, fromIndex, toIndex, (byte)val); } } diff --git a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs index b0273ac250..dcc9514a13 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs @@ -126,7 +126,7 @@ private static long UnPackValue(long[] longArray, int numBits, long packIndex, l /// The low value for the current decoding index. private long CurrentLowValue() { - if (Debugging.AssertsEnabled) Debugging.Assert(() => ((efIndex >= 0) && (efIndex < numEncoded)), () => $"efIndex {efIndex.ToString(CultureInfo.InvariantCulture)}"); + if (Debugging.AssertsEnabled) Debugging.Assert(((efIndex >= 0) && (efIndex < numEncoded)), () => $"efIndex {efIndex.ToString(CultureInfo.InvariantCulture)}"); return UnPackValue(efEncoder.lowerLongs, efEncoder.numLowBits, efIndex, efEncoder.lowerBitsMask); } @@ -249,7 +249,7 @@ public virtual long NextValue() /// public virtual bool AdvanceToIndex(long index) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => index > efIndex); + if (Debugging.AssertsEnabled) Debugging.Assert(index > efIndex); if (index >= numEncoded) { efIndex = numEncoded; @@ -257,7 +257,7 @@ public virtual bool AdvanceToIndex(long index) } if (!ToAfterCurrentHighBit()) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(false); } /* CHECKME: Add a (binary) search in the upperZeroBitPositions here. */ int curSetBits = curHighLong.PopCount(); @@ -275,7 +275,7 @@ public virtual bool AdvanceToIndex(long index) */ if (!ToAfterCurrentHighBit()) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => false); + if (Debugging.AssertsEnabled) Debugging.Assert(false); } ToNextHighValue(); } @@ -312,7 +312,7 @@ public virtual long AdvanceToValue(long target) indexEntryIndex = numIndexEntries - 1; // no further than last index entry } long indexHighValue = (indexEntryIndex + 1) * efEncoder.indexInterval; - if (Debugging.AssertsEnabled) Debugging.Assert(() => indexHighValue <= highTarget); + if (Debugging.AssertsEnabled) Debugging.Assert(indexHighValue <= highTarget); if (indexHighValue > (setBitForIndex - efIndex)) // advance to just after zero bit position of index entry. 
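// LUCENENET editorial note (not part of the patch): the Debugging.Assert(false)
// conversions in AdvanceToIndex above (and in the earlier InfoStream hunk) use
// the assertion as an "unreachable branch" marker: assert-enabled runs fail
// fast, while release runs fall through exactly as the ported Java code did
// with -ea unset. Sketch of the idiom (hypothetical names):
//
//     default:
//         // fails in assert-enabled runs; a silent no-op otherwise
//         if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "unexpected state " + state);
//         break;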
 {
 setBitForIndex = UnPackValue(efEncoder.upperZeroBitPositionIndex, efEncoder.nIndexEntryBits, indexEntryIndex, indexMask);
@@ -321,7 +321,7 @@ public virtual long AdvanceToValue(long target)
 upperLong = efEncoder.upperLongs[highIndex];
 curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1)))); // may contain the unary 1 bit for efIndex
 }
- if (Debugging.AssertsEnabled) Debugging.Assert(() => efIndex < numEncoded); // there is a high value to be found.
+ if (Debugging.AssertsEnabled) Debugging.Assert(efIndex < numEncoded); // there is a high value to be found.
 }
 int curSetBits = curHighLong.PopCount(); // shifted right.
@@ -337,7 +337,7 @@ public virtual long AdvanceToValue(long target)
 }
 setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1));
 // highIndex = (int)(setBitForIndex >>> LOG2_LONG_SIZE);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => (highIndex + 1) == (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE)));
+ if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE)));
 highIndex += 1;
 upperLong = efEncoder.upperLongs[highIndex];
 curHighLong = upperLong;
@@ -348,7 +348,7 @@ public virtual long AdvanceToValue(long target)
 while (curHighLong == 0L)
 {
 setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1));
- if (Debugging.AssertsEnabled) Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+ if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
 highIndex += 1;
 upperLong = efEncoder.upperLongs[highIndex];
 curHighLong = upperLong;
@@ -356,12 +356,12 @@ public virtual long AdvanceToValue(long target)
 // curHighLong has enough clear bits to reach highTarget, has at least 1 set bit, and may not have enough set bits.
 int rank = (int)(highTarget - (setBitForIndex - efIndex)); // the rank of the zero bit for highValue.
- if (Debugging.AssertsEnabled) Debugging.Assert(() => (rank <= (sizeof(long) * 8)), () => ("rank " + rank));
+ if (Debugging.AssertsEnabled) Debugging.Assert((rank <= (sizeof(long) * 8)), () => ("rank " + rank));
 if (rank >= 1)
 {
 long invCurHighLong = ~curHighLong;
 int clearBitForValue = (rank <= 8) ? BroadWord.SelectNaive(invCurHighLong, rank) : BroadWord.Select(invCurHighLong, rank);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => clearBitForValue <= ((sizeof(long) * 8) - 1));
+ if (Debugging.AssertsEnabled) Debugging.Assert(clearBitForValue <= ((sizeof(long) * 8) - 1));
 setBitForIndex += clearBitForValue + 1; // the high bit just before setBitForIndex is zero
 int oneBitsBeforeClearBit = clearBitForValue - rank + 1;
 efIndex += oneBitsBeforeClearBit; // the high bit at setBitForIndex and belongs to the unary code for efIndex
@@ -372,14 +372,14 @@ public virtual long AdvanceToValue(long target)
 if ((setBitForIndex & ((sizeof(long) * 8) - 1)) == 0L) // exhausted curHighLong
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+ if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
 highIndex += 1;
 upperLong = efEncoder.upperLongs[highIndex];
 curHighLong = upperLong;
 }
 else
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => highIndex == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+ if (Debugging.AssertsEnabled) Debugging.Assert(highIndex == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
 curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1))));
 }
 // curHighLong has enough clear bits to reach highTarget, and may not have enough set bits.
@@ -387,14 +387,14 @@ public virtual long AdvanceToValue(long target)
 while (curHighLong == 0L)
 {
 setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1));
- if (Debugging.AssertsEnabled) Debugging.Assert(() => (highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+ if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
 highIndex += 1;
 upperLong = efEncoder.upperLongs[highIndex];
 curHighLong = upperLong;
 }
 }
 setBitForIndex += curHighLong.TrailingZeroCount();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => (setBitForIndex - efIndex) >= highTarget); // highTarget reached
+ if (Debugging.AssertsEnabled) Debugging.Assert((setBitForIndex - efIndex) >= highTarget); // highTarget reached
 // Linear search also with low values
 long currentValue = CombineHighLowValues((setBitForIndex - efIndex), CurrentLowValue());
diff --git a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
index 86db1cfaba..19b38db641 100644
--- a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
+++ b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
@@ -178,7 +178,7 @@ public EliasFanoEncoder(long numValues, long upperBound, long indexInterval)
 this.lowerLongs = new long[(int)numLongsForLowBits];
 long numHighBitsClear = (long)((ulong)((this.upperBound > 0) ? this.upperBound : 0) >> this.numLowBits);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => numHighBitsClear <= (2 * this.numValues));
+ if (Debugging.AssertsEnabled) Debugging.Assert(numHighBitsClear <= (2 * this.numValues));
 long numHighBitsSet = this.numValues;
 long numLongsForHighBits = NumInt64sForBits(numHighBitsClear + numHighBitsSet);
@@ -220,7 +220,7 @@ public EliasFanoEncoder(long numValues, long upperBound)
 ///
 private static long NumInt64sForBits(long numBits) // Note: int version in FixedBitSet.bits2words()
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => numBits >= 0, () => numBits.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(numBits >= 0, () => numBits.ToString(CultureInfo.InvariantCulture));
 return (long)((ulong)(numBits + (sizeof(long) * 8 - 1)) >> LOG2_INT64_SIZE);
 }
diff --git a/src/Lucene.Net/Util/Packed/GrowableWriter.cs b/src/Lucene.Net/Util/Packed/GrowableWriter.cs
index 635a363775..4b4cc2a27d 100644
--- a/src/Lucene.Net/Util/Packed/GrowableWriter.cs
+++ b/src/Lucene.Net/Util/Packed/GrowableWriter.cs
@@ -76,7 +76,7 @@ private void EnsureCapacity(long value)
 return;
 }
 int bitsRequired = value < 0 ? 64 : PackedInt32s.BitsRequired(value);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsRequired > current.BitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsRequired > current.BitsPerValue);
 int valueCount = Count;
 PackedInt32s.Mutable next = PackedInt32s.GetMutable(valueCount, bitsRequired, acceptableOverheadRatio);
 PackedInt32s.Copy(current, 0, next, 0, valueCount, PackedInt32s.DEFAULT_BUFFER_SIZE);
diff --git a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs
index df969a1ab8..869d78a2d1 100644
--- a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs
+++ b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs
@@ -137,7 +137,7 @@ internal override void Grow(int newBlockCount)
 internal override void PackPendingValues()
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => pendingOff > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(pendingOff > 0);
 minValues[valuesOff] = pending[0];
 averages[valuesOff] = pendingOff == 1 ? 0 : (float)(pending[pendingOff - 1] - pending[0]) / (pendingOff - 1);
diff --git a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs
index 643db0cdb9..3acd831e51 100644
--- a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs
+++ b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs
@@ -78,7 +78,7 @@ public MonotonicBlockPackedReader(IndexInput @in, int packedIntsVersion, int blo
 public override long Get(long index)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < valueCount);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < valueCount);
 int block = (int)((long)((ulong)index >> blockShift));
 int idx = (int)(index & blockMask);
 // LUCENENET NOTE: IMPORTANT: The cast to float is critical here for it to work in x86
diff --git a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs
index eecb2556c2..5176fbd9a2 100644
--- a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs
+++ b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs
@@ -66,14 +66,14 @@ public MonotonicBlockPackedWriter(DataOutput @out, int blockSize)
 public override void Add(long l)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => l >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(l >= 0);
 base.Add(l);
 }
 [MethodImpl(MethodImplOptions.NoInlining)]
 protected override void Flush()
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => m_off > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(m_off > 0);
 // TODO: perform a true linear regression?
 long min = m_values[0];
diff --git a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs
index c7d26fdd28..c235b2cbac 100644
--- a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs
+++ b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs
@@ -71,9 +71,9 @@ public override int Get(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < m_valueCount);
- Debugging.Assert(() => off + len <= arr.Length);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < m_valueCount);
+ Debugging.Assert(off + len <= arr.Length);
 }
 int gets = Math.Min(m_valueCount - index, len);
@@ -96,9 +96,9 @@ public override int Set(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < m_valueCount);
- Debugging.Assert(() => off + len <= arr.Length);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < m_valueCount);
+ Debugging.Assert(off + len <= arr.Length);
 }
 int sets = Math.Min(m_valueCount - index, len);
diff --git a/src/Lucene.Net/Util/Packed/Packed64.cs b/src/Lucene.Net/Util/Packed/Packed64.cs
index f3415dbe5b..655d610f2b 100644
--- a/src/Lucene.Net/Util/Packed/Packed64.cs
+++ b/src/Lucene.Net/Util/Packed/Packed64.cs
@@ -174,10 +174,10 @@ public override long Get(int index)
 public override int Get(int index, long[] arr, int off, int len)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < m_valueCount);
+ if (Debugging.AssertsEnabled) Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < m_valueCount);
 len = Math.Min(len, m_valueCount - index);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(off + len <= arr.Length);
 int originalIndex = index;
 PackedInt32s.IDecoder decoder = BulkOperation.Of(PackedInt32s.Format.PACKED, m_bitsPerValue);
@@ -198,15 +198,15 @@ public override int Get(int index, long[] arr, int off, int len)
 }
 // bulk get
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index % decoder.Int64ValueCount == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index % decoder.Int64ValueCount == 0);
 int blockIndex = (int)((ulong)((long)index * m_bitsPerValue) >> BLOCK_BITS);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => (((long)index * m_bitsPerValue) & MOD_MASK) == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert((((long)index * m_bitsPerValue) & MOD_MASK) == 0);
 int iterations = len / decoder.Int64ValueCount;
 decoder.Decode(blocks, blockIndex, arr, off, iterations);
 int gotValues = iterations * decoder.Int64ValueCount;
 index += gotValues;
 len -= gotValues;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => len >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(len >= 0);
 if (index > originalIndex)
 {
@@ -216,7 +216,7 @@ public override int Get(int index, long[] arr, int off, int len)
 else
 {
 // no progress so far => already at a block boundary but no full block to get
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index == originalIndex);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index == originalIndex);
 return base.Get(index, arr, off, len);
 }
 }
@@ -242,10 +242,10 @@ public override void Set(int index, long value)
 public override int Set(int index, long[] arr, int off, int len)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index >= 0 && index < m_valueCount);
+ if (Debugging.AssertsEnabled) Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < m_valueCount);
 len = Math.Min(len, m_valueCount - index);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(off + len <= arr.Length);
 int originalIndex = index;
 PackedInt32s.IEncoder encoder = BulkOperation.Of(PackedInt32s.Format.PACKED, m_bitsPerValue);
@@ -266,15 +266,15 @@ public override int Set(int index, long[] arr, int off, int len)
 }
 // bulk set
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index % encoder.Int64ValueCount == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index % encoder.Int64ValueCount == 0);
 int blockIndex = (int)((ulong)((long)index * m_bitsPerValue) >> BLOCK_BITS);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => (((long)index * m_bitsPerValue) & MOD_MASK) == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert((((long)index * m_bitsPerValue) & MOD_MASK) == 0);
 int iterations = len / encoder.Int64ValueCount;
 encoder.Encode(arr, off, blocks, blockIndex, iterations);
 int setValues = iterations * encoder.Int64ValueCount;
 index += setValues;
 len -= setValues;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => len >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(len >= 0);
 if (index > originalIndex)
 {
@@ -284,7 +284,7 @@ public override int Set(int index, long[] arr, int off, int len)
 else
 {
 // no progress so far => already at a block boundary but no full block to get
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index == originalIndex);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index == originalIndex);
 return base.Set(index, arr, off, len);
 }
 }
@@ -308,8 +308,8 @@ public override void Fill(int fromIndex, int toIndex, long val)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => PackedInt32s.BitsRequired(val) <= BitsPerValue);
- Debugging.Assert(() => fromIndex <= toIndex);
+ Debugging.Assert(PackedInt32s.BitsRequired(val) <= BitsPerValue);
+ Debugging.Assert(fromIndex <= toIndex);
 }
 // minimum number of values that use an exact number of full blocks
@@ -332,7 +332,7 @@ public override void Fill(int fromIndex, int toIndex, long val)
 Set(fromIndex++, val);
 }
 }
- if (Debugging.AssertsEnabled) Debugging.Assert(() => fromIndex % nAlignedValues == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(fromIndex % nAlignedValues == 0);
 // compute the long[] blocks for nAlignedValues consecutive values and
 // use them to set as many values as possible without applying any mask
@@ -346,7 +346,7 @@ public override void Fill(int fromIndex, int toIndex, long val)
 values.Set(i, val);
 }
 nAlignedValuesBlocks = values.blocks;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => nAlignedBlocks <= nAlignedValuesBlocks.Length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(nAlignedBlocks <= nAlignedValuesBlocks.Length);
 }
 int startBlock = (int)((ulong)((long)fromIndex * m_bitsPerValue) >> 6);
 int endBlock = (int)((ulong)((long)toIndex * m_bitsPerValue) >> 6);
diff --git a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs
index d6499c75ee..103a24abcf 100644
--- a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs
+++ b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs
@@ -50,7 +50,7 @@ private static int RequiredCapacity(int valueCount, int valuesPerBlock)
 internal Packed64SingleBlock(int valueCount, int bitsPerValue)
 : base(valueCount, bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => IsSupported(bitsPerValue));
+ if (Debugging.AssertsEnabled) Debugging.Assert(IsSupported(bitsPerValue));
 int valuesPerBlock = 64 / bitsPerValue;
 blocks = new long[RequiredCapacity(valueCount, valuesPerBlock)];
 }
@@ -73,11 +73,11 @@ public override int Get(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < m_valueCount);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < m_valueCount);
 }
 len = Math.Min(len, m_valueCount - index);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(off + len <= arr.Length);
 int originalIndex = index;
@@ -98,12 +98,12 @@ public override int Get(int index, long[] arr, int off, int len)
 }
 // bulk get
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index % valuesPerBlock == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index % valuesPerBlock == 0);
 PackedInt32s.IDecoder decoder = BulkOperation.Of(PackedInt32s.Format.PACKED_SINGLE_BLOCK, m_bitsPerValue);
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => decoder.Int64BlockCount == 1);
- Debugging.Assert(() => decoder.Int64ValueCount == valuesPerBlock);
+ Debugging.Assert(decoder.Int64BlockCount == 1);
+ Debugging.Assert(decoder.Int64ValueCount == valuesPerBlock);
 }
 int blockIndex = index / valuesPerBlock;
 int nblocks = (index + len) / valuesPerBlock - blockIndex;
@@ -121,7 +121,7 @@ public override int Get(int index, long[] arr, int off, int len)
 {
 // no progress so far => already at a block boundary but no full block to
 // get
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index == originalIndex);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index == originalIndex);
 return base.Get(index, arr, off, len);
 }
 }
@@ -130,11 +130,11 @@ public override int Set(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < m_valueCount);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < m_valueCount);
 }
 len = Math.Min(len, m_valueCount - index);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(off + len <= arr.Length);
 int originalIndex = index;
@@ -155,10 +155,10 @@ public override int Set(int index, long[] arr, int off, int len)
 }
 // bulk set
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index % valuesPerBlock == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index % valuesPerBlock == 0);
 BulkOperation op = BulkOperation.Of(PackedInt32s.Format.PACKED_SINGLE_BLOCK, m_bitsPerValue);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => op.Int64BlockCount == 1);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => op.Int64ValueCount == valuesPerBlock);
+ if (Debugging.AssertsEnabled) Debugging.Assert(op.Int64BlockCount == 1);
+ if (Debugging.AssertsEnabled) Debugging.Assert(op.Int64ValueCount == valuesPerBlock);
 int blockIndex = index / valuesPerBlock;
 int nblocks = (index + len) / valuesPerBlock - blockIndex;
 op.Encode(arr, off, blocks, blockIndex, nblocks);
@@ -175,7 +175,7 @@ public override int Set(int index, long[] arr, int off, int len)
 {
 // no progress so far => already at a block boundary but no full block to
 // set
- if (Debugging.AssertsEnabled) Debugging.Assert(() => index == originalIndex);
+ if (Debugging.AssertsEnabled) Debugging.Assert(index == originalIndex);
 return base.Set(index, arr, off, len);
 }
 }
@@ -184,9 +184,9 @@ public override void Fill(int fromIndex, int toIndex, long val)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => fromIndex >= 0);
- Debugging.Assert(() => fromIndex <= toIndex);
- Debugging.Assert(() => PackedInt32s.BitsRequired(val) <= m_bitsPerValue);
+ Debugging.Assert(fromIndex >= 0);
+ Debugging.Assert(fromIndex <= toIndex);
+ Debugging.Assert(PackedInt32s.BitsRequired(val) <= m_bitsPerValue);
 }
 int valuesPerBlock = 64 / m_bitsPerValue;
@@ -206,13 +206,13 @@ public override void Fill(int fromIndex, int toIndex, long val)
 {
 Set(fromIndex++, val);
 }
- if (Debugging.AssertsEnabled) Debugging.Assert(() => fromIndex % valuesPerBlock == 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(fromIndex % valuesPerBlock == 0);
 }
 // bulk set of the inner blocks
 int fromBlock = fromIndex / valuesPerBlock;
 int toBlock = toIndex / valuesPerBlock;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => fromBlock * valuesPerBlock == fromIndex);
+ if (Debugging.AssertsEnabled) Debugging.Assert(fromBlock * valuesPerBlock == fromIndex);
 long blockValue = 0L;
 for (int i = 0; i < valuesPerBlock; ++i)
diff --git a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs
index ff27983460..31f8012f67 100644
--- a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs
+++ b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs
@@ -68,9 +68,9 @@ public override int Get(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < m_valueCount);
- Debugging.Assert(() => off + len <= arr.Length);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < m_valueCount);
+ Debugging.Assert(off + len <= arr.Length);
 }
 int gets = Math.Min(m_valueCount - index, len);
@@ -93,9 +93,9 @@ public override int Set(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < m_valueCount);
- Debugging.Assert(() => off + len <= arr.Length);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < m_valueCount);
+ Debugging.Assert(off + len <= arr.Length);
 }
 int sets = Math.Min(m_valueCount - index, len);
diff --git a/src/Lucene.Net/Util/Packed/PackedDataInput.cs b/src/Lucene.Net/Util/Packed/PackedDataInput.cs
index 52eeb8769a..c5d944566c 100644
--- a/src/Lucene.Net/Util/Packed/PackedDataInput.cs
+++ b/src/Lucene.Net/Util/Packed/PackedDataInput.cs
@@ -53,7 +53,7 @@ public PackedDataInput(DataInput @in)
 ///
 public long ReadInt64(int bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
 long r = 0;
 while (bitsPerValue > 0)
 {
diff --git a/src/Lucene.Net/Util/Packed/PackedDataOutput.cs b/src/Lucene.Net/Util/Packed/PackedDataOutput.cs
index 168fd7b67f..39e13daa49 100644
--- a/src/Lucene.Net/Util/Packed/PackedDataOutput.cs
+++ b/src/Lucene.Net/Util/Packed/PackedDataOutput.cs
@@ -53,7 +53,7 @@ public PackedDataOutput(DataOutput @out)
 ///
 public void WriteInt64(long value, int bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue == 64 || (value >= 0 && value <= PackedInt32s.MaxValue(bitsPerValue)));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue == 64 || (value >= 0 && value <= PackedInt32s.MaxValue(bitsPerValue)));
 while (bitsPerValue > 0)
 {
 if (remainingBits == 0)
diff --git a/src/Lucene.Net/Util/Packed/PackedInts.cs b/src/Lucene.Net/Util/Packed/PackedInts.cs
index 5bd0b9b00c..cd1ef19bdb 100644
--- a/src/Lucene.Net/Util/Packed/PackedInts.cs
+++ b/src/Lucene.Net/Util/Packed/PackedInts.cs
@@ -142,7 +142,7 @@ public override bool IsSupported(int bitsPerValue)
 ///
 public override float OverheadPerValue(int bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => IsSupported(bitsPerValue));
+ if (Debugging.AssertsEnabled) Debugging.Assert(IsSupported(bitsPerValue));
 int valuesPerBlock = 64 / bitsPerValue;
 int overhead = 64 % bitsPerValue;
 return (float)overhead / valuesPerBlock;
@@ -205,7 +205,7 @@ internal Format(int id)
 ///
 public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
 // assume long-aligned
 return 8L * Int64Count(packedIntsVersion, valueCount, bitsPerValue);
 }
@@ -218,9 +218,9 @@ public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPer
 ///
 public virtual int Int64Count(int packedIntsVersion, int valueCount, int bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue >= 0 && bitsPerValue <= 64, () => bitsPerValue.ToString(CultureInfo.InvariantCulture));
 long byteCount = ByteCount(packedIntsVersion, valueCount, bitsPerValue);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => byteCount < 8L * int.MaxValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(byteCount < 8L * int.MaxValue);
 if ((byteCount % 8) == 0)
 return (int)(byteCount / 8);
 else
@@ -241,7 +241,7 @@ public virtual bool IsSupported(int bitsPerValue)
 ///
 public virtual float OverheadPerValue(int bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => IsSupported(bitsPerValue));
+ if (Debugging.AssertsEnabled) Debugging.Assert(IsSupported(bitsPerValue));
 return 0f;
 }
@@ -250,7 +250,7 @@ public virtual float OverheadPerValue(int bitsPerValue)
 ///
 public virtual float OverheadRatio(int bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => IsSupported(bitsPerValue));
+ if (Debugging.AssertsEnabled) Debugging.Assert(IsSupported(bitsPerValue));
 return OverheadPerValue(bitsPerValue) / bitsPerValue;
 }
 }
@@ -533,9 +533,9 @@ public virtual int Get(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < Count);
- Debugging.Assert(() => off + len <= arr.Length);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < Count);
+ Debugging.Assert(off + len <= arr.Length);
 }
 int gets = Math.Min(Count - index, len);
@@ -575,7 +575,7 @@ public virtual int Get(int index, long[] arr, int off, int len)
 ///
 public virtual object GetArray()
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => !HasArray);
+ if (Debugging.AssertsEnabled) Debugging.Assert(!HasArray);
 return null;
 }
@@ -635,7 +635,7 @@ protected ReaderIterator(int valueCount, int bitsPerValue, DataInput @in)
 public virtual long Next()
 {
 Int64sRef nextValues = Next(1);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => nextValues.Length > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(nextValues.Length > 0);
 long result = nextValues.Int64s[nextValues.Offset];
 ++nextValues.Offset;
 --nextValues.Length;
@@ -674,11 +674,11 @@ public virtual int Set(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < Count);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < Count);
 }
 len = Math.Min(len, Count - index);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => off + len <= arr.Length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(off + len <= arr.Length);
 for (int i = index, o = off, end = index + len; i < end; ++i, ++o)
 {
@@ -695,8 +695,8 @@ public virtual void Fill(int fromIndex, int toIndex, long val)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => val <= MaxValue(BitsPerValue));
- Debugging.Assert(() => fromIndex <= toIndex);
+ Debugging.Assert(val <= MaxValue(BitsPerValue));
+ Debugging.Assert(fromIndex <= toIndex);
 }
 for (int i = fromIndex; i < toIndex; ++i)
 {
@@ -746,7 +746,7 @@ internal abstract class ReaderImpl : Reader
 protected ReaderImpl(int valueCount, int bitsPerValue)
 {
 this.m_bitsPerValue = bitsPerValue;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
 this.m_valueCount = valueCount;
 }
@@ -765,7 +765,7 @@ public abstract class MutableImpl : Mutable
 protected MutableImpl(int valueCount, int bitsPerValue)
 {
 this.m_valueCount = valueCount;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
 this.m_bitsPerValue = bitsPerValue;
 }
@@ -796,8 +796,8 @@ public override int Get(int index, long[] arr, int off, int len)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => len > 0, () => "len must be > 0 (got " + len + ")");
- Debugging.Assert(() => index >= 0 && index < valueCount);
+ Debugging.Assert(len > 0, () => "len must be > 0 (got " + len + ")");
+ Debugging.Assert(index >= 0 && index < valueCount);
 }
 len = Math.Min(len, valueCount - index);
 Arrays.Fill(arr, off, off + len, 0);
@@ -829,8 +829,8 @@ protected Writer(DataOutput @out, int valueCount, int bitsPerValue)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => bitsPerValue <= 64);
- Debugging.Assert(() => valueCount >= 0 || valueCount == -1);
+ Debugging.Assert(bitsPerValue <= 64);
+ Debugging.Assert(valueCount >= 0 || valueCount == -1);
 }
 this.m_out = @out;
 this.m_valueCount = valueCount;
@@ -839,7 +839,7 @@ protected Writer(DataOutput @out, int valueCount, int bitsPerValue)
 internal virtual void WriteHeader()
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => m_valueCount != -1);
+ if (Debugging.AssertsEnabled) Debugging.Assert(m_valueCount != -1);
 CodecUtil.WriteHeader(m_out, CODEC_NAME, VERSION_CURRENT);
 m_out.WriteVInt32(m_bitsPerValue);
 m_out.WriteVInt32(m_valueCount);
@@ -986,7 +986,7 @@ public static Reader GetReader(DataInput @in)
 {
 int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
 int bitsPerValue = @in.ReadVInt32();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
 int valueCount = @in.ReadVInt32();
 Format format = Format.ById(@in.ReadVInt32());
@@ -1028,7 +1028,7 @@ public static IReaderIterator GetReaderIterator(DataInput @in, int mem)
 {
 int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
 int bitsPerValue = @in.ReadVInt32();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
"bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); return GetReaderIteratorNoHeader(@in, format, version, valueCount, bitsPerValue, mem); @@ -1064,7 +1064,7 @@ public static Reader GetDirectReaderNoHeader(IndexInput @in, Format format, int long byteCount = format.ByteCount(version, valueCount, bitsPerValue); if (byteCount != format.ByteCount(VERSION_CURRENT, valueCount, bitsPerValue)) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => version == VERSION_START); + if (Debugging.AssertsEnabled) Debugging.Assert(version == VERSION_START); long endPointer = @in.GetFilePointer() + byteCount; // Some consumers of direct readers assume that reading the last value // will make the underlying IndexInput go to the end of the packed @@ -1151,7 +1151,7 @@ public static Reader GetDirectReader(IndexInput @in) { int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT); int bitsPerValue = @in.ReadVInt32(); - if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); + if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue); int valueCount = @in.ReadVInt32(); Format format = Format.ById(@in.ReadVInt32()); return GetDirectReaderNoHeader(@in, format, version, valueCount, bitsPerValue); @@ -1190,7 +1190,7 @@ public static Mutable GetMutable(int valueCount, int bitsPerValue, float accepta /// public static Mutable GetMutable(int valueCount, int bitsPerValue, PackedInt32s.Format format) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(valueCount >= 0); if (format == PackedInt32s.Format.PACKED_SINGLE_BLOCK) { @@ -1317,7 +1317,7 @@ public static Writer GetWriterNoHeader(DataOutput @out, Format format, int value /// If there is a low-level I/O error. public static Writer GetWriter(DataOutput @out, int valueCount, int bitsPerValue, float acceptableOverheadRatio) { - if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount >= 0); + if (Debugging.AssertsEnabled) Debugging.Assert(valueCount >= 0); FormatAndBits formatAndBits = FastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); Writer writer = GetWriterNoHeader(@out, formatAndBits.Format, valueCount, formatAndBits.BitsPerValue, DEFAULT_BUFFER_SIZE); @@ -1364,8 +1364,8 @@ public static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int l { if (Debugging.AssertsEnabled) { - Debugging.Assert(() => srcPos + len <= src.Count); - Debugging.Assert(() => destPos + len <= dest.Count); + Debugging.Assert(srcPos + len <= src.Count); + Debugging.Assert(destPos + len <= dest.Count); } int capacity = (int)((uint)mem >> 3); if (capacity == 0) @@ -1387,17 +1387,17 @@ public static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int l /// Same as but using a pre-allocated buffer. 
 internal static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int len, long[] buf)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => buf.Length > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(buf.Length > 0);
 int remaining = 0;
 while (len > 0)
 {
 int read = src.Get(srcPos, buf, remaining, Math.Min(len, buf.Length - remaining));
- if (Debugging.AssertsEnabled) Debugging.Assert(() => read > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(read > 0);
 srcPos += read;
 len -= read;
 remaining += read;
 int written = dest.Set(destPos, buf, 0, remaining);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => written > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(written > 0);
 destPos += written;
 if (written < remaining)
 {
@@ -1428,7 +1428,7 @@ public static Header ReadHeader(DataInput @in)
 {
 int version = CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
 int bitsPerValue = @in.ReadVInt32();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue > 0 && bitsPerValue <= 64, () => "bitsPerValue=" + bitsPerValue);
 int valueCount = @in.ReadVInt32();
 Format format = Format.ById(@in.ReadVInt32());
 return new Header(format, valueCount, bitsPerValue, version);
diff --git a/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs
index 7b3bad99ed..71d388d529 100644
--- a/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs
+++ b/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs
@@ -41,7 +41,7 @@ internal PackedReaderIterator(PackedInt32s.Format format, int packedIntsVersion,
 this.packedIntsVersion = packedIntsVersion;
 bulkOperation = BulkOperation.Of(format, bitsPerValue);
 iterations = Iterations(mem);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => valueCount == 0 || iterations > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(valueCount == 0 || iterations > 0);
 nextBlocks = new byte[iterations * bulkOperation.ByteBlockCount];
 nextValues = new Int64sRef(new long[iterations * bulkOperation.ByteValueCount], 0, 0);
 nextValues.Offset = nextValues.Int64s.Length;
@@ -63,9 +63,9 @@ public override Int64sRef Next(int count)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => nextValues.Length >= 0);
- Debugging.Assert(() => count > 0);
- Debugging.Assert(() => nextValues.Offset + nextValues.Length <= nextValues.Int64s.Length);
+ Debugging.Assert(nextValues.Length >= 0);
+ Debugging.Assert(count > 0);
+ Debugging.Assert(nextValues.Offset + nextValues.Length <= nextValues.Int64s.Length);
 }
 nextValues.Offset += nextValues.Length;
diff --git a/src/Lucene.Net/Util/Packed/PackedWriter.cs b/src/Lucene.Net/Util/Packed/PackedWriter.cs
index 5023566b87..8590948b9c 100644
--- a/src/Lucene.Net/Util/Packed/PackedWriter.cs
+++ b/src/Lucene.Net/Util/Packed/PackedWriter.cs
@@ -58,8 +58,8 @@ public override void Add(long v)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), () => m_bitsPerValue.ToString(CultureInfo.InvariantCulture));
- Debugging.Assert(() => !finished);
+ Debugging.Assert(m_bitsPerValue == 64 || (v >= 0 && v <= PackedInt32s.MaxValue(m_bitsPerValue)), () => m_bitsPerValue.ToString(CultureInfo.InvariantCulture));
+ Debugging.Assert(!finished);
 }
 if (m_valueCount != -1 && written >= m_valueCount)
 {
@@ -75,7 +75,7 @@ public override void Add(long v)
 public override void Finish()
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => !finished);
+ if (Debugging.AssertsEnabled) Debugging.Assert(!finished);
 if (m_valueCount != -1)
 {
 while (written < m_valueCount)
diff --git a/src/Lucene.Net/Util/Packed/PagedMutable.cs b/src/Lucene.Net/Util/Packed/PagedMutable.cs
index 83560f51e5..2a08f45df1 100644
--- a/src/Lucene.Net/Util/Packed/PagedMutable.cs
+++ b/src/Lucene.Net/Util/Packed/PagedMutable.cs
@@ -58,7 +58,7 @@ internal PagedMutable(long size, int pageSize, int bitsPerValue, PackedInt32s.Fo
 protected override Mutable NewMutable(int valueCount, int bitsPerValue)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => this.bitsPerValue >= bitsPerValue);
+ if (Debugging.AssertsEnabled) Debugging.Assert(this.bitsPerValue >= bitsPerValue);
 return PackedInt32s.GetMutable(valueCount, this.bitsPerValue, format);
 }
diff --git a/src/Lucene.Net/Util/PagedBytes.cs b/src/Lucene.Net/Util/PagedBytes.cs
index b7231bd635..6b0c6f9be1 100644
--- a/src/Lucene.Net/Util/PagedBytes.cs
+++ b/src/Lucene.Net/Util/PagedBytes.cs
@@ -98,8 +98,8 @@ public void FillSlice(BytesRef b, long start, int length)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => length >= 0, () => "length=" + length);
- Debugging.Assert(() => length <= blockSize + 1, () => "length=" + length);
+ Debugging.Assert(length >= 0, () => "length=" + length);
+ Debugging.Assert(length <= blockSize + 1, () => "length=" + length);
 }
 b.Length = length;
 if (length == 0)
@@ -148,7 +148,7 @@ public void Fill(BytesRef b, long start)
 {
 b.Length = ((block[offset] & 0x7f) << 8) | (block[1 + offset] & 0xff);
 b.Offset = offset + 2;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => b.Length > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(b.Length > 0);
 }
 }
@@ -166,7 +166,7 @@ public long RamBytesUsed()
 ///
 public PagedBytes(int blockBits)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => blockBits > 0 && blockBits <= 31, () => blockBits.ToString(CultureInfo.InvariantCulture));
+ if (Debugging.AssertsEnabled) Debugging.Assert(blockBits > 0 && blockBits <= 31, () => blockBits.ToString(CultureInfo.InvariantCulture));
 this.blockSize = 1 << blockBits;
 this.blockBits = blockBits;
 blockMask = blockSize - 1;
@@ -226,7 +226,7 @@ public void Copy(BytesRef bytes, BytesRef @out)
 currentBlock = new byte[blockSize];
 upto = 0;
 //left = blockSize; // LUCENENET: Unnecessary assignment
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bytes.Length <= blockSize);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytes.Length <= blockSize);
 // TODO: we could also support variable block sizes
 }
@@ -380,7 +380,7 @@ public override byte ReadByte()
 public override void ReadBytes(byte[] b, int offset, int len)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => b.Length >= offset + len);
+ if (Debugging.AssertsEnabled) Debugging.Assert(b.Length >= offset + len);
 int offsetEnd = offset + len;
 while (true)
 {
@@ -436,7 +436,7 @@ public override void WriteByte(byte b)
 public override void WriteBytes(byte[] b, int offset, int length)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => b.Length >= offset + length);
+ if (Debugging.AssertsEnabled) Debugging.Assert(b.Length >= offset + length);
 if (length == 0)
 {
 return;
diff --git a/src/Lucene.Net/Util/QueryBuilder.cs b/src/Lucene.Net/Util/QueryBuilder.cs
index 6b0617371b..ab9e73059e 100644
--- a/src/Lucene.Net/Util/QueryBuilder.cs
+++ b/src/Lucene.Net/Util/QueryBuilder.cs
@@ -186,7 +186,7 @@ public virtual bool EnablePositionIncrements
 /// Slop factor for phrase/multiphrase queries.
 protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string field, string queryText, bool quoted, int phraseSlop)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => @operator == Occur.SHOULD || @operator == Occur.MUST);
+ if (Debugging.AssertsEnabled) Debugging.Assert(@operator == Occur.SHOULD || @operator == Occur.MUST);
 // Use the analyzer to get all the tokens, and then build a TermQuery,
 // PhraseQuery, or nothing based on the term count
 CachingTokenFilter buffer = null;
@@ -263,7 +263,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel
 try
 {
 bool hasNext = buffer.IncrementToken();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true);
+ if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true);
 termAtt.FillBytesRef();
 }
 catch (IOException)
@@ -289,7 +289,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel
 try
 {
 bool hasNext = buffer.IncrementToken();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true);
+ if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true);
 termAtt.FillBytesRef();
 }
 catch (IOException)
@@ -311,7 +311,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel
 try
 {
 bool hasNext = buffer.IncrementToken();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true);
+ if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true);
 termAtt.FillBytesRef();
 }
 catch (IOException)
@@ -354,7 +354,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel
 try
 {
 bool hasNext = buffer.IncrementToken();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true);
+ if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true);
 termAtt.FillBytesRef();
 if (posIncrAtt != null)
 {
@@ -405,7 +405,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel
 try
 {
 bool hasNext = buffer.IncrementToken();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => hasNext == true);
+ if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true);
 termAtt.FillBytesRef();
 if (posIncrAtt != null)
 {
diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs
index ab06ccfc7c..b5e75e08a3 100644
--- a/src/Lucene.Net/Util/RamUsageEstimator.cs
+++ b/src/Lucene.Net/Util/RamUsageEstimator.cs
@@ -781,8 +781,8 @@ public IdentityHashSet(int initialCapacity, float loadFactor)
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => initialCapacity > 0, () => "Initial capacity must be between (0, " + int.MaxValue + "].");
- Debugging.Assert(() => loadFactor > 0 && loadFactor < 1, () => "Load factor must be between (0, 1).");
+ Debugging.Assert(initialCapacity > 0, () => "Initial capacity must be between (0, " + int.MaxValue + "].");
+ Debugging.Assert(loadFactor > 0 && loadFactor < 1, () => "Load factor must be between (0, 1).");
 }
 this.LoadFactor = loadFactor;
 AllocateBuffers(RoundCapacity(initialCapacity));
@@ -793,7 +793,7 @@ public IdentityHashSet(int initialCapacity, float loadFactor)
 ///
 public bool Add(KType e)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => e != null, () => "Null keys not allowed.");
+ if (Debugging.AssertsEnabled) Debugging.Assert(e != null, () => "Null keys not allowed.");
 if (Assigned >= resizeThreshold)
 {
@@ -867,7 +867,7 @@ private void ExpandAndRehash()
 {
 object[] oldKeys = this.keys;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => Assigned >= resizeThreshold);
+ if (Debugging.AssertsEnabled) Debugging.Assert(Assigned >= resizeThreshold);
 AllocateBuffers(NextCapacity(keys.Length));
 /*
@@ -908,8 +908,8 @@ private int NextCapacity(int current) // LUCENENET NOTE: made private, since pro
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => current > 0 && ((current & (current - 1)) == 0), () => "Capacity must be a power of two.");
- Debugging.Assert(() => (current << 1) > 0, () => "Maximum capacity exceeded (" + ((int)((uint)0x80000000 >> 1)) + ").");
+ Debugging.Assert(current > 0 && ((current & (current - 1)) == 0), () => "Capacity must be a power of two.");
+ Debugging.Assert((current << 1) > 0, () => "Maximum capacity exceeded (" + ((int)((uint)0x80000000 >> 1)) + ").");
 }
 if (current < MIN_CAPACITY / 2)
diff --git a/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs b/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs
index 7143119206..737ca9b279 100644
--- a/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs
+++ b/src/Lucene.Net/Util/RecyclingByteBlockAllocator.cs
@@ -109,7 +109,7 @@ public override void RecycleByteBlocks(byte[][] blocks, int start, int end)
 blocks[i] = null;
 }
 bytesUsed.AddAndGet(-(end - stop) * m_blockSize);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed.Get() >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytesUsed.Get() >= 0);
 }
 /// The number of currently buffered blocks.
@@ -129,7 +129,7 @@ public override void RecycleByteBlocks(byte[][] blocks, int start, int end)
 /// The number of actually removed buffers.
 public int FreeBlocks(int num)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => num >= 0, () => "free blocks must be >= 0 but was: " + num);
+ if (Debugging.AssertsEnabled) Debugging.Assert(num >= 0, () => "free blocks must be >= 0 but was: " + num);
 int stop;
 int count;
 if (num > freeBlocks)
@@ -147,7 +147,7 @@ public int FreeBlocks(int num)
 freeByteBlocks[--freeBlocks] = null;
 }
 bytesUsed.AddAndGet(-count * m_blockSize);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed.Get() >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytesUsed.Get() >= 0);
 return count;
 }
 }
diff --git a/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs b/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs
index 332c4b94a2..a8d518e721 100644
--- a/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs
+++ b/src/Lucene.Net/Util/RecyclingIntBlockAllocator.cs
@@ -120,7 +120,7 @@ public override void RecycleInt32Blocks(int[][] blocks, int start, int end)
 blocks[i] = null;
 }
 bytesUsed.AddAndGet(-(end - stop) * (m_blockSize * RamUsageEstimator.NUM_BYTES_INT32));
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed.Get() >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytesUsed.Get() >= 0);
 }
 /// The number of currently buffered blocks.
@@ -140,7 +140,7 @@ public override void RecycleInt32Blocks(int[][] blocks, int start, int end)
 /// The number of actually removed buffers.
 public int FreeBlocks(int num)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => num >= 0, () => "free blocks must be >= 0 but was: " + num);
+ if (Debugging.AssertsEnabled) Debugging.Assert(num >= 0, () => "free blocks must be >= 0 but was: " + num);
 int stop;
 int count;
 if (num > freeBlocks)
@@ -158,7 +158,7 @@ public int FreeBlocks(int num)
 freeByteBlocks[--freeBlocks] = null;
 }
 bytesUsed.AddAndGet(-count * m_blockSize * RamUsageEstimator.NUM_BYTES_INT32);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => bytesUsed.Get() >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(bytesUsed.Get() >= 0);
 return count;
 }
 }
diff --git a/src/Lucene.Net/Util/RollingBuffer.cs b/src/Lucene.Net/Util/RollingBuffer.cs
index 7727d1e40c..eb753ac3b2 100644
--- a/src/Lucene.Net/Util/RollingBuffer.cs
+++ b/src/Lucene.Net/Util/RollingBuffer.cs
@@ -138,7 +138,7 @@ public virtual T Get(int pos)
 nextPos++;
 count++;
 }
- if (Debugging.AssertsEnabled) Debugging.Assert(() => InBounds(pos));
+ if (Debugging.AssertsEnabled) Debugging.Assert(InBounds(pos));
 int index = GetIndex(pos);
 //System.out.println("  pos=" + pos + " nextPos=" + nextPos + " -> index=" + index);
 //assert buffer[index].pos == pos;
@@ -156,8 +156,8 @@ public virtual void FreeBefore(int pos)
 int toFree = count - (nextPos - pos);
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => toFree >= 0);
- Debugging.Assert(() => toFree <= count, () => "toFree=" + toFree + " count=" + count);
+ Debugging.Assert(toFree >= 0);
+ Debugging.Assert(toFree <= count, () => "toFree=" + toFree + " count=" + count);
 }
 int index = nextWrite - count;
 if (index < 0)
diff --git a/src/Lucene.Net/Util/SentinelIntSet.cs b/src/Lucene.Net/Util/SentinelIntSet.cs
index ea7f13790a..44bca36259 100644
--- a/src/Lucene.Net/Util/SentinelIntSet.cs
+++ b/src/Lucene.Net/Util/SentinelIntSet.cs
@@ -114,7 +114,7 @@ public virtual int Hash(int key)
 /// (internal) Returns the slot for this key.
 public virtual int GetSlot(int key)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => key != EmptyVal);
+ if (Debugging.AssertsEnabled) Debugging.Assert(key != EmptyVal);
 int h = Hash(key);
 int s = h & (keys.Length - 1);
 if (keys[s] == key || keys[s] == EmptyVal)
@@ -134,7 +134,7 @@ public virtual int GetSlot(int key)
 /// (internal) Returns the slot for this key, or -slot-1 if not found.
 public virtual int Find(int key)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => key != EmptyVal);
+ if (Debugging.AssertsEnabled) Debugging.Assert(key != EmptyVal);
 int h = Hash(key);
 int s = h & (keys.Length - 1);
 if (keys[s] == key)
diff --git a/src/Lucene.Net/Util/Sorter.cs b/src/Lucene.Net/Util/Sorter.cs
index c1b4aa92d5..9511d4fcbc 100644
--- a/src/Lucene.Net/Util/Sorter.cs
+++ b/src/Lucene.Net/Util/Sorter.cs
@@ -185,7 +185,7 @@ internal void Reverse(int from, int to)
 internal void Rotate(int lo, int mid, int hi)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => lo <= mid && mid <= hi);
+ if (Debugging.AssertsEnabled) Debugging.Assert(lo <= mid && mid <= hi);
 if (lo == mid || mid == hi)
 {
 return;
diff --git a/src/Lucene.Net/Util/TimSorter.cs b/src/Lucene.Net/Util/TimSorter.cs
index d0cd914c47..f615bea059 100644
--- a/src/Lucene.Net/Util/TimSorter.cs
+++ b/src/Lucene.Net/Util/TimSorter.cs
@@ -67,7 +67,7 @@ protected TimSorter(int maxTempSlots)
 /// Minimum run length for an array of length .
 internal static int MinRun(int length)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => length >= MINRUN);
+ if (Debugging.AssertsEnabled) Debugging.Assert(length >= MINRUN);
 int n = length;
 int r = 0;
 while (n >= 64)
 {
@@ -76,7 +76,7 @@ internal static int MinRun(int length)
 n = (int)((uint)n >> 1);
 }
 int minRun = n + r;
- if (Debugging.AssertsEnabled) Debugging.Assert(() => minRun >= MINRUN && minRun <= THRESHOLD);
+ if (Debugging.AssertsEnabled) Debugging.Assert(minRun >= MINRUN && minRun <= THRESHOLD);
 return minRun;
 }
@@ -114,7 +114,7 @@ internal virtual void PushRunLen(int len)
 internal virtual int NextRun()
 {
 int runBase = RunEnd(0);
- if (Debugging.AssertsEnabled) Debugging.Assert(() => runBase < to);
+ if (Debugging.AssertsEnabled) Debugging.Assert(runBase < to);
 if (runBase == to - 1)
 {
 return 1;
@@ -198,7 +198,7 @@ internal virtual void Reset(int from, int to)
 internal virtual void MergeAt(int n)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => stackSize >= 2);
+ if (Debugging.AssertsEnabled) Debugging.Assert(stackSize >= 2);
 Merge(RunBase(n + 1), RunBase(n), RunEnd(n));
 for (int j = n + 1; j > 0; --j)
 {
@@ -249,7 +249,7 @@ public override void Sort(int from, int to)
 PushRunLen(NextRun());
 } while (RunEnd(0) < to);
 ExhaustStack();
- if (Debugging.AssertsEnabled) Debugging.Assert(() => RunEnd(0) == to);
+ if (Debugging.AssertsEnabled) Debugging.Assert(RunEnd(0) == to);
 }
 internal override void DoRotate(int lo, int mid, int hi)
@@ -297,7 +297,7 @@ internal override void DoRotate(int lo, int mid, int hi)
 internal virtual void MergeLo(int lo, int mid, int hi)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => Compare(lo, mid) > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(Compare(lo, mid) > 0);
 int len1 = mid - lo;
 Save(lo, len1);
 Copy(mid, lo);
@@ -335,12 +335,12 @@ internal virtual void MergeLo(int lo, int mid, int hi)
 {
 Restore(i++, dest);
 }
- if (Debugging.AssertsEnabled) Debugging.Assert(() => j == dest);
+ if (Debugging.AssertsEnabled) Debugging.Assert(j == dest);
 }
 internal virtual void MergeHi(int lo, int mid, int hi)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => Compare(mid - 1, hi - 1) > 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(Compare(mid - 1, hi - 1) > 0);
 int len2 = hi - mid;
 Save(mid, len2);
 Copy(mid - 1, hi - 1);
@@ -378,7 +378,7 @@ internal virtual void MergeHi(int lo, int mid, int hi)
 {
 Restore(j--, dest);
 }
- if (Debugging.AssertsEnabled) Debugging.Assert(() => i == dest);
+ if (Debugging.AssertsEnabled) Debugging.Assert(i == dest);
 }
 internal virtual int LowerSaved(int from, int to, int val)
diff --git a/src/Lucene.Net/Util/UnicodeUtil.cs b/src/Lucene.Net/Util/UnicodeUtil.cs
index 721a695cf8..04908c2cfd 100644
--- a/src/Lucene.Net/Util/UnicodeUtil.cs
+++ b/src/Lucene.Net/Util/UnicodeUtil.cs
@@ -830,7 +830,7 @@ public static void UTF8toUTF16(byte[] utf8, int offset, int length, CharsRef cha
 int b = utf8[offset++] & 0xff;
 if (b < 0xc0)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => b < 0x80);
+ if (Debugging.AssertsEnabled) Debugging.Assert(b < 0x80);
 @out[out_offset++] = (char)b;
 }
 else if (b < 0xe0)
@@ -844,7 +844,7 @@ public static void UTF8toUTF16(byte[] utf8, int offset, int length, CharsRef cha
 }
 else
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => b < 0xf8, () => "b = 0x" + b.ToString("x"));
+ if (Debugging.AssertsEnabled) Debugging.Assert(b < 0xf8, () => "b = 0x" + b.ToString("x"));
 int ch = ((b & 0x7) << 18) + ((utf8[offset] & 0x3f) << 12) + ((utf8[offset + 1] & 0x3f) << 6) + (utf8[offset + 2] & 0x3f);
 offset += 3;
 if (ch < UNI_MAX_BMP)
diff --git a/src/Lucene.Net/Util/WAH8DocIdSet.cs b/src/Lucene.Net/Util/WAH8DocIdSet.cs
index 05b88629e3..fdecd07b8c 100644
--- a/src/Lucene.Net/Util/WAH8DocIdSet.cs
+++ b/src/Lucene.Net/Util/WAH8DocIdSet.cs
@@ -154,7 +154,7 @@ public static WAH8DocIdSet Intersect(ICollection docIdSets, int in
 wordNum = iterators[i].wordNum;
 goto mainContinue;
 }
- if (Debugging.AssertsEnabled) Debugging.Assert(() => iterators[i].wordNum == wordNum);
+ if (Debugging.AssertsEnabled) Debugging.Assert(iterators[i].wordNum == wordNum);
 word &= iterators[i].word;
 if (word == 0)
 {
@@ -164,7 +164,7 @@ public static WAH8DocIdSet Intersect(ICollection docIdSets, int in
 }
 }
 // Found a common word
- if (Debugging.AssertsEnabled) Debugging.Assert(() => word != 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(word != 0);
 builder.AddWord(wordNum, word);
 ++wordNum;
 mainContinue:;
@@ -250,7 +250,7 @@ protected internal override bool LessThan(Iterator a, Iterator b)
 internal static int WordNum(int docID)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => docID >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0);
 return (int)((uint)docID >> 3);
 }
@@ -302,8 +302,8 @@ internal virtual void WriteHeader(bool reverse, int cleanLength, int dirtyLength
 int cleanLengthMinus2 = cleanLength - 2;
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => cleanLengthMinus2 >= 0);
- Debugging.Assert(() => dirtyLength >= 0);
+ Debugging.Assert(cleanLengthMinus2 >= 0);
+ Debugging.Assert(dirtyLength >= 0);
 }
 int token = ((cleanLengthMinus2 & 0x03) << 4) | (dirtyLength & 0x07);
 if (reverse)
@@ -333,15 +333,15 @@ private bool SequenceIsConsistent() // Called only from assert
 {
 for (int i = 1; i < dirtyWords.Length; ++i)
 {
- Debugging.Assert(() => dirtyWords.Bytes[i - 1] != 0 || dirtyWords.Bytes[i] != 0);
- Debugging.Assert(() => (byte)dirtyWords.Bytes[i - 1] != 0xFF || (byte)dirtyWords.Bytes[i] != 0xFF);
+ Debugging.Assert(dirtyWords.Bytes[i - 1] != 0 || dirtyWords.Bytes[i] != 0);
+ Debugging.Assert((byte)dirtyWords.Bytes[i - 1] != 0xFF || (byte)dirtyWords.Bytes[i] != 0xFF);
 }
 return true;
 }
 internal virtual void WriteSequence()
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(SequenceIsConsistent);
+ if (Debugging.AssertsEnabled) Debugging.Assert(SequenceIsConsistent());
 try
 {
 WriteHeader(reverse, clean, dirtyWords.Length);
@@ -359,8 +359,8 @@ internal virtual void AddWord(int wordNum, byte word)
 {
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => wordNum > lastWordNum);
- Debugging.Assert(() => word != 0);
+ Debugging.Assert(wordNum > lastWordNum);
+ Debugging.Assert(word != 0);
 }
 if (!reverse)
 {
@@ -403,7 +403,7 @@ internal virtual void AddWord(int wordNum, byte word)
 }
 else
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => lastWordNum >= 0);
+ if (Debugging.AssertsEnabled) Debugging.Assert(lastWordNum >= 0);
 switch (wordNum - lastWordNum)
 {
 case 1:
@@ -453,7 +453,7 @@ public virtual WAH8DocIdSet Build()
 {
 if (cardinality == 0)
 {
- if (Debugging.AssertsEnabled) Debugging.Assert(() => lastWordNum == -1);
+ if (Debugging.AssertsEnabled) Debugging.Assert(lastWordNum == -1);
 return EMPTY;
 }
 WriteSequence();
@@ -478,8 +478,8 @@ public virtual WAH8DocIdSet Build()
 Iterator it = new Iterator(data, cardinality, int.MaxValue, SINGLE_ZERO_BUFFER, SINGLE_ZERO_BUFFER);
 if (Debugging.AssertsEnabled)
 {
- Debugging.Assert(() => it.@in.Position == 0);
- Debugging.Assert(() => it.wordNum == -1);
+ Debugging.Assert(it.@in.Position == 0);
@@ -487,7 +487,7 @@ public virtual WAH8DocIdSet Build()
                     for (int j = 0; j < indexInterval; ++j)
                     {
                         bool readSequence = it.ReadSequence();
-                        if (Debugging.AssertsEnabled) Debugging.Assert(() => readSequence);
+                        if (Debugging.AssertsEnabled) Debugging.Assert(readSequence);
                         it.SkipDirtyBytes();
                     }
                     int position = it.@in.Position;
@@ -687,7 +687,7 @@ internal virtual bool ReadSequence()
                     allOnesLength = ReadCleanLength(@in, token);
                 }
                 dirtyLength = ReadDirtyLength(@in, token);
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => @in.Length - @in.Position >= dirtyLength, () => @in.Position + " " + @in.Length + " " + dirtyLength);
+                if (Debugging.AssertsEnabled) Debugging.Assert(@in.Length - @in.Position >= dirtyLength, () => @in.Position + " " + @in.Length + " " + dirtyLength);
                 ++sequenceNum;
                 return true;
             }
@@ -696,8 +696,8 @@ internal virtual void SkipDirtyBytes(int count)
             {
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => count >= 0);
-                    Debugging.Assert(() => count <= allOnesLength + dirtyLength);
+                    Debugging.Assert(count >= 0);
+                    Debugging.Assert(count <= allOnesLength + dirtyLength);
                 }
                 wordNum += count;
                 if (count <= allOnesLength)
@@ -744,7 +744,7 @@ internal virtual void NextWord()
                     word = @in.ReadByte();
                     ++wordNum;
                     --dirtyLength;
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => word != 0); // never more than one consecutive 0
+                    if (Debugging.AssertsEnabled) Debugging.Assert(word != 0); // never more than one consecutive 0
                     return;
                 }
             }
@@ -761,8 +761,8 @@ internal virtual int ForwardBinarySearch(int targetWordNum)
                 int lo = sequenceNum / indexInterval, hi = lo + 1;
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => sequenceNum == -1 || wordNums.Get(lo) <= wordNum);
-                    Debugging.Assert(() => lo + 1 == wordNums.Count || wordNums.Get(lo + 1) > wordNum);
+                    Debugging.Assert(sequenceNum == -1 || wordNums.Get(lo) <= wordNum);
+                    Debugging.Assert(lo + 1 == wordNums.Count || wordNums.Get(lo + 1) > wordNum);
                 }
                 while (true)
                 {
@@ -796,15 +796,15 @@ internal virtual int ForwardBinarySearch(int targetWordNum)
                 }
                 if (Debugging.AssertsEnabled)
                 {
-                    Debugging.Assert(() => wordNums.Get(hi) <= targetWordNum);
-                    Debugging.Assert(() => hi + 1 == wordNums.Count || wordNums.Get(hi + 1) > targetWordNum);
+                    Debugging.Assert(wordNums.Get(hi) <= targetWordNum);
+                    Debugging.Assert(hi + 1 == wordNums.Count || wordNums.Get(hi + 1) > targetWordNum);
                 }
                 return hi;
             }

             internal virtual void AdvanceWord(int targetWordNum)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => targetWordNum > wordNum);
+                if (Debugging.AssertsEnabled) Debugging.Assert(targetWordNum > wordNum);
                 int delta = targetWordNum - wordNum;
                 if (delta <= allOnesLength + dirtyLength + 1)
                 {
@@ -813,7 +813,7 @@ internal virtual void AdvanceWord(int targetWordNum)
                 else
                 {
                     SkipDirtyBytes();
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => dirtyLength == 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(dirtyLength == 0);
                     if (delta > indexThreshold)
                     {
                         // use the index
@@ -865,7 +865,7 @@ public override int NextDoc()
                         return docID = NO_MORE_DOCS;
                     }
                     bitList = BitUtil.BitList(word);
-                    if (Debugging.AssertsEnabled) Debugging.Assert(() => bitList != 0);
+                    if (Debugging.AssertsEnabled) Debugging.Assert(bitList != 0);
                     docID = (wordNum << 3) | ((bitList & 0x0F) - 1);
                     bitList = (int)((uint)bitList >> 4);
                     return docID;
@@ -873,7 +873,7 @@ public override int NextDoc()

             public override int Advance(int target)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => target > docID);
+                if (Debugging.AssertsEnabled) Debugging.Assert(target > docID);
                 int targetWordNum = WordNum(target);
                 if (targetWordNum > this.wordNum)
                 {
diff --git a/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs b/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs
index 232e9a0753..1c15d4d70e 100644
--- a/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs
+++ b/src/dotnet/Lucene.Net.Tests.ICU/Search/PostingsHighlight/TestICUPostingsHighlighter.cs
@@ -776,8 +776,8 @@ protected override IList<string[]> LoadFieldValues(IndexSearcher searcher, strin
                 {
                     if (Debugging.AssertsEnabled)
                     {
-                        Debugging.Assert(() => fields.Length == 1);
-                        Debugging.Assert(() => docids.Length == 1);
+                        Debugging.Assert(fields.Length == 1);
+                        Debugging.Assert(docids.Length == 1);
                     }
                     String[][] contents = RectangularArrays.ReturnRectangularArray(1, 1); //= new String[1][1];
                     contents[0][0] = text;
@@ -1181,7 +1181,7 @@ internal class GetMultiValuedSeparatorPostingsHighlighter : ICUPostingsHighlight
         {
             protected override char GetMultiValuedSeparator(string field)
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert(() => field.Equals("body", StringComparison.Ordinal));
+                if (Debugging.AssertsEnabled) Debugging.Assert(field.Equals("body", StringComparison.Ordinal));
                 return '\u2029';
             }
         }
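
Note on the pattern in the preceding diffs: the old Func<bool> overload deferred evaluating the condition, but it allocated a delegate (and usually a closure over locals) on every call, even when the assert passed. The bool overload allocates nothing, and the `if (Debugging.AssertsEnabled)` guard at each call site still keeps the condition itself from being evaluated when asserts are off. Message factories remain Func<string>, so failure messages are only built when an assert actually fires. A minimal sketch of the converted call-site pattern (AssertPatternExample, CheckMinRun, and the constant values are illustrative stand-ins, not actual repository code):

    using System;
    using Lucene.Net.Diagnostics; // internal in the real assembly; shown for illustration

    internal static class AssertPatternExample
    {
        private const int MINRUN = 32;    // stand-ins for the TimSorter constants
        private const int THRESHOLD = 64;

        internal static int CheckMinRun(int minRun)
        {
            // Guard first: with asserts disabled, neither the condition nor the
            // message factory is evaluated, and nothing is allocated.
            if (Debugging.AssertsEnabled)
                Debugging.Assert(minRun >= MINRUN && minRun <= THRESHOLD,
                    () => "minRun = " + minRun); // deferred - only built on failure
            return minRun;
        }
    }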

From 79041e2e22c761fb2fd07d5540c9cdba9e6e93bb Mon Sep 17 00:00:00 2001
From: Shad Storhaug
Date: Sat, 22 Aug 2020 14:00:48 +0700
Subject: [PATCH 10/13] Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase:
 Fixed issue with reflection not finding the method due to the Func<bool>
 parameter of Debugging.Assert()

---
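Note: besides fixing the reflection lookup, the replacement below changes behavior. Debugging.Assert() is a no-op unless the "assert" system property is enabled, so the old code silently accepted max < min whenever asserts were off; the guard clause throws unconditionally. A hedged usage sketch (the direct call is for demonstration only; randomIntBetween is internal to the test framework):

    try
    {
        int value = randomIntBetween(10, 5); // max < min
    }
    catch (ArgumentOutOfRangeException)
    {
        // Now thrown in every configuration, not only when asserts are enabled.
    }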
 .../Support/JavaCompatibility/LuceneTestCase.cs | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs
index d2c7955eee..98f0f95b39 100644
--- a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs
@@ -2,6 +2,7 @@
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
+using System.Diagnostics;
 using Assert = Lucene.Net.TestFramework.Assert;
 using JCG = J2N.Collections.Generic;

@@ -259,7 +260,9 @@ internal int randomInt(int max)
         [ExceptionToNetNumericConvention] // LUCENENET: This is for making test porting easier, keeping as-is
         internal int randomIntBetween(int min, int max)
         {
-            if (Debugging.AssertsEnabled) Debugging.Assert(max >= min, () => "max must be >= min: " + min + ", " + max);
+            // LUCENENET specific - added guard clause instead of assert
+            if (max < min)
+                throw new ArgumentOutOfRangeException(nameof(max), $"max must be >= min: {min}, {max}");
             long range = (long)max - (long)min;
             if (range < int.MaxValue)
             {

From b3525cd5102d7791666977463a0b2da1a7da9e3a Mon Sep 17 00:00:00 2001
From: Shad Storhaug
Date: Sat, 22 Aug 2020 15:24:56 +0700
Subject: [PATCH 11/13] Codecs: Reverted term vectors readers to use
 InvalidOperationException always instead of AssertionException only when
 assert is enabled

---
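Note: after this change the three readers enforce the same contract with or without asserts enabled: NextPosition() may be called at most Freq times per document, and one call too many throws InvalidOperationException("Read past last position") in every build, rather than tripping an assert only under test. A hedged consumer sketch (positionsEnum stands for any DocsAndPositionsEnum obtained from one of these readers):

    // Read exactly Freq positions; one extra call would now throw
    // InvalidOperationException in Release and Debug builds alike.
    int freq = positionsEnum.Freq;
    for (int i = 0; i < freq; i++)
    {
        int position = positionsEnum.NextPosition();
        // ... consume position ...
    }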
 .../SimpleText/SimpleTextTermVectorsReader.cs    | 14 ++++++++++++--
 .../Codecs/Lucene3x/Lucene3xTermVectorsReader.cs | 12 +++++++++++-
 .../Codecs/Lucene40/Lucene40TermVectorsReader.cs | 12 +++++++++++-
 3 files changed, 34 insertions(+), 4 deletions(-)

diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
index 729d5b4dd9..edbeccbb99 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs
@@ -540,8 +540,18 @@ public override BytesRef GetPayload()

             public override int NextPosition()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert((_positions != null && _nextPos < _positions.Length) ||
-                    _startOffsets != null && _nextPos < _startOffsets.Length);
+                //if (Debugging.AssertsEnabled) Debugging.Assert((_positions != null && _nextPos < _positions.Length) ||
+                //    _startOffsets != null && _nextPos < _startOffsets.Length);
+
+                // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is
+                // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the
+                // part that is checking for an error after reading to the end of the enumerator.
+
+                // In .NET it is more natural to throw an InvalidOperationException in this case, since we would potentially get an
+                // IndexOutOfRangeException if we didn't, which doesn't really provide good feedback as to what the cause is.
+                // This matches the behavior of Lucene 8.x. See #267.
+                if (((_positions != null && _nextPos < _positions.Length) || _startOffsets != null && _nextPos < _startOffsets.Length) == false)
+                    throw new InvalidOperationException("Read past last position");

                 if (_positions != null)
                 {
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
index 5e23d2cbce..8f3d99bf21 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
@@ -740,7 +740,17 @@ public override BytesRef GetPayload()

             public override int NextPosition()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
+                //if (Debugging.AssertsEnabled) Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
+
+                // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is
+                // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the
+                // part that is checking for an error after reading to the end of the enumerator.
+
+                // In .NET it is more natural to throw an InvalidOperationException in this case, since we would potentially get an
+                // IndexOutOfRangeException if we didn't, which doesn't really provide good feedback as to what the cause is.
+                // This matches the behavior of Lucene 8.x. See #267.
+                if (((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length) == false)
+                    throw new InvalidOperationException("Read past last position");

                 if (positions != null)
                 {
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
index d58a59daa5..1b1a97013a 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
@@ -733,7 +733,17 @@ public override BytesRef GetPayload()

             public override int NextPosition()
             {
-                if (Debugging.AssertsEnabled) Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
+                //if (Debugging.AssertsEnabled) Debugging.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
+
+                // LUCENENET: The above assertion was for control flow when testing. In Java, it would throw an AssertionError, which is
+                // caught by the BaseTermVectorsFormatTestCase.assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) method in the
+                // part that is checking for an error after reading to the end of the enumerator.
+
+                // In .NET it is more natural to throw an InvalidOperationException in this case, since we would potentially get an
+                // IndexOutOfRangeException if we didn't, which doesn't really provide good feedback as to what the cause is.
+                // This matches the behavior of Lucene 8.x. See #267.
+                if (((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length) == false)
+                    throw new InvalidOperationException("Read past last position");

                 if (positions != null)
                 {

From 95c9c93e09209bc4dfecf7fe3e4460f2da11605f Mon Sep 17 00:00:00 2001
From: Shad Storhaug
Date: Sat, 22 Aug 2020 15:26:32 +0700
Subject: [PATCH 12/13] Lucene.Net.Tests: Don't run tests that require asserts
 unless asserts are enabled. (closes #326, see #313)

---
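Note: these tests rely on assertion failures for control flow, so they cannot prove anything when asserts are off. Gating them with AssumeTrue makes NUnit report them as inconclusive rather than passed or failed. The pattern, sketched (the test name is illustrative):

    [Test]
    public virtual void TestSomethingThatNeedsAsserts()
    {
        // Skipped (reported inconclusive), not failed, when asserts are off.
        AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);

        // ... body that depends on Debugging.Assert() firing ...
    }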
 .../Index/TestFlushByRamOrCountsPolicy.cs | 15 +++++++++++
 .../Index/TestIndexWriterExceptions.cs    | 26 +++++++++++-----------
 2 files changed, 30 insertions(+), 11 deletions(-)

diff --git a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
index 4bdfe89a27..58bd570bb3 100644
--- a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
@@ -1,5 +1,6 @@
 using J2N.Threading;
 using J2N.Threading.Atomic;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
 using NUnit.Framework;
@@ -62,6 +63,9 @@ public override void AfterClass()
         [Test]
         public virtual void TestFlushByRam()
         {
+            // LUCENENET specific - disable the test if asserts are not enabled
+            AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
+
             double ramBuffer = (TestNightly ? 1 : 10) + AtLeast(2) + Random.NextDouble();
             RunFlushByRam(1 + Random.Next(TestNightly ? 5 : 1), ramBuffer, false);
         }
@@ -69,6 +73,9 @@ public virtual void TestFlushByRam()
         [Test]
         public virtual void TestFlushByRamLargeBuffer()
         {
+            // LUCENENET specific - disable the test if asserts are not enabled
+            AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
+
             // with a 256 mb ram buffer we should never stall
             RunFlushByRam(1 + Random.Next(TestNightly ? 5 : 1), 256d, true);
         }
@@ -132,6 +139,9 @@ protected internal virtual void RunFlushByRam(int numThreads, double maxRamMB, b
         [Test]
         public virtual void TestFlushDocCount()
         {
+            // LUCENENET specific - disable the test if asserts are not enabled
+            AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
+
             int[] numThreads = new int[] { 2 + AtLeast(1), 1 };
             for (int i = 0; i < numThreads.Length; i++)
             {
@@ -184,6 +194,9 @@ public virtual void TestFlushDocCount()
         [Test]
         public virtual void TestRandom()
         {
+            // LUCENENET specific - disable the test if asserts are not enabled
+            AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
+
             int numThreads = 1 + Random.Next(8);
             int numDocumentsToIndex = 50 + AtLeast(70);
             AtomicInt32 numDocs = new AtomicInt32(numDocumentsToIndex);
@@ -247,6 +260,8 @@ public virtual void TestRandom()
         [Slow] // LUCENENET: occasionally
         public virtual void TestStallControl()
         {
+            // LUCENENET specific - disable the test if asserts are not enabled
+            AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
             int[] numThreads = new int[] { 4 + Random.Next(8), 1 };
             int numDocumentsToIndex = 50 + Random.Next(50);

diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
index e81a33d5ab..58ce2a233f 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
@@ -2,6 +2,7 @@
 using J2N.Threading.Atomic;
 using Lucene.Net.Analysis;
 using Lucene.Net.Attributes;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
@@ -497,6 +498,9 @@ public override void Reset()
         [Test]
         public virtual void TestExceptionDocumentsWriterInit()
         {
+            // LUCENENET specific - disable the test if asserts are not enabled
+            AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
+
             Directory dir = NewDirectory();
             TestPoint2 testPoint = new TestPoint2();
             IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)), testPoint);
@@ -576,6 +580,9 @@ public void Apply(string name)
         [Test]
         public virtual void TestExceptionOnMergeInit([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
         {
+            // LUCENENET specific - disable the test if asserts are not enabled
+            AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
+
             Directory dir = NewDirectory();
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());

@@ -1296,23 +1303,17 @@ public void Apply(string name)
         [Test]
         public virtual void TestRollbackExceptionHang()
         {
+            // LUCENENET specific - disable the test if asserts are not enabled
+            AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
+
             Directory dir = NewDirectory();
             TestPoint4 testPoint = new TestPoint4();
             IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)), testPoint);

             AddDoc(w);
             testPoint.doFail = true;
-            try
-            {
-                w.Rollback();
-                Assert.Fail("did not hit intentional RuntimeException");
-            }
-#pragma warning disable 168
-            catch (Exception re)
-#pragma warning restore 168
-            {
-                // expected
-            }
+            // LUCENENET: Don't assert in try block
+            Assert.Throws<Exception>(() => w.Rollback(), "did not hit intentional RuntimeException");

             testPoint.doFail = false;
             w.Rollback();
RuntimeException"); testPoint.doFail = false; w.Rollback(); @@ -2435,6 +2436,9 @@ protected override void HandleMergeException(Exception exc) [Test] public virtual void TestExceptionDuringRollback() { + // LUCENENET specific - disable the test if asserts are not enabled + AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled); + // currently: fail in two different places string messageToFailOn = Random.NextBoolean() ? "rollback: done finish merges" : "rollback before checkpoint"; From 17e98afd2d688f6475b106d55176f697c1338319 Mon Sep 17 00:00:00 2001 From: Shad Storhaug Date: Tue, 25 Aug 2020 00:53:21 +0700 Subject: [PATCH 13/13] azure-pipelines.yml: Added option to disable asserts when running tests --- azure-pipelines.yml | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 2632b46095..2c00b4965a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -36,6 +36,7 @@ name: 'vNext$(rev:.r)' # Format for build number (will be overridden) # Testing variables # RunTests: 'true' (Optional - set to 'false' to disable test jobs - useful for debugging. If not provided, tests will be run.) +# AssertsEnabled: 'true' (Optional - set to 'false' to run tests without asserts, which is less thorough. This can speed up testing and verify the application will run without asserts.) # IsNightly: 'false' (Optional - set to 'true' to run additional tests for the nightly build) # IsWeekly: 'false' (Optional - set to 'true' to run additional tests for the weekly build) # RunSlowTests: 'true' (Optional - set to 'false' to skip slow tests to make testing time shorter) @@ -143,6 +144,7 @@ stages: - pwsh: | # Generate a lucene.testsettings.json file for use with the test framework + $assert = if ($Env:AssertsEnabled -ne 'false') { 'true' } else { 'false' } $nightly = if ($Env:IsNightly -eq 'true') { 'true' } else { 'false' } $weekly = if ($Env:IsWeekly -eq 'true') { 'true' } else { 'false' } $slow = if ($Env:RunSlowTests -ne 'false') { 'true' } else { 'false' } @@ -153,18 +155,21 @@ stages: $directory = if ($Env:Directory -eq $null) { 'random' } else { $Env:Directory } $verbose = if ($Env:Verbose -eq 'true') { 'true' } else { 'false' } $multiplier = if ($Env:Multiplier -eq $null) { '1' } else { $Env:Multiplier } - $fileText = "{`n`t""tests"":`n`t{`n`t`t" + - """nightly"": ""$nightly"",`n`t`t" + - """weekly"": ""$weekly"",`n`t`t" + - """slow"": ""$slow"",`n`t`t" + - """awaitsfix"": ""$awaitsFix"",`n`t`t" + - """codec"": ""$codec"",`n`t`t" + - """docvaluesformat"": ""$docValuesFormat"",`n`t`t" + - """postingsformat"": ""$postingsFormat"",`n`t`t" + - """directory"": ""$directory"",`n`t`t" + - """verbose"": ""$verbose"",`n`t`t" + - """multiplier"": ""$multiplier""`n`t" + - "}`n}" + $fileText = "{`n`t" + + """assert"": ""$assert"",`n`t" + + """tests"": {`n`t`t" + + """nightly"": ""$nightly"",`n`t`t" + + """weekly"": ""$weekly"",`n`t`t" + + """slow"": ""$slow"",`n`t`t" + + """awaitsfix"": ""$awaitsFix"",`n`t`t" + + """codec"": ""$codec"",`n`t`t" + + """docvaluesformat"": ""$docValuesFormat"",`n`t`t" + + """postingsformat"": ""$postingsFormat"",`n`t`t" + + """directory"": ""$directory"",`n`t`t" + + """verbose"": ""$verbose"",`n`t`t" + + """multiplier"": ""$multiplier""`n`t" + + "}`n" + + "}" Out-File -filePath "$(Build.ArtifactStagingDirectory)/$(TestSettingsFileName)" -encoding UTF8 -inputObject $fileText displayName: 'Persist Test Settings to lucene.testsettings.json' condition: and(succeeded(), 
 azure-pipelines.yml | 29 +++++++++++++++++------------
 1 file changed, 17 insertions(+), 12 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 2632b46095..2c00b4965a 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -36,6 +36,7 @@ name: 'vNext$(rev:.r)' # Format for build number (will be overridden)
 # Testing variables
 #
 # RunTests: 'true' (Optional - set to 'false' to disable test jobs - useful for debugging. If not provided, tests will be run.)
+# AssertsEnabled: 'true' (Optional - set to 'false' to run tests without asserts, which is less thorough. This can speed up testing and verify the application will run without asserts.)
 # IsNightly: 'false' (Optional - set to 'true' to run additional tests for the nightly build)
 # IsWeekly: 'false' (Optional - set to 'true' to run additional tests for the weekly build)
 # RunSlowTests: 'true' (Optional - set to 'false' to skip slow tests to make testing time shorter)
@@ -143,6 +144,7 @@ stages:

           - pwsh: |
               # Generate a lucene.testsettings.json file for use with the test framework
+              $assert = if ($Env:AssertsEnabled -ne 'false') { 'true' } else { 'false' }
               $nightly = if ($Env:IsNightly -eq 'true') { 'true' } else { 'false' }
               $weekly = if ($Env:IsWeekly -eq 'true') { 'true' } else { 'false' }
               $slow = if ($Env:RunSlowTests -ne 'false') { 'true' } else { 'false' }
@@ -153,18 +155,21 @@ stages:
               $directory = if ($Env:Directory -eq $null) { 'random' } else { $Env:Directory }
               $verbose = if ($Env:Verbose -eq 'true') { 'true' } else { 'false' }
               $multiplier = if ($Env:Multiplier -eq $null) { '1' } else { $Env:Multiplier }
-              $fileText = "{`n`t""tests"":`n`t{`n`t`t" +
-                          """nightly"": ""$nightly"",`n`t`t" +
-                          """weekly"": ""$weekly"",`n`t`t" +
-                          """slow"": ""$slow"",`n`t`t" +
-                          """awaitsfix"": ""$awaitsFix"",`n`t`t" +
-                          """codec"": ""$codec"",`n`t`t" +
-                          """docvaluesformat"": ""$docValuesFormat"",`n`t`t" +
-                          """postingsformat"": ""$postingsFormat"",`n`t`t" +
-                          """directory"": ""$directory"",`n`t`t" +
-                          """verbose"": ""$verbose"",`n`t`t" +
-                          """multiplier"": ""$multiplier""`n`t" +
-                          "}`n}"
+              $fileText = "{`n`t" +
+                          """assert"": ""$assert"",`n`t" +
+                          """tests"": {`n`t`t" +
+                          """nightly"": ""$nightly"",`n`t`t" +
+                          """weekly"": ""$weekly"",`n`t`t" +
+                          """slow"": ""$slow"",`n`t`t" +
+                          """awaitsfix"": ""$awaitsFix"",`n`t`t" +
+                          """codec"": ""$codec"",`n`t`t" +
+                          """docvaluesformat"": ""$docValuesFormat"",`n`t`t" +
+                          """postingsformat"": ""$postingsFormat"",`n`t`t" +
+                          """directory"": ""$directory"",`n`t`t" +
+                          """verbose"": ""$verbose"",`n`t`t" +
+                          """multiplier"": ""$multiplier""`n`t" +
+                          "}`n" +
+                          "}"
               Out-File -filePath "$(Build.ArtifactStagingDirectory)/$(TestSettingsFileName)" -encoding UTF8 -inputObject $fileText
             displayName: 'Persist Test Settings to lucene.testsettings.json'
             condition: and(succeeded(), ne(variables['RunTests'], 'false'))