diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000..bf63574970 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,175 @@ +# You can modify the rules from these initially generated values to suit your own policies +# You can learn more about editorconfig here: https://docs.microsoft.com/en-us/visualstudio/ide/editorconfig-code-style-settings-reference + +[*] +charset = utf-8 +trim_trailing_whitespace = true + +[*.md] +indent_style = space +indent_size = 4 + +# C# files +[*.cs] + +#### Core EditorConfig Options #### + +#Formatting - indentation + +#use soft tabs (spaces) for indentation +indent_style = space + +#Formatting - indentation options + +#indent switch case contents. +csharp_indent_case_contents = true +#indent switch labels +csharp_indent_switch_labels = true + +#Formatting - new line options + +#place catch statements on a new line +csharp_new_line_before_catch = true +#place else statements on a new line +csharp_new_line_before_else = true +#require finally statements to be on a new line after the closing brace +csharp_new_line_before_finally = true +#require members of object initializers to be on the same line +csharp_new_line_before_members_in_object_initializers = false +#require braces to be on a new line for control_blocks, types, properties, and methods (also known as "Allman" style) +csharp_new_line_before_open_brace = control_blocks, types, properties, methods + +#Formatting - organize using options + +#do not place System.* using directives before other using directives +dotnet_sort_system_directives_first = false + +#Formatting - spacing options + +#require NO space between a cast and the value +csharp_space_after_cast = false +#require a space before the colon for bases or interfaces in a type declaration +csharp_space_after_colon_in_inheritance_clause = true +#require a space after a keyword in a control flow statement such as a for loop +csharp_space_after_keywords_in_control_flow_statements = true +#require a space 
before the colon for bases or interfaces in a type declaration +csharp_space_before_colon_in_inheritance_clause = true +#remove space within empty argument list parentheses +csharp_space_between_method_call_empty_parameter_list_parentheses = false +#remove space between method call name and opening parenthesis +csharp_space_between_method_call_name_and_opening_parenthesis = false +#do not place space characters after the opening parenthesis and before the closing parenthesis of a method call +csharp_space_between_method_call_parameter_list_parentheses = false +#remove space within empty parameter list parentheses for a method declaration +csharp_space_between_method_declaration_empty_parameter_list_parentheses = false +#place a space character after the opening parenthesis and before the closing parenthesis of a method declaration parameter list. +csharp_space_between_method_declaration_parameter_list_parentheses = false + +#Formatting - wrapping options + +#leave code block on separate lines +csharp_preserve_single_line_blocks = false +#leave statements and member declarations on the same line +csharp_preserve_single_line_statements = true + +#Style - Code block preferences + +#prefer curly braces even for one line of code +csharp_prefer_braces = when_multiline:silent + +#Style - expression bodied member options + +#prefer expression-bodied members for accessors +csharp_style_expression_bodied_accessors = when_on_single_line:suggestion +#prefer block bodies for constructors +csharp_style_expression_bodied_constructors = false:suggestion +#prefer expression-bodied members for indexers +csharp_style_expression_bodied_indexers = true:suggestion +#prefer block bodies for methods +csharp_style_expression_bodied_methods = when_on_single_line:silent +#prefer expression-bodied members for properties +csharp_style_expression_bodied_properties = when_on_single_line:suggestion + +#Style - expression level options + +#prefer out variables to be declared inline in the argument 
list of a method call when possible +csharp_style_inlined_variable_declaration = true:suggestion +#prefer the language keyword for member access expressions, instead of the type name, for types that have a keyword to represent them +dotnet_style_predefined_type_for_member_access = true:suggestion + +#Style - Expression-level preferences + +#prefer objects to not be initialized using object initializers, but do not warn +dotnet_style_object_initializer = true:silent +#prefer objects to use auto properties, but turn off the warnings (we want to keep backing fields from Java for the most part) +dotnet_style_prefer_auto_properties = true:silent + +#Style - implicit and explicit types + +#prefer explicit type over var in all cases, unless overridden by another code style rule +csharp_style_var_elsewhere = false:silent +#prefer explicit type over var to declare variables with built-in system types such as int +csharp_style_var_for_built_in_types = false:silent +#prefer explicit type over var when the type is already mentioned on the right-hand side of a declaration +csharp_style_var_when_type_is_apparent = false:silent + +#Style - language keyword and framework type options + +#prefer the language keyword for local variables, method parameters, and class members, instead of the type name, for types that have a keyword to represent them +dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion + +#Style - modifier options + +#prefer accessibility modifiers to be declared except for public interface members. This will currently not differ from always and will act as future proofing for if C# adds default interface methods. +dotnet_style_require_accessibility_modifiers = for_non_interface_members:suggestion + +#Style - Modifier preferences + +#when this rule is set to a list of modifiers, prefer the specified ordering. 
+csharp_preferred_modifier_order = public,private,protected,internal,virtual,readonly,override,static,abstract,new,sealed,volatile:silent + +#Style - Pattern matching + +#prefer pattern matching instead of is expression with type casts +csharp_style_pattern_matching_over_as_with_null_check = true:suggestion + +#Style - Pattern matching preferences + +#prefer expression-style for switch case +csharp_style_prefer_switch_expression = false:suggestion + +#Style - qualification options + +#prefer fields not to be prefaced with this. or Me. in Visual Basic +dotnet_style_qualification_for_field = false:none +#prefer methods not to be prefaced with this. or Me. in Visual Basic +dotnet_style_qualification_for_method = false:none +#prefer properties not to be prefaced with this. or Me. in Visual Basic +dotnet_style_qualification_for_property = false:none + +#Style - assignment options +#prefer compound assignment x += 1 rather than x = x + 1. +dotnet_style_prefer_compound_assignment = true:silent + +#### General Code Quality Preferences #### + +# Warn about any performance category issues across the entire API +dotnet_code_quality.Performance.api_surface = all:warning + +# CA1031: Do not catch general exception types +dotnet_diagnostic.CA1031.severity = none + +# CA1034: Do not nest types +dotnet_diagnostic.CA1034.severity = none + + +# Features that require .NET Standard 2.1+ + +# IDE0056: Use index operator +dotnet_diagnostic.IDE0056.severity = none + +# IDE0057: Use range operator +dotnet_diagnostic.IDE0057.severity = none + +# IDE0070: Use 'System.HashCode.Combine' +dotnet_diagnostic.IDE0070.severity = none \ No newline at end of file diff --git a/Directory.Build.props b/Directory.Build.props index 48041625e0..8cd9ea7ce9 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -1,4 +1,4 @@ - - $(NoWarn);IDE0056 - $(NoWarn);IDE0057 diff --git a/Lucene.Net.sln b/Lucene.Net.sln index 24547c23da..591bda9c85 100644 --- a/Lucene.Net.sln +++ b/Lucene.Net.sln @@ -63,6 
+63,7 @@ EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{4DF0A2A1-B9C7-4EE5-BAF0-BEEF53E34220}" ProjectSection(SolutionItems) = preProject .asf.yaml = .asf.yaml + .editorconfig = .editorconfig CHANGES.txt = CHANGES.txt CONTRIBUTING.md = CONTRIBUTING.md Directory.Build.props = Directory.Build.props diff --git a/TestTargetFramework.props b/TestTargetFramework.props index 57b4430f5c..7347dd054b 100644 --- a/TestTargetFramework.props +++ b/TestTargetFramework.props @@ -37,5 +37,26 @@ $(TargetFrameworks);net48 + + + $(NoWarn);CA1034 + $(NoWarn);CA1802 + $(NoWarn);CA1822 + $(NoWarn);CA1825 + $(NoWarn);CA2219 + + $(NoWarn);IDE0017 + $(NoWarn);IDE0019;IDE0020;IDE0038 + $(NoWarn);IDE0028 + $(NoWarn);IDE0031 + $(NoWarn);IDE0040 + $(NoWarn);IDE0044 + $(NoWarn);IDE0049 + $(NoWarn);IDE0051 + $(NoWarn);IDE0052 + $(NoWarn);IDE0059 + $(NoWarn);IDE0060 + $(NoWarn);IDE1006 + diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizerFactory.cs index eceb7d1354..cd5c83505f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicLetterTokenizerFactory.cs @@ -39,7 +39,7 @@ public ArabicLetterTokenizerFactory(IDictionary args) AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicNormalizationFilterFactory.cs index f7ff24d646..ec2f331658 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicNormalizationFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicNormalizationFilterFactory.cs @@ -42,7 +42,7 @@ public 
ArabicNormalizationFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemFilterFactory.cs index 418110336c..28013b8c52 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Ar/ArabicStemFilterFactory.cs @@ -43,7 +43,7 @@ public ArabicStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Bg/BulgarianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Bg/BulgarianStemFilterFactory.cs index 78916e8c99..c2bc0982b9 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Bg/BulgarianStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Bg/BulgarianStemFilterFactory.cs @@ -43,7 +43,7 @@ public BulgarianStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilterFactory.cs index 6b02802704..2306f836ee 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Br/BrazilianStemFilterFactory.cs @@ -42,7 +42,7 @@ public BrazilianStemFilterFactory(IDictionary args) : base(args) { if (args.Count > 0) { - throw new 
ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs index 7e4af70c06..98f7eb786f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilter.cs @@ -30648,24 +30648,22 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yychar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read -#pragma warning restore 169, 414 /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; @@ -31068,12 +31066,12 @@ private void YyClose() private void YyReset(BufferedCharFilter reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; zzEOFDone = false; zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yychar = yycolumn = 0; + //yyline = yychar = 
yycolumn = 0; // LUCENENET: Never read zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilterFactory.cs index c44ae5026c..74dc924773 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/HTMLStripCharFilterFactory.cs @@ -37,7 +37,7 @@ namespace Lucene.Net.Analysis.CharFilters public class HTMLStripCharFilterFactory : CharFilterFactory { private readonly ICollection escapedTags; - private static readonly Regex TAG_NAME_PATTERN = new Regex(@"[^\\s,]+", RegexOptions.Compiled); + //private static readonly Regex TAG_NAME_PATTERN = new Regex(@"[^\\s,]+", RegexOptions.Compiled); // LUCENENET: Never read /// /// Creates a new @@ -46,7 +46,7 @@ public HTMLStripCharFilterFactory(IDictionary args) : base(args) escapedTags = GetSet(args, "escapedTags"); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs index 4d711bba43..8cec3e4dca 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/MappingCharFilterFactory.cs @@ -49,7 +49,7 @@ public MappingCharFilterFactory(IDictionary args) : base(args) mapping = Get(args, "mapping"); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } @@ -108,7 +108,7 @@ protected 
virtual void ParseRules(IList rules, NormalizeCharMap.Builder } } - private char[] @out = new char[256]; + private readonly char[] @out = new char[256]; // LUCENENET: marked readonly protected internal virtual string ParseString(string s) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilterFactory.cs index ec2c82cc05..5938e41363 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKBigramFilterFactory.cs @@ -67,7 +67,7 @@ public CJKBigramFilterFactory(IDictionary args) this.outputUnigrams = GetBoolean(args, "outputUnigrams", false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizerFactory.cs index b286af1e6a..2ef85249f8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKTokenizerFactory.cs @@ -44,7 +44,7 @@ public CJKTokenizerFactory(IDictionary args) : base(args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilterFactory.cs index 886102031a..ce12836523 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilterFactory.cs @@ -42,7 +42,7 @@ public CJKWidthFilterFactory(IDictionary args) : base(args) { if (args.Count > 0) { - throw new 
ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizationFilterFactory.cs index 18b6afa041..0b86fc4dfe 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizationFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniNormalizationFilterFactory.cs @@ -41,7 +41,7 @@ public SoraniNormalizationFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniStemFilterFactory.cs index 36fd085afa..f202647197 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Ckb/SoraniStemFilterFactory.cs @@ -42,7 +42,7 @@ public SoraniStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilterFactory.cs index 90d4a1a4fc..17eee9a751 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseFilterFactory.cs @@ -34,7 +34,7 @@ public ChineseFilterFactory(IDictionary args) : base(args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new 
ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseTokenizerFactory.cs index 454b4283a8..f2c8f77823 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Cn/ChineseTokenizerFactory.cs @@ -37,7 +37,7 @@ public ChineseTokenizerFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CommonGrams/CommonGramsFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/CommonGrams/CommonGramsFilterFactory.cs index 33dcd94a51..972014215f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/CommonGrams/CommonGramsFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/CommonGrams/CommonGramsFilterFactory.cs @@ -51,7 +51,7 @@ public CommonGramsFilterFactory(IDictionary args) ignoreCase = GetBoolean(args, "ignoreCase", false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilterFactory.cs index 1d0ca58d8c..da1150dfa7 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilterFactory.cs @@ -55,7 +55,7 @@ public DictionaryCompoundWordTokenFilterFactory(IDictionary args onlyLongestMatch = GetBoolean(args, 
"onlyLongestMatch", true); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs index 6c3ed78367..21d966a35a 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs @@ -190,10 +190,8 @@ public virtual void LoadPatterns(Stream source, Encoding encoding) #endif }; - using (var reader = XmlReader.Create(new StreamReader(source, encoding), xmlReaderSettings)) - { - LoadPatterns(reader); - } + using var reader = XmlReader.Create(new StreamReader(source, encoding), xmlReaderSettings); + LoadPatterns(reader); } /// diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs index fac42610cb..6e73b19267 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/PatternParser.cs @@ -91,10 +91,8 @@ public virtual void Parse(string path) public virtual void Parse(string path, Encoding encoding) { var xmlReaderSettings = GetXmlReaderSettings(); - using (var src = XmlReader.Create(new StreamReader(new FileStream(path, FileMode.Open), encoding), xmlReaderSettings)) - { - Parse(src); - } + using var src = XmlReader.Create(new StreamReader(new FileStream(path, FileMode.Open), encoding), xmlReaderSettings); + Parse(src); } /// @@ -117,10 +115,8 @@ public virtual void Parse(FileInfo file, Encoding encoding) { var xmlReaderSettings = GetXmlReaderSettings(); - using (var src = XmlReader.Create(new StreamReader(file.OpenRead(), 
encoding), xmlReaderSettings)) - { - Parse(src); - } + using var src = XmlReader.Create(new StreamReader(file.OpenRead(), encoding), xmlReaderSettings); + Parse(src); } /// @@ -138,10 +134,8 @@ public virtual void Parse(Stream xmlStream) { var xmlReaderSettings = GetXmlReaderSettings(); - using (var src = XmlReader.Create(xmlStream, xmlReaderSettings)) - { - Parse(src); - } + using var src = XmlReader.Create(xmlStream, xmlReaderSettings); + Parse(src); } /// diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilterFactory.cs index 74b9dd3377..116538abb4 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilterFactory.cs @@ -79,7 +79,7 @@ public HyphenationCompoundWordTokenFilterFactory(IDictionary arg onlyLongestMatch = GetBoolean(args, "onlyLongestMatch", false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/KeywordTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/KeywordTokenizerFactory.cs index 39012543ba..36cc7c893e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Core/KeywordTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/KeywordTokenizerFactory.cs @@ -42,7 +42,7 @@ public KeywordTokenizerFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizerFactory.cs 
b/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizerFactory.cs index 0bdc60753f..0f384e194f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/LetterTokenizerFactory.cs @@ -43,7 +43,7 @@ public LetterTokenizerFactory(IDictionary args) AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseFilterFactory.cs index 7b8d27bd36..d51add882e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseFilterFactory.cs @@ -42,7 +42,7 @@ public LowerCaseFilterFactory(IDictionary args) AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizerFactory.cs index 7f898075db..68b83b8eca 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/LowerCaseTokenizerFactory.cs @@ -43,7 +43,7 @@ public LowerCaseTokenizerFactory(IDictionary args) AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/StopFilterFactory.cs 
b/src/Lucene.Net.Analysis.Common/Analysis/Core/StopFilterFactory.cs index afcfd2ddd9..51361bc6e0 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Core/StopFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/StopFilterFactory.cs @@ -89,7 +89,7 @@ public StopFilterFactory(IDictionary args) enablePositionIncrements = GetBoolean(args, "enablePositionIncrements", true); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs index 65d5b4e6c0..8abe89add8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/TypeTokenFilterFactory.cs @@ -51,7 +51,7 @@ public TypeTokenFilterFactory(IDictionary args) useWhitelist = GetBoolean(args, "useWhitelist", false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Core/UpperCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/UpperCaseFilterFactory.cs index 1b6c52c011..78ea8ddc44 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Core/UpperCaseFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/UpperCaseFilterFactory.cs @@ -48,7 +48,7 @@ public UpperCaseFilterFactory(IDictionary args) AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizerFactory.cs index f7b1e36c70..8e0815a913 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Core/WhitespaceTokenizerFactory.cs @@ -43,7 +43,7 @@ public WhitespaceTokenizerFactory(IDictionary args) AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Cz/CzechStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Cz/CzechStemFilterFactory.cs index a48129f379..30cac93d94 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Cz/CzechStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Cz/CzechStemFilterFactory.cs @@ -41,7 +41,7 @@ public CzechStemFilterFactory(IDictionary args) : base(args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanLightStemFilterFactory.cs index eb07f6ba9d..1eb61627d1 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanLightStemFilterFactory.cs @@ -41,7 +41,7 @@ public GermanLightStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanMinimalStemFilterFactory.cs index 1a4cc5c03e..cae7f5843e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanMinimalStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanMinimalStemFilterFactory.cs @@ -41,7 +41,7 @@ public GermanMinimalStemFilterFactory(IDictionary args) : base(a { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanNormalizationFilterFactory.cs index 2d7035ad7c..ae43bde1a2 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanNormalizationFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanNormalizationFilterFactory.cs @@ -40,7 +40,7 @@ public GermanNormalizationFilterFactory(IDictionary args) : base { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanStemFilterFactory.cs index 48d9b5492e..1be9a0ff80 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/De/GermanStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/De/GermanStemFilterFactory.cs @@ -41,7 +41,7 @@ public GermanStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilterFactory.cs index 9f57c999f9..cfe8612a45 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekLowerCaseFilterFactory.cs @@ -40,7 +40,7 @@ public GreekLowerCaseFilterFactory(IDictionary args) : base(args AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemFilterFactory.cs index 80eeefcf83..2c551c7478 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/El/GreekStemFilterFactory.cs @@ -41,7 +41,7 @@ public GreekStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishMinimalStemFilterFactory.cs index 338ed1d7be..00a1bef631 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishMinimalStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishMinimalStemFilterFactory.cs @@ -40,7 +40,7 @@ public EnglishMinimalStemFilterFactory(IDictionary args) : base( { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishPossessiveFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishPossessiveFilterFactory.cs index 3b28bdb564..a9db7c93df 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishPossessiveFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/En/EnglishPossessiveFilterFactory.cs @@ -43,7 +43,7 @@ public EnglishPossessiveFilterFactory(IDictionary args) : base(a AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/KStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/KStemFilterFactory.cs index fd38cc5af3..1aeba71807 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/En/KStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/En/KStemFilterFactory.cs @@ -40,7 +40,7 @@ public KStemFilterFactory(IDictionary args) : base(args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemFilterFactory.cs index 1f51cf80b3..a50bc2371f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/En/PorterStemFilterFactory.cs @@ -40,7 +40,7 @@ public PorterStemFilterFactory(IDictionary args) : base(args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Es/SpanishLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Es/SpanishLightStemFilterFactory.cs index 036aad3d05..8f9daa3e9f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Es/SpanishLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Es/SpanishLightStemFilterFactory.cs @@ -41,7 +41,7 @@ public SpanishLightStemFilterFactory(IDictionary args) : base(ar { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianCharFilterFactory.cs index e6bf9fad40..d4d58fb098 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianCharFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianCharFilterFactory.cs @@ -41,7 +41,7 @@ public PersianCharFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianNormalizationFilterFactory.cs index d27938a441..d427ea5369 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianNormalizationFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Fa/PersianNormalizationFilterFactory.cs @@ -40,7 +40,7 @@ public PersianNormalizationFilterFactory(IDictionary args) : bas { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Fi/FinnishLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fi/FinnishLightStemFilterFactory.cs index 5e9503e49b..5aa3237c2b 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Fi/FinnishLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Fi/FinnishLightStemFilterFactory.cs @@ -40,7 +40,7 @@ public FinnishLightStemFilterFactory(IDictionary args) : base(ar { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchLightStemFilterFactory.cs index a1e63cb899..512771db62 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchLightStemFilterFactory.cs @@ -41,7 +41,7 @@ public FrenchLightStemFilterFactory(IDictionary args) : base(arg { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchMinimalStemFilterFactory.cs index 4c2cedeec0..36abdb8e12 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchMinimalStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Fr/FrenchMinimalStemFilterFactory.cs @@ -41,7 +41,7 @@ public FrenchMinimalStemFilterFactory(IDictionary args) : base(a { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilterFactory.cs index b8e189663f..a62eb1ff21 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Ga/IrishLowerCaseFilterFactory.cs @@ -39,7 +39,7 @@ public IrishLowerCaseFilterFactory(IDictionary args) : base(args { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianMinimalStemFilterFactory.cs index 5fe76a354e..785c3978a8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianMinimalStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianMinimalStemFilterFactory.cs @@ -40,7 +40,7 @@ public GalicianMinimalStemFilterFactory(IDictionary args) : base { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemFilterFactory.cs index c89b7f0fc4..c4057f4aa8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Gl/GalicianStemFilterFactory.cs @@ -41,7 +41,7 @@ public GalicianStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } public override TokenStream Create(TokenStream input) 
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiNormalizationFilterFactory.cs index 474ac03315..09ddfec8c3 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiNormalizationFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiNormalizationFilterFactory.cs @@ -40,7 +40,7 @@ public HindiNormalizationFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiStemFilterFactory.cs index 6d862ae4be..9da1681916 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hi/HindiStemFilterFactory.cs @@ -40,7 +40,7 @@ public HindiStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hu/HungarianLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hu/HungarianLightStemFilterFactory.cs index 652b178217..2dceca6469 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hu/HungarianLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hu/HungarianLightStemFilterFactory.cs @@ -41,7 +41,7 @@ public HungarianLightStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs index 0f4e984970..b61ebd9d4e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs @@ -269,10 +269,11 @@ private void ReadAffixFile(Stream affixStream, Encoding decoder) { JCG.SortedDictionary> prefixes = new JCG.SortedDictionary>(StringComparer.Ordinal); JCG.SortedDictionary> suffixes = new JCG.SortedDictionary>(StringComparer.Ordinal); - IDictionary seenPatterns = new JCG.Dictionary(); - - // zero condition -> 0 ord - seenPatterns[".*"] = 0; + IDictionary seenPatterns = new JCG.Dictionary + { + // zero condition -> 0 ord + [".*"] = 0 + }; patterns.Add(null); // zero strip -> 0 ord diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs index 28890e95c0..7d9b4d39f0 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/HunspellStemFilterFactory.cs @@ -71,7 +71,7 @@ public HunspellStemFilterFactory(IDictionary args) GetInt32(args, "recursionCap", 0); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Id/IndonesianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Id/IndonesianStemFilterFactory.cs index ac9b6f71a4..16b97f3545 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Id/IndonesianStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Id/IndonesianStemFilterFactory.cs @@ -45,7 +45,7 @@ public IndonesianStemFilterFactory(IDictionary args) stemDerivational = GetBoolean(args, "stemDerivational", true); if 
(args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/In/IndicNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/In/IndicNormalizationFilterFactory.cs index 4672ec4b63..e98ac371ea 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/In/IndicNormalizationFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/In/IndicNormalizationFilterFactory.cs @@ -39,7 +39,7 @@ public IndicNormalizationFilterFactory(IDictionary args) : base( { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianLightStemFilterFactory.cs index 16beaf78f5..d251dff8ca 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/It/ItalianLightStemFilterFactory.cs @@ -41,7 +41,7 @@ public ItalianLightStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Lv/LatvianStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Lv/LatvianStemFilterFactory.cs index 003d37b11d..9408efb844 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Lv/LatvianStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Lv/LatvianStemFilterFactory.cs @@ -41,7 +41,7 @@ public LatvianStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new 
ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilterFactory.cs index c5167da1b3..7ae8788255 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ASCIIFoldingFilterFactory.cs @@ -43,7 +43,7 @@ public ASCIIFoldingFilterFactory(IDictionary args) preserveOriginal = GetBoolean(args, "preserveOriginal", false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs index e509e42b03..990b4ab360 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CapitalizationFilterFactory.cs @@ -108,7 +108,7 @@ public CapitalizationFilterFactory(IDictionary args) culture = GetCulture(args, CULTURE, null); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilterFactory.cs index 1bfee5ec11..7246c6c522 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilterFactory.cs +++ 
b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/CodepointCountFilterFactory.cs @@ -47,7 +47,7 @@ public CodepointCountFilterFactory(IDictionary args) max = RequireInt32(args, MAX_KEY); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/HyphenatedWordsFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/HyphenatedWordsFilterFactory.cs index 736dc81e39..8368e60b1e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/HyphenatedWordsFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/HyphenatedWordsFilterFactory.cs @@ -40,7 +40,7 @@ public HyphenatedWordsFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeepWordFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeepWordFilterFactory.cs index 10cc5f6843..1145e966e5 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeepWordFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeepWordFilterFactory.cs @@ -49,7 +49,7 @@ public KeepWordFilterFactory(IDictionary args) enablePositionIncrements = GetBoolean(args, "enablePositionIncrements", true); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilterFactory.cs 
b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilterFactory.cs index f3dedb0911..f99a1e9b82 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordMarkerFilterFactory.cs @@ -52,7 +52,7 @@ public KeywordMarkerFilterFactory(IDictionary args) ignoreCase = GetBoolean(args, "ignoreCase", false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordRepeatFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordRepeatFilterFactory.cs index 386c26e4b5..6e8a4e12ff 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordRepeatFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/KeywordRepeatFilterFactory.cs @@ -37,7 +37,7 @@ public KeywordRepeatFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LengthFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LengthFilterFactory.cs index 5d40b1df74..c44abaa39b 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LengthFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LengthFilterFactory.cs @@ -49,7 +49,7 @@ public LengthFilterFactory(IDictionary args) enablePositionIncrements = GetBoolean(args, "enablePositionIncrements", true); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, 
"Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilterFactory.cs index b4e95ffd47..8eeb80b1d7 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenCountFilterFactory.cs @@ -51,7 +51,7 @@ public LimitTokenCountFilterFactory(IDictionary args) consumeAllTokens = GetBoolean(args, CONSUME_ALL_TOKENS_KEY, false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilterFactory.cs index d0b2a6151e..396add10dc 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/LimitTokenPositionFilterFactory.cs @@ -51,7 +51,7 @@ public LimitTokenPositionFilterFactory(IDictionary args) consumeAllTokens = GetBoolean(args, CONSUME_ALL_TOKENS_KEY, false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/Lucene47WordDelimiterFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/Lucene47WordDelimiterFilter.cs index 8e0d70fc22..3a62bdc15d 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/Lucene47WordDelimiterFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/Lucene47WordDelimiterFilter.cs @@ -432,6 +432,7 @@ private 
static bool IsDigit(int type) /// /// Word type to check /// true if the type contains , false otherwise + [System.Diagnostics.CodeAnalysis.SuppressMessage("CodeQuality", "IDE0051:Remove unused private members", Justification = "Obsolete class, anyway")] private static bool IsSubwordDelim(int type) { return (type & SUBWORD_DELIM) != 0; @@ -442,6 +443,7 @@ private static bool IsSubwordDelim(int type) /// /// Word type to check /// true if the type contains , false otherwise + [System.Diagnostics.CodeAnalysis.SuppressMessage("CodeQuality", "IDE0051:Remove unused private members", Justification = "Obsolete class, anyway")] private static bool IsUpper(int type) { return (type & UPPER) != 0; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAwareTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAwareTokenFilter.cs index d4b7020107..c56938ec4c 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAwareTokenFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAwareTokenFilter.cs @@ -78,7 +78,7 @@ public PrefixAwareTokenFilter(TokenStream prefix, TokenStream suffix) public override sealed bool IncrementToken() { - Token nextToken = null; + Token nextToken; // LUCENENET: IDE0059: Remove unnecessary value assignment if (!prefixExhausted) { nextToken = GetNextPrefixInputToken(reusableToken); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/RemoveDuplicatesTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/RemoveDuplicatesTokenFilterFactory.cs index 2fbb279945..46e2a126c0 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/RemoveDuplicatesTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/RemoveDuplicatesTokenFilterFactory.cs @@ -39,7 +39,7 @@ public RemoveDuplicatesTokenFilterFactory(IDictionary args) : ba { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + 
throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianFoldingFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianFoldingFilterFactory.cs index 955c5d73c2..2476761891 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianFoldingFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianFoldingFilterFactory.cs @@ -40,7 +40,7 @@ public ScandinavianFoldingFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianNormalizationFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianNormalizationFilterFactory.cs index 189108d828..90147a6e82 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianNormalizationFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/ScandinavianNormalizationFilterFactory.cs @@ -40,7 +40,7 @@ public ScandinavianNormalizationFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilterFactory.cs index 4f292dbf96..b516622c28 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/StemmerOverrideFilterFactory.cs @@ -47,7 +47,7 @@ 
public StemmerOverrideFilterFactory(IDictionary args) ignoreCase = GetBoolean(args, "ignoreCase", false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs index 5928e471f0..c89c8f9c10 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilter.cs @@ -76,8 +76,8 @@ public override bool IncrementToken() { return true; } - int start = 0; - int end = 0; + int start; // LUCENENET: IDE0059: Remove unnecessary value assignment + int end; // LUCENENET: IDE0059: Remove unnecessary value assignment int endOff = 0; // eat the first characters diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilterFactory.cs index b772d2ae25..dd57e0081f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/TrimFilterFactory.cs @@ -44,7 +44,7 @@ public TrimFilterFactory(IDictionary args) m_updateOffsets = GetBoolean(args, "updateOffsets", false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs index f792a51239..1588a1e06f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs +++ 
b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterFilterFactory.cs @@ -99,7 +99,7 @@ public WordDelimiterFilterFactory(IDictionary args) this.flags = flags; if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } @@ -138,7 +138,7 @@ public override TokenStream Create(TokenStream input) } // source => type - private static Regex typePattern = new Regex("(.*)\\s*=>\\s*(.*)\\s*$", RegexOptions.Compiled); + private static readonly Regex typePattern = new Regex("(.*)\\s*=>\\s*(.*)\\s*$", RegexOptions.Compiled); // parses a list of MappingCharFilter style rules into a custom byte[] type table private byte[] ParseTypes(IList rules) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterIterator.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterIterator.cs index 0a0a3ef4ec..be656eeec1 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterIterator.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/WordDelimiterIterator.cs @@ -365,52 +365,6 @@ public static byte GetType(int ch) return WordDelimiterFilter.SUBWORD_DELIM; } - - //switch (char.getType(ch)) - //{ - // case char.UPPERCASE_LETTER: - // return WordDelimiterFilter.UPPER; - // case char.LOWERCASE_LETTER: - // return WordDelimiterFilter.LOWER; - - // case char.TITLECASE_LETTER: - // case char.MODIFIER_LETTER: - // case char.OTHER_LETTER: - // case char.NON_SPACING_MARK: - // case char.ENCLOSING_MARK: // depends what it encloses? 
- // case char.COMBINING_SPACING_MARK: - // return WordDelimiterFilter.ALPHA; - - // case char.DECIMAL_DIGIT_NUMBER: - // case char.LETTER_NUMBER: - // case char.OTHER_NUMBER: - // return WordDelimiterFilter.DIGIT; - - // // case Character.SPACE_SEPARATOR: - // // case Character.LINE_SEPARATOR: - // // case Character.PARAGRAPH_SEPARATOR: - // // case Character.CONTROL: - // // case Character.FORMAT: - // // case Character.PRIVATE_USE: - - // case char.SURROGATE: // prevent splitting - // return WordDelimiterFilter.ALPHA | WordDelimiterFilter.DIGIT; - - // // case Character.DASH_PUNCTUATION: - // // case Character.START_PUNCTUATION: - // // case Character.END_PUNCTUATION: - // // case Character.CONNECTOR_PUNCTUATION: - // // case Character.OTHER_PUNCTUATION: - // // case Character.MATH_SYMBOL: - // // case Character.CURRENCY_SYMBOL: - // // case Character.MODIFIER_SYMBOL: - // // case Character.OTHER_SYMBOL: - // // case Character.INITIAL_QUOTE_PUNCTUATION: - // // case Character.FINAL_QUOTE_PUNCTUATION: - - // default: - // return WordDelimiterFilter.SUBWORD_DELIM; - //} } } } \ No newline at end of file diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramFilterFactory.cs index 64e90c5406..bf991fe9b8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramFilterFactory.cs @@ -47,7 +47,7 @@ public EdgeNGramFilterFactory(IDictionary args) side = Get(args, "side", EdgeNGramTokenFilter.Side.FRONT.ToString()); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramTokenizerFactory.cs index 
583bb61993..c8941c6f9a 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/EdgeNGramTokenizerFactory.cs @@ -47,7 +47,7 @@ public EdgeNGramTokenizerFactory(IDictionary args) : base(args) side = Get(args, "side", EdgeNGramTokenFilter.Side.FRONT.ToString()); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramFilterFactory.cs index bea6b8e367..f4197d506c 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramFilterFactory.cs @@ -45,7 +45,7 @@ public NGramFilterFactory(IDictionary args) maxGramSize = GetInt32(args, "maxGramSize", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizerFactory.cs index 91f4356fc6..118a909887 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenizerFactory.cs @@ -46,7 +46,7 @@ public NGramTokenizerFactory(IDictionary args) maxGramSize = GetInt32(args, "maxGramSize", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianLightStemFilterFactory.cs index 8cbbc39603..0c6782bbb1 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianLightStemFilterFactory.cs @@ -60,7 +60,7 @@ public NorwegianLightStemFilterFactory(IDictionary args) } if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemFilterFactory.cs index f5603d64ad..2abfce7b47 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemFilterFactory.cs @@ -60,7 +60,7 @@ public NorwegianMinimalStemFilterFactory(IDictionary args) } if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemmer.cs index ee4b07ee8f..312c210d35 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemmer.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/No/NorwegianMinimalStemmer.cs @@ -63,7 +63,7 @@ namespace Lucene.Net.Analysis.No /// public class NorwegianMinimalStemmer { - private readonly bool useBokmaal; + //private readonly bool useBokmaal; // LUCENENET: Never read private readonly bool useNynorsk; /// @@ -76,7 +76,7 @@ public NorwegianMinimalStemmer(NorwegianStandard flags) { throw new 
ArgumentException("invalid flags"); } - useBokmaal = (flags & NorwegianStandard.BOKMAAL) != 0; + //useBokmaal = (flags & NorwegianStandard.BOKMAAL) != 0; // LUCENENET: Never read useNynorsk = (flags & NorwegianStandard.NYNORSK) != 0; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Path/PathHierarchyTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Path/PathHierarchyTokenizerFactory.cs index 673f62caa4..6f1301a95d 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Path/PathHierarchyTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Path/PathHierarchyTokenizerFactory.cs @@ -85,7 +85,7 @@ public PathHierarchyTokenizerFactory(IDictionary args) skip = GetInt32(args, "skip", PathHierarchyTokenizer.DEFAULT_SKIP); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceCharFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceCharFilterFactory.cs index bd88d6299d..e6ada46a40 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceCharFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceCharFilterFactory.cs @@ -60,7 +60,7 @@ public PatternReplaceCharFilterFactory(IDictionary args) : base( } if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs index 8a9fa0c245..a6b421f60e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs +++ 
b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternReplaceFilterFactory.cs @@ -49,7 +49,7 @@ public PatternReplaceFilterFactory(IDictionary args) : base(args replaceAll = "all".Equals(Get(args, "replace", new string[] { "all", "first" }, "all"), StringComparison.Ordinal); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizerFactory.cs index 8e6754c207..63faf0ad2e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pattern/PatternTokenizerFactory.cs @@ -77,7 +77,7 @@ public PatternTokenizerFactory(IDictionary args) m_group = GetInt32(args, GROUP, -1); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/DelimitedPayloadTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/DelimitedPayloadTokenFilterFactory.cs index 21ec2fffdf..7d32ad7b76 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/DelimitedPayloadTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/DelimitedPayloadTokenFilterFactory.cs @@ -50,7 +50,7 @@ public DelimitedPayloadTokenFilterFactory(IDictionary args) delimiter = GetChar(args, DELIMITER_ATTR, '|'); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/NumericPayloadTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/NumericPayloadTokenFilterFactory.cs index 6d7ff6fa09..90f9d60eeb 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/NumericPayloadTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/NumericPayloadTokenFilterFactory.cs @@ -44,7 +44,7 @@ public NumericPayloadTokenFilterFactory(IDictionary args) : base typeMatch = Require(args, "typeMatch"); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/PayloadHelper.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/PayloadHelper.cs index 692bff2414..32c76d41f2 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/PayloadHelper.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/PayloadHelper.cs @@ -20,7 +20,7 @@ /// /// Utility methods for encoding payloads. 
/// - public class PayloadHelper + public static class PayloadHelper // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// /// NOTE: This was encodeFloat() in Lucene diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TokenOffsetPayloadTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TokenOffsetPayloadTokenFilterFactory.cs index 0d0c6cd0e9..75e5501a48 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TokenOffsetPayloadTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TokenOffsetPayloadTokenFilterFactory.cs @@ -40,7 +40,7 @@ public TokenOffsetPayloadTokenFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TypeAsPayloadTokenFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TypeAsPayloadTokenFilterFactory.cs index 247f81778d..4cdc6dc5a4 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TypeAsPayloadTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Payloads/TypeAsPayloadTokenFilterFactory.cs @@ -40,7 +40,7 @@ public TypeAsPayloadTokenFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Position/PositionFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Position/PositionFilterFactory.cs index 6d07152c49..8dc83bf6e5 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Position/PositionFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Position/PositionFilterFactory.cs @@ -48,7 +48,7 @@ public 
PositionFilterFactory(IDictionary args) positionIncrement = GetInt32(args, "positionIncrement", 0); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } if (m_luceneMatchVersion.OnOrAfter(Lucene.Net.Util.LuceneVersion.LUCENE_44)) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseLightStemFilterFactory.cs index 05a1fe034d..a838b3d477 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseLightStemFilterFactory.cs @@ -41,7 +41,7 @@ public PortugueseLightStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseMinimalStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseMinimalStemFilterFactory.cs index 9d58cc1c51..27c41f2cf6 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseMinimalStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseMinimalStemFilterFactory.cs @@ -41,7 +41,7 @@ public PortugueseMinimalStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemFilterFactory.cs index 8ec79b37b8..6079c64dd3 100644 --- 
a/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Pt/PortugueseStemFilterFactory.cs @@ -41,7 +41,7 @@ public PortugueseStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilterFactory.cs index d4f59e9bed..e21da33952 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Reverse/ReverseStringFilterFactory.cs @@ -42,7 +42,7 @@ public ReverseStringFilterFactory(IDictionary args) : base(args) AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizerFactory.cs index b1d90016e1..815c57d901 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLetterTokenizerFactory.cs @@ -35,7 +35,7 @@ public RussianLetterTokenizerFactory(IDictionary args) : base(ar AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLightStemFilterFactory.cs index 
7f01c412a5..a2a6c7b662 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Ru/RussianLightStemFilterFactory.cs @@ -41,7 +41,7 @@ public RussianLightStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilterFactory.cs index 67686ffdfd..41f4d9b4f4 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilterFactory.cs @@ -66,7 +66,7 @@ public ShingleFilterFactory(IDictionary args) fillerToken = Get(args, "fillerToken", ShingleFilter.DEFAULT_FILLER_TOKEN); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs index 4320687f83..09d202bacb 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs @@ -202,7 +202,7 @@ public sealed class SinkTokenStream : TokenStream private readonly IList cachedStates = new List(); private AttributeSource.State finalState; private IEnumerator it = null; - private SinkFilter filter; + private readonly SinkFilter filter; // LUCENENET: marked readonly internal SinkTokenStream(AttributeSource source, SinkFilter filter) : base(source) diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Snowball/SnowballPorterFilterFactory.cs 
b/src/Lucene.Net.Analysis.Common/Analysis/Snowball/SnowballPorterFilterFactory.cs index eb9f1fd2ef..3f5d654f98 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Snowball/SnowballPorterFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Snowball/SnowballPorterFilterFactory.cs @@ -54,7 +54,7 @@ public SnowballPorterFilterFactory(IDictionary args) : base(args wordFiles = Get(args, PROTECTED_TOKENS); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicFilterFactory.cs index ea36b5c368..fb87766f66 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicFilterFactory.cs @@ -40,7 +40,7 @@ public ClassicFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerFactory.cs index 2113f9e3ac..a7cdb3c879 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerFactory.cs @@ -44,7 +44,7 @@ public ClassicTokenizerFactory(IDictionary args) maxTokenLength = GetInt32(args, "maxTokenLength", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs index 0de05fde40..4457b4461e 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicTokenizerImpl.cs @@ -135,7 +135,8 @@ private static int[] ZzUnpackAction() { int[] result = new int[50]; int offset = 0; - offset = ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); + /*offset = */ + ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -175,7 +176,8 @@ private static int[] ZzUnpackRowMap() { int[] result = new int[50]; int offset = 0; - offset = ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); + /*offset = */ + ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -243,7 +245,8 @@ private static int[] ZzUnpackTrans() { int[] result = new int[552]; int offset = 0; - offset = ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); + /*offset = */ + ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -291,7 +294,8 @@ private static int[] ZzUnpackAttribute() { int[] result = new int[50]; int offset = 0; - offset = ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); + /*offset = */ + ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -348,35 +352,33 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// /// the number of characters 
up to the start of the matched text private int yyChar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ @@ -526,12 +528,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yyChar = yycolumn = 0; + //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read + yyChar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilterFactory.cs index fe884a63eb..1a7a0533e8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardFilterFactory.cs @@ -41,7 +41,7 @@ public StandardFilterFactory(IDictionary args) AssureMatchVersion(); if (args.Count > 0) { - throw new 
ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerFactory.cs index db051bd50a..bd959c5390 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerFactory.cs @@ -45,7 +45,7 @@ public StandardTokenizerFactory(IDictionary args) maxTokenLength = GetInt32(args, "maxTokenLength", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs index ca34d71d2b..ea8acb67ed 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardTokenizerImpl.cs @@ -228,7 +228,8 @@ private static int[] ZzUnpackAction() { int[] result = new int[197]; int offset = 0; - offset = ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); + /*offset = */ + ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -286,7 +287,8 @@ private static int[] ZzUnpackRowMap() { int[] result = new int[197]; int offset = 0; - offset = ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); + /*offset = */ + ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -829,7 +831,8 @@ private static int[] ZzUnpackTrans() { int[] result = new int[26554]; int 
offset = 0; - offset = ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); + /*offset = */ + ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -877,7 +880,8 @@ private static int[] ZzUnpackAttribute() { int[] result = new int[197]; int offset = 0; - offset = ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); + /*offset = */ + ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -934,35 +938,33 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// /// the number of characters up to the start of the matched text private int yyChar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ /// @@ -1123,12 +1125,13 @@ public void YyClose() 
public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yyChar = yycolumn = 0; + //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read + yyChar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs index 0750d6f038..762355f732 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/StandardTokenizerImpl31.cs @@ -676,30 +676,27 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yyChar; -#pragma warning disable 169, 414 + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; - - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - private bool zzAtBOL = true; + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool 
zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning disable 169, 414 /* user code: */ /// Alphanumeric sequences @@ -847,12 +844,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yyChar = yycolumn = 0; + //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read + yyChar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs index 20996a53fa..fa729eea05 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std31/UAX29URLEmailTokenizerImpl31.cs @@ -3231,30 +3231,27 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yychar; -#pragma warning disable 169, 414 + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; - - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - private bool 
zzAtBOL = true; + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ /// Alphanumeric sequences @@ -3407,12 +3404,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yychar = yycolumn = 0; + //yyline = yychar = yycolumn = 0; // LUCENENET: Never read + yychar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs index 3308df0fe9..b00c69895f 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/StandardTokenizerImpl34.cs @@ -692,32 +692,29 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yyChar; -#pragma warning disable 169, 414 + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + 
///// + //private int yycolumn; // LUCENENET: Never read - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; - - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ /// Alphanumeric sequences @@ -868,12 +865,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yyChar = yycolumn = 0; + //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read + yyChar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs index ea9210fc1e..aed02578e8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std34/UAX29URLEmailTokenizerImpl34.cs @@ -3334,31 +3334,29 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of 
newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yychar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ /// Alphanumeric sequences @@ -3515,12 +3513,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yychar = yycolumn = 0; + //yyline = yychar = yycolumn = 0; // LUCENENET: Never read + yychar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs index 5d899061d3..e3acd5cf28 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs +++ 
b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std36/UAX29URLEmailTokenizerImpl36.cs @@ -3769,31 +3769,29 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yychar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ /// Alphanumeric sequences @@ -3950,12 +3948,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yychar = yycolumn = 0; + //yyline = yychar = yycolumn = 0; // LUCENENET: Never read + yychar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new 
char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs index 7fe20fbcac..432f8faf0b 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/StandardTokenizerImpl40.cs @@ -799,31 +799,29 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yyChar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ /// Alphanumeric sequences @@ -978,12 +976,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = 
false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yyChar = yycolumn = 0; + //yyline = yyChar = yycolumn = 0; // LUCENENET: Never read + yyChar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs index 7a2cbc0772..19e3346aa0 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/Std40/UAX29URLEmailTokenizerImpl40.cs @@ -3981,31 +3981,29 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yychar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + 
//private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ /// Alphanumeric sequences @@ -4166,12 +4164,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yychar = yycolumn = 0; + //yyline = yychar = yycolumn = 0; // LUCENENET: Never read + yychar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerFactory.cs index 0a5a34c551..497c731475 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerFactory.cs @@ -45,7 +45,7 @@ public UAX29URLEmailTokenizerFactory(IDictionary args) maxTokenLength = GetInt32(args, "maxTokenLength", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs index 9d5089a041..d2565c4894 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailTokenizerImpl.cs @@ -273,7 +273,8 @@ private static int[] ZzUnpackAction() { int[] result = new int[3116]; int offset = 0; - offset = ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); + /*offset = */ + 
ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -693,7 +694,8 @@ private static int[] ZzUnpackRowMap() { int[] result = new int[3116]; int offset = 0; - offset = ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); + /*offset = */ + ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -8966,7 +8968,7 @@ private static int[] ZzUnpackTrans() int offset = 0; offset = ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); offset = ZzUnpackTrans(ZZ_TRANS_PACKED_1, offset, result); - offset = ZzUnpackTrans(ZZ_TRANS_PACKED_2, offset, result); + /*offset = */ZzUnpackTrans(ZZ_TRANS_PACKED_2, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -9038,7 +9040,7 @@ private static int[] ZzUnpackAttribute() { int[] result = new int[3116]; int offset = 0; - offset = ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); + /*offset = */ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); // LUCENENET: IDE0059: Remove unnecessary value assignment return result; } @@ -9086,31 +9088,29 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; // LUCENENET: Never read /// the number of characters up to the start of the matched text private int yychar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the 
beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning restore 169, 414 /* user code: */ /// Alphanumeric sequences @@ -9271,12 +9271,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yychar = yycolumn = 0; + //yyline = yychar = yycolumn = 0; // LUCENENET: Never read + yychar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishLightStemFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishLightStemFilterFactory.cs index ea6ffa7d59..49c2e0f7c4 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishLightStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Sv/SwedishLightStemFilterFactory.cs @@ -41,7 +41,7 @@ public SwedishLightStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs index 21ec6d8e96..1b05100cea 100644 --- 
a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs @@ -66,7 +66,7 @@ public FSTSynonymFilterFactory(IDictionary args) } if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } @@ -151,9 +151,9 @@ private TokenizerFactory LoadTokenizerFactory(IResourceLoader loader, string cna { TokenizerFactory tokFactory = (TokenizerFactory)Activator.CreateInstance(clazz, new object[] { tokArgs }); - if (tokFactory is IResourceLoaderAware) + if (tokFactory is IResourceLoaderAware resourceLoaderAware) { - ((IResourceLoaderAware)tokFactory).Inform(loader); + resourceLoaderAware.Inform(loader); } return tokFactory; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs index aaafa9a1aa..4538724c38 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SlowSynonymFilterFactory.cs @@ -74,7 +74,7 @@ public SlowSynonymFilterFactory(IDictionary args) } if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } @@ -150,8 +150,10 @@ internal static void ParseRules(IEnumerable rules, SlowSynonymMap map, s else { // reduce to first argument - target = new List>(1); - target.Add(source[0]); + target = new List>(1) + { + source[0] + }; } } @@ -212,9 +214,9 @@ private TokenizerFactory LoadTokenizerFactory(IResourceLoader loader, string cna { TokenizerFactory tokFactory = (TokenizerFactory)Activator.CreateInstance(clazz, new object[] { tokArgs }); - if (tokFactory is IResourceLoaderAware) + if 
(tokFactory is IResourceLoaderAware resourceLoaderAware) { - ((IResourceLoaderAware)tokFactory).Inform(loader); + resourceLoaderAware.Inform(loader); } return tokFactory; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizerFactory.cs index 8b345b4a95..3eb0e5e65a 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiTokenizerFactory.cs @@ -42,7 +42,7 @@ public ThaiTokenizerFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilterFactory.cs index 84d660e4f3..8e88f5bbb1 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Th/ThaiWordFilterFactory.cs @@ -43,7 +43,7 @@ public ThaiWordFilterFactory(IDictionary args) : base(args) AssureMatchVersion(); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilterFactory.cs index a3cd1f6da9..30e9c777b8 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Tr/TurkishLowerCaseFilterFactory.cs @@ -40,7 +40,7 @@ public TurkishLowerCaseFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new 
ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs index 0384dca583..d363c5c263 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/AnalysisSPILoader.cs @@ -115,8 +115,7 @@ public S NewInstance(string name, IDictionary args) public Type LookupClass(string name) { - Type service; - if (this.services.TryGetValue(name.ToLowerInvariant(), out service)) + if (this.services.TryGetValue(name.ToLowerInvariant(), out Type service)) { return service; } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/BufferedCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/BufferedCharFilter.cs index e96223c5cc..00711479d6 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/BufferedCharFilter.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/BufferedCharFilter.cs @@ -355,19 +355,6 @@ public override int Read(char[] buffer, int offset, int length) } } - /// - /// Peeks at the next input character, refilling the buffer if necessary. If - /// this character is a newline character ("\n"), it is discarded. - /// - private void ChompNewline() - { - if ((pos != end || FillBuf() != -1) - && buf[pos] == '\n') - { - pos++; - } - } - /// /// Returns the next line of text available from this reader. 
A line is /// represented by zero or more characters followed by '\n', diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs index b2dd01af15..4838472011 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs @@ -86,7 +86,7 @@ public class CharArrayMap : ICharArrayMap, IDictionary /// internal class MapValue { - private TValue value = default(TValue); + private TValue value = default; public TValue Value { get => value; @@ -215,12 +215,10 @@ public virtual bool Contains(KeyValuePair item) /// A 32-bit integer that represents the index in at which copying begins. public virtual void CopyTo(KeyValuePair[] array, int arrayIndex) { - using (var iter = (EntryIterator)EntrySet().GetEnumerator()) + using var iter = (EntryIterator)EntrySet().GetEnumerator(); + for (int i = arrayIndex; iter.MoveNext(); i++) { - for (int i = arrayIndex; iter.MoveNext(); i++) - { - array[i] = new KeyValuePair(iter.Current.Key, iter.CurrentValue); - } + array[i] = new KeyValuePair(iter.Current.Key, iter.CurrentValue); } } @@ -231,12 +229,10 @@ public virtual void CopyTo(KeyValuePair[] array, int arrayIndex) /// public virtual void CopyTo(CharArrayMap map) { - using (var iter = (EntryIterator)EntrySet().GetEnumerator()) + using var iter = (EntryIterator)EntrySet().GetEnumerator(); + while (iter.MoveNext()) { - while (iter.MoveNext()) - { - map.Put(iter.Current.Key, iter.CurrentValue); - } + map.Put(iter.Current.Key, iter.CurrentValue); } } @@ -286,7 +282,7 @@ public virtual bool ContainsKey(object o) { if (o == null) { - throw new ArgumentException("o can't be null", "o"); + throw new ArgumentException("o can't be null", nameof(o)); } var c = o as char[]; @@ -305,7 +301,7 @@ public virtual bool ContainsKey(object o) public virtual TValue Get(char[] text, int offset, int length) { var value = values[GetSlot(text, offset, 
length)]; - return (value != null) ? value.Value : default(TValue); + return (value != null) ? value.Value : default; } /// @@ -314,7 +310,7 @@ public virtual TValue Get(char[] text, int offset, int length) public virtual TValue Get(char[] text) { var value = values[GetSlot(text, 0, text.Length)]; - return (value != null) ? value.Value : default(TValue); + return (value != null) ? value.Value : default; } /// @@ -323,7 +319,7 @@ public virtual TValue Get(char[] text) public virtual TValue Get(ICharSequence text) { var value = values[GetSlot(text)]; - return (value != null) ? value.Value : default(TValue); + return (value != null) ? value.Value : default; } /// @@ -332,7 +328,7 @@ public virtual TValue Get(ICharSequence text) public virtual TValue Get(string text) { var value = values[GetSlot(text)]; - return (value != null) ? value.Value : default(TValue); + return (value != null) ? value.Value : default; } /// @@ -417,7 +413,7 @@ private int GetSlot(string text) public virtual TValue Put(ICharSequence text, TValue value) { MapValue oldValue = PutImpl(text, new MapValue(value)); // could be more efficient - return (oldValue != null) ? oldValue.Value : default(TValue); + return (oldValue != null) ? oldValue.Value : default; } /// @@ -427,7 +423,7 @@ public virtual TValue Put(ICharSequence text, TValue value) public virtual TValue Put(object o, TValue value) { MapValue oldValue = PutImpl(o, new MapValue(value)); - return (oldValue != null) ? oldValue.Value : default(TValue); + return (oldValue != null) ? oldValue.Value : default; } /// @@ -436,7 +432,7 @@ public virtual TValue Put(object o, TValue value) public virtual TValue Put(string text, TValue value) { MapValue oldValue = PutImpl(text, new MapValue(value)); - return (oldValue != null) ? oldValue.Value : default(TValue); + return (oldValue != null) ? 
oldValue.Value : default; } /// @@ -447,7 +443,7 @@ public virtual TValue Put(string text, TValue value) public virtual TValue Put(char[] text, TValue value) { MapValue oldValue = PutImpl(text, new MapValue(value)); - return (oldValue != null) ? oldValue.Value : default(TValue); + return (oldValue != null) ? oldValue.Value : default; } /// @@ -802,7 +798,7 @@ private int GetHashCode(char[] text, int offset, int length) { if (text == null) { - throw new ArgumentException("text can't be null", "text"); + throw new ArgumentException("text can't be null", nameof(text)); } int code = 0; int stop = offset + length; @@ -829,7 +825,7 @@ private int GetHashCode(ICharSequence text) { if (text == null) { - throw new ArgumentException("text can't be null", "text"); + throw new ArgumentException("text can't be null", nameof(text)); } int code = 0; @@ -857,7 +853,7 @@ private int GetHashCode(string text) { if (text == null) { - throw new ArgumentException("text can't be null", "text"); + throw new ArgumentException("text can't be null", nameof(text)); } int code = 0; @@ -943,7 +939,7 @@ public virtual bool TryGetValue(char[] key, int offset, int length, out TValue v value = val.Value; return true; } - value = default(TValue); + value = default; return false; } @@ -963,7 +959,7 @@ public virtual bool TryGetValue(char[] key, out TValue value) value = val.Value; return true; } - value = default(TValue); + value = default; return false; } @@ -983,7 +979,7 @@ public virtual bool TryGetValue(ICharSequence key, out TValue value) value = val.Value; return true; } - value = default(TValue); + value = default; return false; } @@ -1003,7 +999,7 @@ public virtual bool TryGetValue(string key, out TValue value) value = val.Value; return true; } - value = default(TValue); + value = default; return false; } @@ -1144,12 +1140,10 @@ public bool Contains(string item) public void CopyTo(string[] array, int arrayIndex) { - using (var iter = GetEnumerator()) + using var iter = GetEnumerator(); + for 
(int i = arrayIndex; iter.MoveNext(); i++) { - for (int i = arrayIndex; iter.MoveNext(); i++) - { - array[i] = iter.Current; - } + array[i] = iter.Current; } } @@ -1260,25 +1254,23 @@ IEnumerator IEnumerable.GetEnumerator() public override string ToString() { - using (var i = (ValueEnumerator)GetEnumerator()) - { - if (!i.HasNext) - return "[]"; + using var i = (ValueEnumerator)GetEnumerator(); + if (!i.HasNext) + return "[]"; - StringBuilder sb = new StringBuilder(); - sb.Append('['); - while (i.MoveNext()) + StringBuilder sb = new StringBuilder(); + sb.Append('['); + while (i.MoveNext()) + { + TValue value = i.Current; + if (sb.Length > 1) { - TValue value = i.Current; - if (sb.Length > 1) - { - sb.Append(',').Append(' '); - } - sb.Append(value.ToString()); + sb.Append(',').Append(' '); } - - return sb.Append(']').ToString(); + sb.Append(value.ToString()); } + + return sb.Append(']').ToString(); } /// @@ -1520,7 +1512,7 @@ public virtual TValue CurrentValue get { var val = outerInstance.values[lastPos]; - return val != null ? val.Value : default(TValue); + return val != null ? val.Value : default; } } @@ -1572,7 +1564,7 @@ public virtual KeyValuePair Current var val = outerInstance.values[lastPos]; return new KeyValuePair( new string(outerInstance.keys[lastPos]), - val != null ? val.Value : default(TValue)); + val != null ? val.Value : default); } } @@ -1735,12 +1727,8 @@ internal interface ICharArrayMap bool Put(string text); } - public class CharArrayMap + public static class CharArrayMap // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - // Prevent direct creation - private CharArrayMap() - { } - /// /// Returns a copy of the given map as a . If the given map /// is a the ignoreCase property will be preserved. 
@@ -1805,7 +1793,7 @@ public static CharArrayMap UnmodifiableMap(CharArrayMap { if (map == null) { - throw new ArgumentException("Given map is null", "map"); + throw new ArgumentException("Given map is null", nameof(map)); } if (map == CharArrayMap.EmptyMap() || map.Count == 0) { @@ -1826,7 +1814,7 @@ internal static ICharArrayMap UnmodifiableMap(ICharArrayMap map) { if (map == null) { - throw new ArgumentException("Given map is null", "map"); + throw new ArgumentException("Given map is null", nameof(map)); } if (map == CharArrayMap.EmptyMap() || map.Count == 0) { @@ -1971,7 +1959,7 @@ public override bool ContainsKey(char[] text, int offset, int length) { if (text == null) { - throw new ArgumentNullException("text"); + throw new ArgumentNullException(nameof(text)); } return false; } @@ -1980,7 +1968,7 @@ public override bool ContainsKey(char[] text) { if (text == null) { - throw new ArgumentNullException("text"); + throw new ArgumentNullException(nameof(text)); } return false; } @@ -1989,7 +1977,7 @@ public override bool ContainsKey(ICharSequence text) { if (text == null) { - throw new ArgumentNullException("text"); + throw new ArgumentNullException(nameof(text)); } return false; } @@ -1998,7 +1986,7 @@ public override bool ContainsKey(object o) { if (o == null) { - throw new ArgumentNullException("o"); + throw new ArgumentNullException(nameof(o)); } return false; } @@ -2007,36 +1995,36 @@ public override V Get(char[] text, int offset, int length) { if (text == null) { - throw new ArgumentNullException("text"); + throw new ArgumentNullException(nameof(text)); } - return default(V); + return default; } public override V Get(char[] text) { if (text == null) { - throw new ArgumentNullException("text"); + throw new ArgumentNullException(nameof(text)); } - return default(V); + return default; } public override V Get(ICharSequence text) { if (text == null) { - throw new ArgumentNullException("text"); + throw new ArgumentNullException(nameof(text)); } - return 
default(V); + return default; } public override V Get(object o) { if (o == null) { - throw new ArgumentNullException("o"); + throw new ArgumentNullException(nameof(o)); } - return default(V); + return default; } } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs index 3036aa84c0..465b9539b7 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArraySet.cs @@ -412,7 +412,7 @@ public virtual bool UnionWith(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (IsReadOnly) { @@ -439,7 +439,7 @@ public virtual bool UnionWith(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (IsReadOnly) { @@ -465,7 +465,7 @@ public virtual void UnionWith(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (IsReadOnly) { @@ -487,7 +487,7 @@ public virtual bool UnionWith(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (IsReadOnly) { @@ -559,7 +559,7 @@ public virtual bool IsSubsetOf(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (this.Count == 0) { @@ -577,8 +577,7 @@ public virtual bool IsSubsetOf(IEnumerable other) // we just need to return true if the other set // contains all of the elements of the this set, // but we need to use the comparison rules of the current set. 
- int foundCount, unfoundCount; - this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount); + this.GetFoundAndUnfoundCounts(other, out int foundCount, out int _); return foundCount == this.Count; } @@ -591,7 +590,7 @@ public virtual bool IsSubsetOf(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (this.Count == 0) { @@ -600,8 +599,7 @@ public virtual bool IsSubsetOf(IEnumerable other) // we just need to return true if the other set // contains all of the elements of the this set, // but we need to use the comparison rules of the current set. - int foundCount, unfoundCount; - this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount); + this.GetFoundAndUnfoundCounts(other, out int foundCount, out int _); return foundCount == this.Count; } @@ -614,7 +612,7 @@ public virtual bool IsSupersetOf(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } ICollection is2 = other as ICollection; if (is2 != null) @@ -641,7 +639,7 @@ public virtual bool IsSupersetOf(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } ICollection is2 = other as ICollection; if (is2 != null && is2.Count == 0) @@ -660,7 +658,7 @@ public virtual bool IsProperSubsetOf(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } ICollection is2 = other as ICollection; if (is2 != null) @@ -682,8 +680,7 @@ public virtual bool IsProperSubsetOf(IEnumerable other) // we just need to return true if the other set // contains all of the elements of the this set plus at least one more, // but we need to use the comparison rules of the current set. 
- int foundCount, unfoundCount; - this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount); + this.GetFoundAndUnfoundCounts(other, out int foundCount, out int unfoundCount); return foundCount == this.Count && unfoundCount > 0; } @@ -696,7 +693,7 @@ public virtual bool IsProperSubsetOf(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } ICollection is2 = other as ICollection; if (is2 != null && this.Count == 0) @@ -706,8 +703,7 @@ public virtual bool IsProperSubsetOf(IEnumerable other) // we just need to return true if the other set // contains all of the elements of the this set plus at least one more, // but we need to use the comparison rules of the current set. - int foundCount, unfoundCount; - this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount); + this.GetFoundAndUnfoundCounts(other, out int foundCount, out int unfoundCount); return foundCount == this.Count && unfoundCount > 0; } @@ -720,7 +716,7 @@ public virtual bool IsProperSupersetOf(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (this.Count == 0) { @@ -743,8 +739,7 @@ public virtual bool IsProperSupersetOf(IEnumerable other) return this.ContainsAll(set); } } - int foundCount, unfoundCount; - this.GetFoundAndUnfoundCounts(other, out foundCount, out unfoundCount); + this.GetFoundAndUnfoundCounts(other, out int foundCount, out int unfoundCount); return foundCount < this.Count && unfoundCount == 0; } @@ -757,7 +752,7 @@ public virtual bool IsProperSupersetOf(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (this.Count == 0) { @@ -768,8 +763,7 @@ public virtual bool IsProperSupersetOf(IEnumerable other) { return true; } - int foundCount, unfoundCount; - this.GetFoundAndUnfoundCounts(other, out foundCount, out 
unfoundCount); + this.GetFoundAndUnfoundCounts(other, out int foundCount, out int unfoundCount); return foundCount < this.Count && unfoundCount == 0; } @@ -782,7 +776,7 @@ public virtual bool Overlaps(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (this.Count != 0) { @@ -806,7 +800,7 @@ public virtual bool Overlaps(IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (this.Count != 0) { @@ -1144,7 +1138,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { @@ -1172,7 +1166,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { @@ -1200,7 +1194,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) //{ // if (other == null) // { - // throw new ArgumentNullException("other"); + // throw new ArgumentNullException(nameof(other)); // } // if (set.IsReadOnly) // { @@ -1228,7 +1222,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) //{ // if (other == null) // { - // throw new ArgumentNullException("other"); + // throw new ArgumentNullException(nameof(other)); // } // if (set.IsReadOnly) // { @@ -1256,7 +1250,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) //{ // if (other == null) // { - // throw new ArgumentNullException("other"); + // throw new ArgumentNullException(nameof(other)); // } // if (set.IsReadOnly) // { @@ -1284,7 +1278,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new 
ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { @@ -1312,7 +1306,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { @@ -1341,7 +1335,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { @@ -1369,7 +1363,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { @@ -1398,7 +1392,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { @@ -1427,7 +1421,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { @@ -1456,7 +1450,7 @@ public static bool UnionWith(this CharArraySet set, IEnumerable other) { if (other == null) { - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); } if (set.IsReadOnly) { diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/ElisionFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/ElisionFilterFactory.cs index ee67812af8..2f76cad966 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/ElisionFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/ElisionFilterFactory.cs @@ -47,7 +47,7 @@ public ElisionFilterFactory(IDictionary args) : base(args) ignoreCase = GetBoolean(args, "ignoreCase", false); if (args.Count > 0) { - throw new ArgumentException("Unknown 
parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs index 396cc003fc..259365c1fa 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/SegmentingTokenizerBase.cs @@ -68,7 +68,7 @@ public abstract class SegmentingTokenizerBase : Tokenizer /// be provided to this constructor. /// /// - public SegmentingTokenizerBase(TextReader reader, BreakIterator iterator) + protected SegmentingTokenizerBase(TextReader reader, BreakIterator iterator) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, reader, iterator) { } @@ -76,7 +76,7 @@ public SegmentingTokenizerBase(TextReader reader, BreakIterator iterator) /// /// Construct a new SegmenterBase, also supplying the /// - public SegmentingTokenizerBase(AttributeFactory factory, TextReader reader, BreakIterator iterator) + protected SegmentingTokenizerBase(AttributeFactory factory, TextReader reader, BreakIterator iterator) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(factory, reader) { offsetAtt = AddAttribute(); diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs index a6f0ebc41e..a8e5d638df 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/StemmerUtil.cs @@ -26,14 +26,8 @@ namespace Lucene.Net.Analysis.Util /// /// @lucene.internal /// - public class StemmerUtil + public static class StemmerUtil // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - /// - /// no instance - private 
StemmerUtil() - { - } - /// /// Returns true if the character array starts with the prefix. /// diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs index f49a375d99..27230cd9a9 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/WordlistLoader.cs @@ -31,19 +31,13 @@ namespace Lucene.Net.Analysis.Util /// to obtain instances. /// @lucene.internal /// - public class WordlistLoader + public static class WordlistLoader // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { private const int INITIAL_CAPACITY = 16; // LUCENENET specific private readonly static Regex WHITESPACE = new Regex("\\s+", RegexOptions.Compiled); - /// - /// no instance - private WordlistLoader() - { - } - // LUCENENET TODO: Add .NET overloads that accept a file name? Or at least a FileInfo object as was done in 3.0.3? /// diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs index 21a1705a8d..2e54313381 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerFactory.cs @@ -42,7 +42,7 @@ public WikipediaTokenizerFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs index fdcf8e7c32..ac043c622d 100644 --- a/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs +++ 
b/src/Lucene.Net.Analysis.Common/Analysis/Wikipedia/WikipediaTokenizerImpl.cs @@ -421,31 +421,29 @@ private static int ZzUnpackAttribute(string packed, int offset, int[] result) /// private int zzEndRead; - /// number of newlines encountered up to the start of the matched text - private int yyline; + ///// number of newlines encountered up to the start of the matched text + //private int yyline; /// the number of characters up to the start of the matched text private int yychar; -#pragma warning disable 169, 414 - /// - /// the number of characters from the last newline up to the start of the - /// matched text - /// - private int yycolumn; + ///// + ///// the number of characters from the last newline up to the start of the + ///// matched text + ///// + //private int yycolumn; // LUCENENET: Never read - /// - /// zzAtBOL == true <=> the scanner is currently at the beginning of a line - /// - private bool zzAtBOL = true; + ///// + ///// zzAtBOL == true <=> the scanner is currently at the beginning of a line + ///// + //private bool zzAtBOL = true; // LUCENENET: Never read /// zzAtEOF == true <=> the scanner is at the EOF private bool zzAtEOF; - /// denotes if the user-EOF-code has already been executed - private bool zzEOFDone; + ///// denotes if the user-EOF-code has already been executed + //private bool zzEOFDone; // LUCENENET: Never read -#pragma warning disable 169, 414 /* user code: */ @@ -633,12 +631,13 @@ public void YyClose() public void YyReset(TextReader reader) { zzReader = reader; - zzAtBOL = true; + //zzAtBOL = true; // LUCENENET: Never read zzAtEOF = false; - zzEOFDone = false; + //zzEOFDone = false; // LUCENENET: Never read zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; - yyline = yychar = yycolumn = 0; + //yyline = yychar = yycolumn = 0; // LUCENENET: Never read + yychar = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) zzBuffer = new char[ZZ_BUFFERSIZE]; diff --git 
a/src/Lucene.Net.Analysis.Common/Collation/CollationKeyFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Collation/CollationKeyFilterFactory.cs index 1f4391df80..081aaa98b6 100644 --- a/src/Lucene.Net.Analysis.Common/Collation/CollationKeyFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Common/Collation/CollationKeyFilterFactory.cs @@ -103,7 +103,7 @@ public CollationKeyFilterFactory(IDictionary args) : base(args) if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Among.cs b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Among.cs index d636a2f708..3e2bf8c060 100644 --- a/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Among.cs +++ b/src/Lucene.Net.Analysis.Common/Tartarus/Snowball/Among.cs @@ -102,7 +102,7 @@ public Among(string s, int substring_i, int result, private readonly MethodInfo method; /// object to invoke method on - public SnowballProgram MethodObject => MethodObject; + public SnowballProgram MethodObject => methodobject; private readonly SnowballProgram methodobject; } diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUFoldingFilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUFoldingFilterFactory.cs index 158775104e..8b444bfd12 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUFoldingFilterFactory.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUFoldingFilterFactory.cs @@ -43,9 +43,9 @@ public class ICUFoldingFilterFactory : TokenFilterFactory, IMultiTermAwareCompon public ICUFoldingFilterFactory(IDictionary args) : base(args) { - if (args.Count != 0) + if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git 
a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs index 49ff4a3ed1..93018bb574 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilter.cs @@ -63,22 +63,14 @@ public ICUNormalizer2CharFilter(TextReader input) public ICUNormalizer2CharFilter(TextReader input, Normalizer2 normalizer) : this(input, normalizer, 128) { - if (normalizer == null) - { - throw new ArgumentNullException("normalizer"); - } - this.normalizer = normalizer; + this.normalizer = normalizer ?? throw new ArgumentNullException(nameof(normalizer)); } // for testing ONLY internal ICUNormalizer2CharFilter(TextReader input, Normalizer2 normalizer, int bufferSize) : base(input) { - if (normalizer == null) - { - throw new ArgumentNullException("normalizer"); - } - this.normalizer = normalizer; + this.normalizer = normalizer ?? throw new ArgumentNullException(nameof(normalizer)); this.tmpBuffer = CharacterUtils.NewCharacterBuffer(bufferSize); } diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilterFactory.cs index 993ddf2766..730bb59bb6 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilterFactory.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2CharFilterFactory.cs @@ -81,7 +81,7 @@ public ICUNormalizer2CharFilterFactory(IDictionary args) } if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } this.normalizer = normalizer; } diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2FilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2FilterFactory.cs index cf11e70943..190886a8e5 100644 --- 
a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2FilterFactory.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUNormalizer2FilterFactory.cs @@ -83,7 +83,7 @@ public ICUNormalizer2FilterFactory(IDictionary args) } if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } this.normalizer = normalizer; } diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUTransformFilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUTransformFilterFactory.cs index 9627804c13..ece0cd2421 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUTransformFilterFactory.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/ICUTransformFilterFactory.cs @@ -49,9 +49,9 @@ public ICUTransformFilterFactory(IDictionary args) string direction = Get(args, "direction", new string[] { "forward", "reverse" }, "forward", false); TransliterationDirection dir = "forward".Equals(direction, StringComparison.Ordinal) ? 
Transliterator.Forward : Transliterator.Reverse; transliterator = Transliterator.GetInstance(id, dir); - if (args.Count != 0) + if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/DefaultICUTokenizerConfig.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/DefaultICUTokenizerConfig.cs index b6093cba53..ef3444d6cf 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/DefaultICUTokenizerConfig.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/DefaultICUTokenizerConfig.cs @@ -131,18 +131,16 @@ public override string GetType(int script, int ruleStatus) private static RuleBasedBreakIterator ReadBreakIterator(string filename) { - using (Stream @is = typeof(DefaultICUTokenizerConfig).FindAndGetManifestResourceStream(filename)) + using Stream @is = typeof(DefaultICUTokenizerConfig).FindAndGetManifestResourceStream(filename); + try { - try - { - RuleBasedBreakIterator bi = - RuleBasedBreakIterator.GetInstanceFromCompiledRules(@is); - return bi; - } - catch (IOException e) - { - throw new Exception(e.ToString(), e); - } + RuleBasedBreakIterator bi = + RuleBasedBreakIterator.GetInstanceFromCompiledRules(@is); + return bi; + } + catch (IOException e) + { + throw new Exception(e.ToString(), e); } } } diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs index f959065f9d..aa743898c4 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs @@ -97,9 +97,9 @@ public ICUTokenizerFactory(IDictionary args) } cjkAsWords = GetBoolean(args, "cjkAsWords", true); myanmarAsWords = GetBoolean(args, 
"myanmarAsWords", true); - if (args.Count != 0) + if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/TokenAttributes/ScriptAttributeImpl.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/TokenAttributes/ScriptAttributeImpl.cs index 99bd81761c..ce1e1260f0 100644 --- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/TokenAttributes/ScriptAttributeImpl.cs +++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/TokenAttributes/ScriptAttributeImpl.cs @@ -72,9 +72,9 @@ public override bool Equals(object other) return true; } - if (other is ScriptAttribute) + if (other is ScriptAttribute scriptAttribute) { - return ((ScriptAttribute)other).code == code; + return scriptAttribute.code == code; } return false; diff --git a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilter.cs b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilter.cs index bd4c47d290..527df4402c 100644 --- a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilter.cs +++ b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilter.cs @@ -59,8 +59,8 @@ namespace Lucene.Net.Collation [ExceptionToClassNameConvention] public sealed class ICUCollationKeyFilter : TokenFilter { - private Collator collator = null; - private RawCollationKey reusableKey = new RawCollationKey(); + private readonly Collator collator = null; // LUCENENET: marked readonly + private readonly RawCollationKey reusableKey = new RawCollationKey(); // LUCENENET: marked readonly private readonly ICharTermAttribute termAtt; /// diff --git a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilterFactory.cs b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilterFactory.cs index 4ba29c24d7..dd30193408 100644 --- a/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilterFactory.cs +++ 
b/src/Lucene.Net.Analysis.ICU/Collation/ICUCollationKeyFilterFactory.cs @@ -101,9 +101,9 @@ public ICUCollationKeyFilterFactory(IDictionary args) + "To tailor rules for a built-in language, see the javadocs for RuleBasedCollator. " + "Then save the entire customized ruleset to a file, and use with the custom parameter"); - if (args.Count != 0) + if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } @@ -240,12 +240,12 @@ public virtual AbstractAnalysisFactory GetMultiTermComponent() return this; } - private string ToUTF8String(Stream input) + private static string ToUTF8String(Stream input) // LUCENENET: CA1822: Mark members as static { StringBuilder sb = new StringBuilder(); char[] buffer = new char[1024]; TextReader r = IOUtils.GetDecodingReader(input, Encoding.UTF8); - int len = 0; + int len; // LUCENENET: IDE0059: Remove unnecessary value assignment while ((len = r.Read(buffer, 0, buffer.Length)) > 0) { sb.Append(buffer, 0, len); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs index 3532165a85..eb3c754fc1 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs @@ -53,20 +53,20 @@ public abstract class BinaryDictionary : IDictionary private readonly string[] inflFormDict; // LUCENENET specific - variable to hold the name of the data directory (or empty string to load embedded resources) - private static readonly string DATA_DIR; + private static readonly string DATA_DIR = LoadDataDir(); // LUCENENET specific - name of the subdirectory inside of the directory where the Kuromoji dictionary files reside. 
- private static readonly string DATA_SUBDIR = "kuromoji-data"; + private const string DATA_SUBDIR = "kuromoji-data"; - static BinaryDictionary() + private static string LoadDataDir() { // LUCENENET specific - reformatted with :, renamed from "analysis.data.dir" string currentPath = SystemProperties.GetProperty("kuromoji:data:dir", #if FEATURE_APPDOMAIN_BASEDIRECTORY - AppDomain.CurrentDomain.BaseDirectory + AppDomain.CurrentDomain.BaseDirectory #else - System.AppContext.BaseDirectory + System.AppContext.BaseDirectory #endif - ); + ); // If a matching directory path is found, set our DATA_DIR static // variable. If it is null or empty after this process, we need to @@ -74,8 +74,7 @@ static BinaryDictionary() string candidatePath = System.IO.Path.Combine(currentPath, DATA_SUBDIR); if (System.IO.Directory.Exists(candidatePath)) { - DATA_DIR = candidatePath; - return; + return candidatePath; } while (new DirectoryInfo(currentPath).Parent != null) @@ -85,8 +84,7 @@ static BinaryDictionary() candidatePath = System.IO.Path.Combine(new DirectoryInfo(currentPath).Parent.FullName, DATA_SUBDIR); if (System.IO.Directory.Exists(candidatePath)) { - DATA_DIR = candidatePath; - return; + return candidatePath; } currentPath = new DirectoryInfo(currentPath).Parent.FullName; } @@ -95,6 +93,8 @@ static BinaryDictionary() // ignore security errors } } + + return null; // This is the signal to load from local resources } protected BinaryDictionary() @@ -103,7 +103,7 @@ protected BinaryDictionary() string[] posDict = null; string[] inflFormDict = null; string[] inflTypeDict = null; - ByteBuffer buffer = null; + ByteBuffer buffer; // LUCENENET: IDE0059: Remove unnecessary value assignment using (Stream mapIS = GetResource(TARGETMAP_FILENAME_SUFFIX)) { diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs index 7e0938a5b2..2d6b285785 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs 
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/CharacterDefinition.cs @@ -60,17 +60,15 @@ private enum CharacterClass : byte private CharacterDefinition() { - using (Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX)) + using Stream @is = BinaryDictionary.GetTypeResource(GetType(), FILENAME_SUFFIX); + DataInput @in = new InputStreamDataInput(@is); + CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION); + @in.ReadBytes(characterCategoryMap, 0, characterCategoryMap.Length); + for (int i = 0; i < CLASS_COUNT; i++) { - DataInput @in = new InputStreamDataInput(@is); - CodecUtil.CheckHeader(@in, HEADER, VERSION, VERSION); - @in.ReadBytes(characterCategoryMap, 0, characterCategoryMap.Length); - for (int i = 0; i < CLASS_COUNT; i++) - { - byte b = @in.ReadByte(); - invokeMap[i] = (b & 0x01) != 0; - groupMap[i] = (b & 0x02) != 0; - } + byte b = @in.ReadByte(); + invokeMap[i] = (b & 0x01) != 0; + groupMap[i] = (b & 0x02) != 0; } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/Dictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/Dictionary.cs index 4c24a4eb69..1b8b808560 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/Dictionary.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/Dictionary.cs @@ -99,7 +99,7 @@ public interface IDictionary } // LUCENENT TODO: Make this whole thing into an abstact class?? 
- public class Dictionary + public static class Dictionary // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public static readonly string INTERNAL_SEPARATOR = "\u0000"; } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs index d98f613cc0..e42ce66e1b 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/UserDictionary.cs @@ -1,5 +1,6 @@ using J2N.Text; using Lucene.Net.Analysis.Ja.Util; +using Lucene.Net.Support; using Lucene.Net.Util; using Lucene.Net.Util.Fst; using System; @@ -43,13 +44,13 @@ public sealed class UserDictionary : IDictionary // holds readings and POS, indexed by wordid private readonly string[] data; - private static readonly int CUSTOM_DICTIONARY_WORD_ID_OFFSET = 100000000; + private const int CUSTOM_DICTIONARY_WORD_ID_OFFSET = 100000000; - public static readonly int WORD_COST = -100000; + public const int WORD_COST = -100000; - public static readonly int LEFT_ID = 5; + public const int LEFT_ID = 5; - public static readonly int RIGHT_ID = 5; + public const int RIGHT_ID = 5; private static readonly Regex specialChars = new Regex(@"#.*$", RegexOptions.Compiled); private static readonly Regex commentLine = new Regex(@" *", RegexOptions.Compiled); @@ -169,14 +170,14 @@ public int[][] Lookup(char[] chars, int off, int len) public TokenInfoFST FST => fst; - private static readonly int[][] EMPTY_RESULT = new int[0][]; + private static readonly int[][] EMPTY_RESULT = Arrays.Empty(); /// /// Convert Map of index and wordIdAndLength to array of {wordId, index, length} /// /// /// Array of {wordId, index, length}. 
- private int[][] ToIndexArray(IDictionary input) + private static int[][] ToIndexArray(IDictionary input) // LUCENENET: CA1822: Mark members as static { List result = new List(); foreach (int i in input.Keys) diff --git a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs index 0570a17060..e2c922e36c 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/GraphvizFormatter.cs @@ -31,11 +31,11 @@ namespace Lucene.Net.Analysis.Ja /// public class GraphvizFormatter { - private readonly static string BOS_LABEL = "BOS"; + private const string BOS_LABEL = "BOS"; - private readonly static string EOS_LABEL = "EOS"; + private const string EOS_LABEL = "EOS"; - private readonly static string FONT_NAME = "Helvetica"; + private const string FONT_NAME = "Helvetica"; private readonly ConnectionCosts costs; @@ -135,8 +135,7 @@ private string FormatNodes(JapaneseTokenizer tok, WrappedPositionArray positions sb.Append(toNodeID); string attrs; - string path; - bestPathMap.TryGetValue(fromNodeID, out path); + bestPathMap.TryGetValue(fromNodeID, out string path); if (toNodeID.Equals(path, StringComparison.Ordinal)) { // This arc is on best path @@ -193,7 +192,7 @@ private string FormatTrailer() return "}"; } - private string GetNodeID(int pos, int idx) + private static string GetNodeID(int pos, int idx) // LUCENENET: CA1822: Mark members as static { return pos + "." 
+ idx; } diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseBaseFormFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseBaseFormFilterFactory.cs index 5524be7301..cec2cfd15a 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseBaseFormFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseBaseFormFilterFactory.cs @@ -40,7 +40,7 @@ public JapaneseBaseFormFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs index ac781db9db..58a60b8fee 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilter.cs @@ -60,10 +60,10 @@ public class JapaneseIterationMarkCharFilter : CharFilter private const char FULL_STOP_PUNCTUATION = '\u3002'; // 。 // Hiragana to dakuten map (lookup using code point - 0x30ab(か)*/ - private static char[] h2d = new char[50]; + private static readonly char[] h2d = new char[50]; // LUCENENET: marked readonly // Katakana to dakuten map (lookup using code point - 0x30ab(カ - private static char[] k2d = new char[50]; + private static readonly char[] k2d = new char[50]; // LUCENENET: marked readonly private readonly RollingCharBuffer buffer = new RollingCharBuffer(); @@ -73,9 +73,9 @@ public class JapaneseIterationMarkCharFilter : CharFilter private int iterationMarkSpanEndPosition = 0; - private bool normalizeKanji; + private readonly bool normalizeKanji; // LUCENENET: marked readonly - private bool normalizeKana; + private readonly bool normalizeKana; // LUCENENET: marked readonly static JapaneseIterationMarkCharFilter() { @@ -327,7 +327,7 @@ private char Normalize(char c, char m) /// Hiragana character. 
/// Repetition mark referring to . /// Normalized character - return on illegal iteration marks. - private char NormalizedHiragana(char c, char m) + private static char NormalizedHiragana(char c, char m) // LUCENENET: CA1822: Mark members as static { switch (m) { @@ -346,7 +346,7 @@ private char NormalizedHiragana(char c, char m) /// Katakana character. /// Repetition mark referring to . /// Normalized character - return on illegal iteration marks. - private char NormalizedKatakana(char c, char m) + private static char NormalizedKatakana(char c, char m) // LUCENENET: CA1822: Mark members as static { switch (m) { @@ -425,7 +425,7 @@ private bool IsKanjiIterationMark(char c) /// /// Character to look up. /// Hiragana dakuten variant of c or c itself if no dakuten variant exists. - private char LookupHiraganaDakuten(char c) + private static char LookupHiraganaDakuten(char c) // LUCENENET: CA1822: Mark members as static { return Lookup(c, h2d, '\u304b'); // Code point is for か } @@ -435,7 +435,7 @@ private char LookupHiraganaDakuten(char c) /// /// Character to look up. /// Katakana dakuten variant of or itself if no dakuten variant exists. - private char LookupKatakanaDakuten(char c) + private static char LookupKatakanaDakuten(char c) // LUCENENET: CA1822: Mark members as static { return Lookup(c, k2d, '\u30ab'); // Code point is for カ } @@ -445,7 +445,7 @@ private char LookupKatakanaDakuten(char c) /// /// Character to check. /// true if c is a hiragana dakuten and otherwise false. - private bool IsHiraganaDakuten(char c) + private static bool IsHiraganaDakuten(char c) // LUCENENET: CA1822: Mark members as static { return Inside(c, h2d, '\u304b') && c == LookupHiraganaDakuten(c); } @@ -455,7 +455,7 @@ private bool IsHiraganaDakuten(char c) /// /// Character to check. /// true if c is a hiragana dakuten and otherwise false. 
- private bool IsKatakanaDakuten(char c) + private static bool IsKatakanaDakuten(char c) // LUCENENET: CA1822: Mark members as static { return Inside(c, k2d, '\u30ab') && c == LookupKatakanaDakuten(c); } @@ -468,7 +468,7 @@ private bool IsKatakanaDakuten(char c) /// Dakuten map. /// Code point offset from . /// Mapped character or if no mapping exists. - private char Lookup(char c, char[] map, char offset) + private static char Lookup(char c, char[] map, char offset) // LUCENENET: CA1822: Mark members as static { if (!Inside(c, map, offset)) { @@ -487,7 +487,7 @@ private char Lookup(char c, char[] map, char offset) /// Dakuten map. /// Code point offset from . /// true if is mapped by map and otherwise false. - private bool Inside(char c, char[] map, char offset) + private static bool Inside(char c, char[] map, char offset) // LUCENENET: CA1822: Mark members as static { return c >= offset && c < offset + map.Length; } diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilterFactory.cs index c9518c96b4..2550784782 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseIterationMarkCharFilterFactory.cs @@ -35,8 +35,8 @@ namespace Lucene.Net.Analysis.Ja /// public class JapaneseIterationMarkCharFilterFactory : CharFilterFactory, IMultiTermAwareComponent { - private static readonly string NORMALIZE_KANJI_PARAM = "normalizeKanji"; - private static readonly string NORMALIZE_KANA_PARAM = "normalizeKana"; + private const string NORMALIZE_KANJI_PARAM = "normalizeKanji"; + private const string NORMALIZE_KANA_PARAM = "normalizeKana"; private readonly bool normalizeKanji; private readonly bool normalizeKana; @@ -49,7 +49,7 @@ public JapaneseIterationMarkCharFilterFactory(IDictionary args) normalizeKana = GetBoolean(args, NORMALIZE_KANA_PARAM, JapaneseIterationMarkCharFilter.NORMALIZE_KANA_DEFAULT); if 
(args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilter.cs index 857e5bf66b..b250fdbfef 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilter.cs @@ -37,8 +37,8 @@ namespace Lucene.Net.Analysis.Ja /// public sealed class JapaneseKatakanaStemFilter : TokenFilter { - public readonly static int DEFAULT_MINIMUM_LENGTH = 4; - private readonly static char HIRAGANA_KATAKANA_PROLONGED_SOUND_MARK = '\u30fc'; + public const int DEFAULT_MINIMUM_LENGTH = 4; + private const char HIRAGANA_KATAKANA_PROLONGED_SOUND_MARK = '\u30fc'; private readonly ICharTermAttribute termAttr; private readonly IKeywordAttribute keywordAttr; @@ -95,7 +95,7 @@ private int Stem(char[] term, int length) return length; } - private bool IsKatakana(char[] term, int length) + private static bool IsKatakana(char[] term, int length) // LUCENENET: CA1822: Mark members as static { for (int i = 0; i < length; i++) { diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilterFactory.cs index af2acb55fa..ec322ee398 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseKatakanaStemFilterFactory.cs @@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.Ja /// public class JapaneseKatakanaStemFilterFactory : TokenFilterFactory { - private static readonly string MINIMUM_LENGTH_PARAM = "minimumLength"; + private const string MINIMUM_LENGTH_PARAM = "minimumLength"; private readonly int minimumLength; /// Creates a new @@ -49,7 +49,7 @@ public JapaneseKatakanaStemFilterFactory(IDictionary args) } if 
(args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs index f52c5504b4..c66f7c37c4 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapanesePartOfSpeechStopFilterFactory.cs @@ -49,7 +49,7 @@ public JapanesePartOfSpeechStopFilterFactory(IDictionary args) enablePositionIncrements = GetBoolean(args, "enablePositionIncrements", true); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilter.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilter.cs index b2e1542691..3cc563f3f0 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilter.cs @@ -32,8 +32,8 @@ public sealed class JapaneseReadingFormFilter : TokenFilter private readonly ICharTermAttribute termAttr; private readonly IReadingAttribute readingAttr; - private StringBuilder buffer = new StringBuilder(); - private bool useRomaji; + private readonly StringBuilder buffer = new StringBuilder(); // LUCENENET: marked readonly + private readonly bool useRomaji; // LUCENENET: marked readonly public JapaneseReadingFormFilter(TokenStream input, bool useRomaji) : base(input) diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilterFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilterFactory.cs index 9464c2efc9..705b656d94 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilterFactory.cs 
+++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseReadingFormFilterFactory.cs @@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis.Ja /// public class JapaneseReadingFormFilterFactory : TokenFilterFactory { - private static readonly string ROMAJI_PARAM = "useRomaji"; + private const string ROMAJI_PARAM = "useRomaji"; private readonly bool useRomaji; /// Creates a new . @@ -45,7 +45,7 @@ public JapaneseReadingFormFilterFactory(IDictionary args) useRomaji = GetBoolean(args, ROMAJI_PARAM, false); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs index 2c595715cc..a635e987ac 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizer.cs @@ -65,19 +65,21 @@ public sealed class JapaneseTokenizer : Tokenizer // LUCENENET specific: de-nested Type and renamed JapaneseTokenizerType - private static readonly bool VERBOSE = false; +#pragma warning disable CA1802 // Use literals where appropriate + private static readonly bool VERBOSE = false; // For debugging +#pragma warning restore CA1802 // Use literals where appropriate - private static readonly int SEARCH_MODE_KANJI_LENGTH = 2; + private const int SEARCH_MODE_KANJI_LENGTH = 2; - private static readonly int SEARCH_MODE_OTHER_LENGTH = 7; // Must be >= SEARCH_MODE_KANJI_LENGTH + private const int SEARCH_MODE_OTHER_LENGTH = 7; // Must be >= SEARCH_MODE_KANJI_LENGTH - private static readonly int SEARCH_MODE_KANJI_PENALTY = 3000; + private const int SEARCH_MODE_KANJI_PENALTY = 3000; - private static readonly int SEARCH_MODE_OTHER_PENALTY = 1700; + private const int SEARCH_MODE_OTHER_PENALTY = 1700; // For safety: - private static readonly int MAX_UNKNOWN_WORD_LENGTH = 1024; - private static 
readonly int MAX_BACKTRACE_GAP = 1024; + private const int MAX_UNKNOWN_WORD_LENGTH = 1024; + private const int MAX_BACKTRACE_GAP = 1024; private readonly IDictionary dictionaryMap = new Dictionary(); @@ -1177,8 +1179,7 @@ private void Backtrace(Position endPosData, int fromIDX) internal IDictionary GetDict(JapaneseTokenizerType type) { - IDictionary result; - dictionaryMap.TryGetValue(type, out result); + dictionaryMap.TryGetValue(type, out IDictionary result); return result; } diff --git a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizerFactory.cs b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizerFactory.cs index 738226a315..0ba2c8d97a 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/JapaneseTokenizerFactory.cs @@ -43,13 +43,13 @@ namespace Lucene.Net.Analysis.Ja /// public class JapaneseTokenizerFactory : TokenizerFactory, IResourceLoaderAware { - private static readonly string MODE = "mode"; + private const string MODE = "mode"; - private static readonly string USER_DICT_PATH = "userDictionary"; + private const string USER_DICT_PATH = "userDictionary"; - private static readonly string USER_DICT_ENCODING = "userDictionaryEncoding"; + private const string USER_DICT_ENCODING = "userDictionaryEncoding"; - private static readonly string DISCARD_PUNCTUATION = "discardPunctuation"; // Expert option + private const string DISCARD_PUNCTUATION = "discardPunctuation"; // Expert option private UserDictionary userDictionary; @@ -68,7 +68,7 @@ public JapaneseTokenizerFactory(IDictionary args) discardPunctuation = GetBoolean(args, DISCARD_PUNCTUATION, true); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/BaseFormAttributeImpl.cs 
b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/BaseFormAttributeImpl.cs index e9c82c85e6..4df05e4509 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/BaseFormAttributeImpl.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/BaseFormAttributeImpl.cs @@ -31,7 +31,7 @@ public class BaseFormAttribute : Attribute, IBaseFormAttribute public virtual string GetBaseForm() { - return token == null ? null : token.GetBaseForm(); + return token?.GetBaseForm(); } public virtual void SetToken(Token token) diff --git a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/InflectionAttributeImpl.cs b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/InflectionAttributeImpl.cs index fe0dae81a5..9f0bf2cd00 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/InflectionAttributeImpl.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/InflectionAttributeImpl.cs @@ -32,12 +32,12 @@ public class InflectionAttribute : Attribute, IInflectionAttribute public virtual string GetInflectionType() { - return token == null ? null : token.GetInflectionType(); + return token?.GetInflectionType(); } public virtual string GetInflectionForm() { - return token == null ? null : token.GetInflectionForm(); + return token?.GetInflectionForm(); } public virtual void SetToken(Token token) diff --git a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/PartOfSpeechAttributeImpl.cs b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/PartOfSpeechAttributeImpl.cs index c043511e29..cf589c160f 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/PartOfSpeechAttributeImpl.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/PartOfSpeechAttributeImpl.cs @@ -32,7 +32,7 @@ public class PartOfSpeechAttribute : Attribute, IPartOfSpeechAttribute public virtual string GetPartOfSpeech() { - return token == null ? 
null : token.GetPartOfSpeech(); + return token?.GetPartOfSpeech(); } public virtual void SetToken(Token token) diff --git a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/ReadingAttributeImpl.cs b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/ReadingAttributeImpl.cs index 6697598342..6cb656f9bd 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/ReadingAttributeImpl.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/TokenAttributes/ReadingAttributeImpl.cs @@ -32,12 +32,12 @@ public class ReadingAttribute : Attribute, IReadingAttribute public virtual string GetReading() { - return token == null ? null : token.GetReading(); + return token?.GetReading(); } public virtual string GetPronunciation() { - return token == null ? null : token.GetPronunciation(); + return token?.GetPronunciation(); } public virtual void SetToken(Token token) diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs index 14bfb117a1..9843dc7f04 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/BinaryDictionaryWriter.cs @@ -38,7 +38,7 @@ public abstract class BinaryDictionaryWriter private int[] targetMapOffsets = new int[8192]; private readonly List posDict = new List(); - public BinaryDictionaryWriter(Type implClazz, int size) + protected BinaryDictionaryWriter(Type implClazz, int size) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_implClazz = implClazz; m_buffer = ByteBuffer.Allocate(size); @@ -296,59 +296,55 @@ protected virtual void WriteTargetMap(string filename) { //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); - using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write)) + using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); + DataOutput @out = new 
OutputStreamDataOutput(os); + CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION); + + int numSourceIds = lastSourceId + 1; + @out.WriteVInt32(targetMapEndOffset); // <-- size of main array + @out.WriteVInt32(numSourceIds + 1); // <-- size of offset array (+ 1 more entry) + int prev = 0, sourceId = 0; + for (int ofs = 0; ofs < targetMapEndOffset; ofs++) { - DataOutput @out = new OutputStreamDataOutput(os); - CodecUtil.WriteHeader(@out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION); - - int numSourceIds = lastSourceId + 1; - @out.WriteVInt32(targetMapEndOffset); // <-- size of main array - @out.WriteVInt32(numSourceIds + 1); // <-- size of offset array (+ 1 more entry) - int prev = 0, sourceId = 0; - for (int ofs = 0; ofs < targetMapEndOffset; ofs++) + int val = targetMap[ofs], delta = val - prev; + if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0); + if (ofs == targetMapOffsets[sourceId]) { - int val = targetMap[ofs], delta = val - prev; - if (Debugging.AssertsEnabled) Debugging.Assert(delta >= 0); - if (ofs == targetMapOffsets[sourceId]) - { - @out.WriteVInt32((delta << 1) | 0x01); - sourceId++; - } - else - { - @out.WriteVInt32((delta << 1)); - } - prev += delta; + @out.WriteVInt32((delta << 1) | 0x01); + sourceId++; + } + else + { + @out.WriteVInt32((delta << 1)); } - if (Debugging.AssertsEnabled) Debugging.Assert(sourceId == numSourceIds,"sourceId:{0} != numSourceIds:{1}", sourceId, numSourceIds); + prev += delta; } + if (Debugging.AssertsEnabled) Debugging.Assert(sourceId == numSourceIds, "sourceId:{0} != numSourceIds:{1}", sourceId, numSourceIds); } protected virtual void WritePosDict(string filename) { //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); - using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write)) + using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); + 
DataOutput @out = new OutputStreamDataOutput(os); + CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION); + @out.WriteVInt32(posDict.Count); + foreach (string s in posDict) { - DataOutput @out = new OutputStreamDataOutput(os); - CodecUtil.WriteHeader(@out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION); - @out.WriteVInt32(posDict.Count); - foreach (string s in posDict) + if (s == null) { - if (s == null) - { - @out.WriteByte((byte)0); - @out.WriteByte((byte)0); - @out.WriteByte((byte)0); - } - else - { - string[] data = CSVUtil.Parse(s); - if (Debugging.AssertsEnabled) Debugging.Assert(data.Length == 3,"malformed pos/inflection: {0}", s); - @out.WriteString(data[0]); - @out.WriteString(data[1]); - @out.WriteString(data[2]); - } + @out.WriteByte((byte)0); + @out.WriteByte((byte)0); + @out.WriteByte((byte)0); + } + else + { + string[] data = CSVUtil.Parse(s); + if (Debugging.AssertsEnabled) Debugging.Assert(data.Length == 3, "malformed pos/inflection: {0}", s); + @out.WriteString(data[0]); + @out.WriteString(data[1]); + @out.WriteString(data[2]); } } } @@ -357,24 +353,22 @@ protected virtual void WriteDictionary(string filename) { //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); - using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write)) - { - DataOutput @out = new OutputStreamDataOutput(os); - CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER, BinaryDictionary.VERSION); - @out.WriteVInt32(m_buffer.Position); + using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); + DataOutput @out = new OutputStreamDataOutput(os); + CodecUtil.WriteHeader(@out, BinaryDictionary.DICT_HEADER, BinaryDictionary.VERSION); + @out.WriteVInt32(m_buffer.Position); - //WritableByteChannel channel = Channels.newChannel(os); - // Write Buffer - m_buffer.Flip(); // set position to 0, set limit to current position - 
//channel.write(buffer); + //WritableByteChannel channel = Channels.newChannel(os); + // Write Buffer + m_buffer.Flip(); // set position to 0, set limit to current position + //channel.write(buffer); - while (m_buffer.HasRemaining) - { - @out.WriteByte(m_buffer.Get()); - } - - if (Debugging.AssertsEnabled) Debugging.Assert(m_buffer.Remaining == 0L); + while (m_buffer.HasRemaining) + { + @out.WriteByte(m_buffer.Get()); } + + if (Debugging.AssertsEnabled) Debugging.Assert(m_buffer.Remaining == 0L); } } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs index 9f019fb437..d963d1252e 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/CharacterDefinitionWriter.cs @@ -58,7 +58,9 @@ public void PutCharacterCategory(int codePoint, string characterClassName) characterCategoryMap[codePoint] = CharacterDefinition.LookupCharacterClass(characterClassName); } +#pragma warning disable IDE0060 // Remove unused parameter public void PutInvokeDefinition(string characterClassName, int invoke, int group, int length) +#pragma warning restore IDE0060 // Remove unused parameter { byte characterClass = CharacterDefinition.LookupCharacterClass(characterClassName); invokeMap[characterClass] = invoke == 1; @@ -77,19 +79,17 @@ public void Write(string baseDir) string filename = System.IO.Path.Combine(baseDir, typeof(CharacterDefinition).Name + CharacterDefinition.FILENAME_SUFFIX); //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(baseDir)); - using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write)) + using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); + DataOutput @out = new OutputStreamDataOutput(os); + CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER, CharacterDefinition.VERSION); + 
@out.WriteBytes(characterCategoryMap, 0, characterCategoryMap.Length); + for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++) { - DataOutput @out = new OutputStreamDataOutput(os); - CodecUtil.WriteHeader(@out, CharacterDefinition.HEADER, CharacterDefinition.VERSION); - @out.WriteBytes(characterCategoryMap, 0, characterCategoryMap.Length); - for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++) - { - byte b = (byte)( - (invokeMap[i] ? 0x01 : 0x00) | - (groupMap[i] ? 0x02 : 0x00) - ); - @out.WriteByte(b); - } + byte b = (byte)( + (invokeMap[i] ? 0x01 : 0x00) | + (groupMap[i] ? 0x02 : 0x00) + ); + @out.WriteByte(b); } } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs index 1d7d8e37f3..d15ddb9aef 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsBuilder.cs @@ -24,46 +24,40 @@ namespace Lucene.Net.Analysis.Ja.Util * limitations under the License. 
*/ - public class ConnectionCostsBuilder + public static class ConnectionCostsBuilder // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { private static readonly Regex whiteSpaceRegex = new Regex("\\s+", RegexOptions.Compiled); - private ConnectionCostsBuilder() - { - } - public static ConnectionCostsWriter Build(string filename) { - using (Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read)) - { - StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII); + using Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read); + StreamReader streamReader = new StreamReader(inputStream, Encoding.ASCII); - string line = streamReader.ReadLine(); - string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd(); + string line = streamReader.ReadLine(); + string[] dimensions = whiteSpaceRegex.Split(line).TrimEnd(); - if (Debugging.AssertsEnabled) Debugging.Assert(dimensions.Length == 2); + if (Debugging.AssertsEnabled) Debugging.Assert(dimensions.Length == 2); - int forwardSize = int.Parse(dimensions[0], CultureInfo.InvariantCulture); - int backwardSize = int.Parse(dimensions[1], CultureInfo.InvariantCulture); + int forwardSize = int.Parse(dimensions[0], CultureInfo.InvariantCulture); + int backwardSize = int.Parse(dimensions[1], CultureInfo.InvariantCulture); - if (Debugging.AssertsEnabled) Debugging.Assert(forwardSize > 0 && backwardSize > 0); + if (Debugging.AssertsEnabled) Debugging.Assert(forwardSize > 0 && backwardSize > 0); - ConnectionCostsWriter costs = new ConnectionCostsWriter(forwardSize, backwardSize); + ConnectionCostsWriter costs = new ConnectionCostsWriter(forwardSize, backwardSize); - while ((line = streamReader.ReadLine()) != null) - { - string[] fields = whiteSpaceRegex.Split(line).TrimEnd(); + while ((line = streamReader.ReadLine()) != null) + { + string[] fields = whiteSpaceRegex.Split(line).TrimEnd(); - if (Debugging.AssertsEnabled) 
Debugging.Assert(fields.Length == 3); + if (Debugging.AssertsEnabled) Debugging.Assert(fields.Length == 3); - int forwardId = int.Parse(fields[0], CultureInfo.InvariantCulture); - int backwardId = int.Parse(fields[1], CultureInfo.InvariantCulture); - int cost = int.Parse(fields[2], CultureInfo.InvariantCulture); + int forwardId = int.Parse(fields[0], CultureInfo.InvariantCulture); + int backwardId = int.Parse(fields[1], CultureInfo.InvariantCulture); + int cost = int.Parse(fields[2], CultureInfo.InvariantCulture); - costs.Add(forwardId, backwardId, cost); - } - return costs; + costs.Add(forwardId, backwardId, cost); } + return costs; } } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs index 4b5fdb4943..936999db26 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/ConnectionCostsWriter.cs @@ -56,23 +56,21 @@ public void Write(string baseDir) string filename = System.IO.Path.Combine(baseDir, typeof(ConnectionCosts).Name + CharacterDefinition.FILENAME_SUFFIX); //new File(filename).getParentFile().mkdirs(); System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(filename)); - using (Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write)) + using Stream os = new FileStream(filename, FileMode.Create, FileAccess.Write); + DataOutput @out = new OutputStreamDataOutput(os); + CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER, ConnectionCosts.VERSION); + @out.WriteVInt32(forwardSize); + @out.WriteVInt32(backwardSize); + int last = 0; + if (Debugging.AssertsEnabled) Debugging.Assert(costs.Length == backwardSize); + foreach (short[] a in costs) { - DataOutput @out = new OutputStreamDataOutput(os); - CodecUtil.WriteHeader(@out, ConnectionCosts.HEADER, ConnectionCosts.VERSION); - @out.WriteVInt32(forwardSize); - @out.WriteVInt32(backwardSize); - int last = 0; - if 
(Debugging.AssertsEnabled) Debugging.Assert(costs.Length == backwardSize); - foreach (short[] a in costs) + if (Debugging.AssertsEnabled) Debugging.Assert(a.Length == forwardSize); + for (int i = 0; i < a.Length; i++) { - if (Debugging.AssertsEnabled) Debugging.Assert(a.Length == forwardSize); - for (int i = 0; i < a.Length; i++) - { - int delta = (int)a[i] - last; - @out.WriteVInt32((delta >> 31) ^ (delta << 1)); - last = a[i]; - } + int delta = (int)a[i] - last; + @out.WriteVInt32((delta >> 31) ^ (delta << 1)); + last = a[i]; } } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/DictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/DictionaryBuilder.cs index d6d6b43773..83fd993eec 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/DictionaryBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/DictionaryBuilder.cs @@ -20,14 +20,10 @@ namespace Lucene.Net.Analysis.Ja.Util * limitations under the License. */ - public class DictionaryBuilder + public static class DictionaryBuilder // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public enum DictionaryFormat { IPADIC, UNIDIC }; - private DictionaryBuilder() - { - } - static DictionaryBuilder() { #if FEATURE_ENCODINGPROVIDERS @@ -47,16 +43,16 @@ public static void Build(DictionaryFormat format, TokenInfoDictionaryBuilder tokenInfoBuilder = new TokenInfoDictionaryBuilder(format, encoding, normalizeEntry); TokenInfoDictionaryWriter tokenInfoDictionary = tokenInfoBuilder.Build(inputDirname); tokenInfoDictionary.Write(outputDirname); - tokenInfoDictionary = null; - tokenInfoBuilder = null; + //tokenInfoDictionary = null; // LUCENENET: IDE0059: Remove unnecessary value assignment + //tokenInfoBuilder = null; // LUCENENET: IDE0059: Remove unnecessary value assignment Console.WriteLine("done"); Console.WriteLine("building unknown word dict..."); UnknownDictionaryBuilder unkBuilder = new UnknownDictionaryBuilder(encoding); UnknownDictionaryWriter unkDictionary = 
unkBuilder.Build(inputDirname); unkDictionary.Write(outputDirname); - unkDictionary = null; - unkBuilder = null; + //unkDictionary = null; // LUCENENET: IDE0059: Remove unnecessary value assignment + //unkBuilder = null; // LUCENENET: IDE0059: Remove unnecessary value assignment Console.WriteLine("done"); Console.WriteLine("building connection costs..."); diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs index 31ccf90d32..8562f01221 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/TokenInfoDictionaryBuilder.cs @@ -33,12 +33,12 @@ public class TokenInfoDictionaryBuilder /// Internal word id - incrementally assigned as entries are read and added. This will be byte offset of dictionary file private int offset = 0; - private string encoding = "euc-jp"; + private readonly string encoding = "euc-jp"; // LUCENENET: marked readonly - private bool normalizeEntries = false; + private readonly bool normalizeEntries = false; // LUCENENET: marked readonly //private Normalizer2 normalizer; - private DictionaryBuilder.DictionaryFormat format = DictionaryBuilder.DictionaryFormat.IPADIC; + private readonly DictionaryBuilder.DictionaryFormat format = DictionaryBuilder.DictionaryFormat.IPADIC; // LUCENENET: marked readonly public TokenInfoDictionaryBuilder(DictionaryBuilder.DictionaryFormat format, string encoding, bool normalizeEntries) { @@ -68,43 +68,41 @@ public virtual TokenInfoDictionaryWriter BuildDictionary(IList csvFiles) List lines = new List(400000); foreach (string file in csvFiles) { - using (Stream inputStream = new FileStream(file, FileMode.Open, FileAccess.Read)) + using Stream inputStream = new FileStream(file, FileMode.Open, FileAccess.Read); + Encoding decoder = Encoding.GetEncoding(encoding); + TextReader reader = new StreamReader(inputStream, decoder); + + string line = null; + while 
((line = reader.ReadLine()) != null) { - Encoding decoder = Encoding.GetEncoding(encoding); - TextReader reader = new StreamReader(inputStream, decoder); + string[] entry = CSVUtil.Parse(line); - string line = null; - while ((line = reader.ReadLine()) != null) + if (entry.Length < 13) { - string[] entry = CSVUtil.Parse(line); + Console.WriteLine("Entry in CSV is not valid: " + line); + continue; + } + + string[] formatted = FormatEntry(entry); + lines.Add(formatted); - if (entry.Length < 13) + // NFKC normalize dictionary entry + if (normalizeEntries) + { + //if (normalizer.isNormalized(entry[0])){ + if (entry[0].IsNormalized(NormalizationForm.FormKC)) { - Console.WriteLine("Entry in CSV is not valid: " + line); continue; } - - string[] formatted = FormatEntry(entry); - lines.Add(formatted); - - // NFKC normalize dictionary entry - if (normalizeEntries) + string[] normalizedEntry = new string[entry.Length]; + for (int i = 0; i < entry.Length; i++) { - //if (normalizer.isNormalized(entry[0])){ - if (entry[0].IsNormalized(NormalizationForm.FormKC)) - { - continue; - } - string[] normalizedEntry = new string[entry.Length]; - for (int i = 0; i < entry.Length; i++) - { - //normalizedEntry[i] = normalizer.normalize(entry[i]); - normalizedEntry[i] = entry[i].Normalize(NormalizationForm.FormKC); - } - - formatted = FormatEntry(normalizedEntry); - lines.Add(formatted); + //normalizedEntry[i] = normalizer.normalize(entry[i]); + normalizedEntry[i] = entry[i].Normalize(NormalizationForm.FormKC); } + + formatted = FormatEntry(normalizedEntry); + lines.Add(formatted); } } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs b/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs index 61e30f2187..060ce8ad62 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Tools/UnknownDictionaryBuilder.cs @@ -89,53 +89,51 @@ public virtual UnknownDictionaryWriter 
ReadDictionaryFile(string filename, strin public virtual void ReadCharacterDefinition(string filename, UnknownDictionaryWriter dictionary) { - using (Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read)) - using (TextReader reader = new StreamReader(inputStream, Encoding.GetEncoding(encoding))) + using Stream inputStream = new FileStream(filename, FileMode.Open, FileAccess.Read); + using TextReader reader = new StreamReader(inputStream, Encoding.GetEncoding(encoding)); + string line = null; + + while ((line = reader.ReadLine()) != null) { - string line = null; + line = Regex.Replace(line, "^\\s", ""); + line = Regex.Replace(line, "\\s*#.*", ""); + line = Regex.Replace(line, "\\s+", " "); - while ((line = reader.ReadLine()) != null) + // Skip empty line or comment line + if (line.Length == 0) { - line = Regex.Replace(line, "^\\s", ""); - line = Regex.Replace(line, "\\s*#.*", ""); - line = Regex.Replace(line, "\\s+", " "); + continue; + } + + if (line.StartsWith("0x", StringComparison.Ordinal)) + { // Category mapping + string[] values = new Regex(" ").Split(line, 2); // Split only first space - // Skip empty line or comment line - if (line.Length == 0) + if (!values[0].Contains("..")) { - continue; + int cp = Convert.ToInt32(values[0], 16); + dictionary.PutCharacterCategory(cp, values[1]); } + else + { + string[] codePoints = Regex.Split(values[0], "\\.\\.").TrimEnd(); + int cpFrom = Convert.ToInt32(codePoints[0], 16); + int cpTo = Convert.ToInt32(codePoints[1], 16); - if (line.StartsWith("0x", StringComparison.Ordinal)) - { // Category mapping - string[] values = new Regex(" ").Split(line, 2); // Split only first space - - if (!values[0].Contains("..")) - { - int cp = Convert.ToInt32(values[0], 16); - dictionary.PutCharacterCategory(cp, values[1]); - } - else + for (int i = cpFrom; i <= cpTo; i++) { - string[] codePoints = Regex.Split(values[0], "\\.\\.").TrimEnd(); - int cpFrom = Convert.ToInt32(codePoints[0], 16); - int cpTo = 
Convert.ToInt32(codePoints[1], 16); - - for (int i = cpFrom; i <= cpTo; i++) - { - dictionary.PutCharacterCategory(i, values[1]); - } + dictionary.PutCharacterCategory(i, values[1]); } } - else - { // Invoke definition - string[] values = line.Split(' ').TrimEnd(); // Consecutive space is merged above - string characterClassName = values[0]; - int invoke = int.Parse(values[1], CultureInfo.InvariantCulture); - int group = int.Parse(values[2], CultureInfo.InvariantCulture); - int length = int.Parse(values[3], CultureInfo.InvariantCulture); - dictionary.PutInvokeDefinition(characterClassName, invoke, group, length); - } + } + else + { // Invoke definition + string[] values = line.Split(' ').TrimEnd(); // Consecutive space is merged above + string characterClassName = values[0]; + int invoke = int.Parse(values[1], CultureInfo.InvariantCulture); + int group = int.Parse(values[2], CultureInfo.InvariantCulture); + int length = int.Parse(values[3], CultureInfo.InvariantCulture); + dictionary.PutInvokeDefinition(characterClassName, invoke, group, length); } } } diff --git a/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs b/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs index 95e27030f3..f88421c5bf 100644 --- a/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs +++ b/src/Lucene.Net.Analysis.Kuromoji/Util/ToStringUtil.cs @@ -128,8 +128,7 @@ public static class ToStringUtil /// public static string GetPOSTranslation(string s) { - string result; - posTranslations.TryGetValue(s, out result); + posTranslations.TryGetValue(s, out string result); return result; } @@ -202,8 +201,7 @@ public static string GetPOSTranslation(string s) /// public static string GetInflectionTypeTranslation(string s) { - string result; - inflTypeTranslations.TryGetValue(s, out result); + inflTypeTranslations.TryGetValue(s, out string result); return result; } @@ -246,8 +244,7 @@ public static string GetInflectionTypeTranslation(string s) /// public static string 
GetInflectedFormTranslation(string s) { - string result; - inflFormTranslations.TryGetValue(s, out result); + inflFormTranslations.TryGetValue(s, out string result); return result; } diff --git a/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilterFactory.cs b/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilterFactory.cs index 5feef4a2ea..5f1b573c6c 100644 --- a/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Morfologik/Morfologik/MorfologikFilterFactory.cs @@ -71,9 +71,9 @@ public MorfologikFilterFactory(IDictionary args) resourceName = Get(args, DICTIONARY_ATTRIBUTE); - if (args.Count != 0) + if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } @@ -86,11 +86,9 @@ public virtual void Inform(IResourceLoader loader) } else { - using (Stream dict = loader.OpenResource(resourceName)) - using (Stream meta = loader.OpenResource(DictionaryMetadata.GetExpectedMetadataFileName(resourceName))) - { - this.dictionary = Dictionary.Read(dict, meta); - } + using Stream dict = loader.OpenResource(resourceName); + using Stream meta = loader.OpenResource(DictionaryMetadata.GetExpectedMetadataFileName(resourceName)); + this.dictionary = Dictionary.Read(dict, meta); } } diff --git a/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs b/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs index 2e15a3dc34..056a790124 100644 --- a/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs +++ b/src/Lucene.Net.Analysis.Morfologik/Morfologik/TokenAttributes/MorphosyntacticTagsAttribute.cs @@ -59,9 +59,10 @@ public override void Clear() public override bool Equals(object other) { - if (other is IMorphosyntacticTagsAttribute) + if (other is null) 
return false; + if (other is IMorphosyntacticTagsAttribute morphosyntacticTagsAttribute) { - return Equal(this.Tags, ((IMorphosyntacticTagsAttribute)other).Tags); + return Equal(this.Tags, morphosyntacticTagsAttribute.Tags); } return false; } diff --git a/src/Lucene.Net.Analysis.Morfologik/Uk/UkrainianMorfologikAnalyzer.cs b/src/Lucene.Net.Analysis.Morfologik/Uk/UkrainianMorfologikAnalyzer.cs index 6e3ecc8219..1e97edb02c 100644 --- a/src/Lucene.Net.Analysis.Morfologik/Uk/UkrainianMorfologikAnalyzer.cs +++ b/src/Lucene.Net.Analysis.Morfologik/Uk/UkrainianMorfologikAnalyzer.cs @@ -66,13 +66,11 @@ private static CharArraySet LoadDefaultSet() LuceneVersion.LUCENE_CURRENT); #pragma warning restore 612, 618 } -#pragma warning disable 168 catch (IOException ex) -#pragma warning restore 168 { // default set should always be present as it is part of the // distribution (JAR) - throw new Exception("Unable to load default stopword set"); + throw new Exception("Unable to load default stopword set", ex); } } } @@ -164,9 +162,9 @@ private static Dictionary GetDictionary() // (see https://search.maven.org/search?q=a:morfologik-ukrainian-search). However, we are embedding the file in .NET. // Since it doesn't appear to be updated frequently, this should be okay. 
string dictFile = "ukrainian.dict"; - using (var dictStream = type.FindAndGetManifestResourceStream(dictFile)) - using (var metadataStream = type.FindAndGetManifestResourceStream(DictionaryMetadata.GetExpectedMetadataFileName(dictFile))) - return Dictionary.Read(dictStream, metadataStream); + using var dictStream = type.FindAndGetManifestResourceStream(dictFile); + using var metadataStream = type.FindAndGetManifestResourceStream(DictionaryMetadata.GetExpectedMetadataFileName(dictFile)); + return Dictionary.Read(dictStream, metadataStream); } catch (IOException e) { diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilterFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilterFactory.cs index 09c1b143fd..53d37503cd 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilterFactory.cs +++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPChunkerFilterFactory.cs @@ -49,7 +49,7 @@ public OpenNLPChunkerFilterFactory(IDictionary args) chunkerModelFile = Get(args, CHUNKER_MODEL); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs index e981e25db8..a15b6f839e 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs +++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilter.cs @@ -41,7 +41,7 @@ public class OpenNLPLemmatizerFilter : TokenFilter private readonly ITypeAttribute typeAtt; private readonly IKeywordAttribute keywordAtt; private readonly IFlagsAttribute flagsAtt; - private IList sentenceTokenAttrs = new List(); + private readonly IList sentenceTokenAttrs = new List(); // LUCENENET: marked readonly private IEnumerator sentenceTokenAttrsIter = null; private bool moreTokensAvailable = true; private string[] sentenceTokens = null; // non-keyword 
tokens @@ -126,5 +126,29 @@ private void Clear() lemmas = null; lemmaNum = 0; } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. + + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + sentenceTokenAttrsIter?.Dispose(); + sentenceTokenAttrsIter = null; + } + } + finally + { + base.Dispose(disposing); + } + } } } diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilterFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilterFactory.cs index 99415615d3..5d4b6970b2 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilterFactory.cs +++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPLemmatizerFilterFactory.cs @@ -62,7 +62,7 @@ public OpenNLPLemmatizerFilterFactory(IDictionary args) if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs index 42f823054c..3e7e20063a 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs +++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilter.cs @@ -29,7 +29,7 @@ namespace Lucene.Net.Analysis.OpenNlp public sealed class OpenNLPPOSFilter : TokenFilter { private readonly IList sentenceTokenAttrs = new List(); - string[] tags = null; + private string[] tags = null; private int tokenNum = 0; private bool moreTokensAvailable = true; diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilterFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilterFactory.cs index 5295668494..5f112c2d33 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilterFactory.cs +++ 
b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPPOSFilterFactory.cs @@ -48,7 +48,7 @@ public OpenNLPPOSFilterFactory(IDictionary args) posTaggerModelFile = Require(args, POS_TAGGER_MODEL); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPSentenceBreakIterator.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPSentenceBreakIterator.cs index 99c9b07e47..6e12869a25 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPSentenceBreakIterator.cs +++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPSentenceBreakIterator.cs @@ -35,7 +35,7 @@ public sealed class OpenNLPSentenceBreakIterator : BreakIterator private CharacterIterator text; private int currentSentence; private int[] sentenceStarts; - private NLPSentenceDetectorOp sentenceOp; + private readonly NLPSentenceDetectorOp sentenceOp; // LUCENENET: marked readonly public OpenNLPSentenceBreakIterator(NLPSentenceDetectorOp sentenceOp) { @@ -256,9 +256,8 @@ public override void SetText(CharacterIterator newText) private string CharacterIteratorToString() { string fullText; - if (text is CharArrayIterator) + if (text is CharArrayIterator charArrayIterator) { - CharArrayIterator charArrayIterator = (CharArrayIterator)text; fullText = new string(charArrayIterator.Text, charArrayIterator.Start, charArrayIterator.Length); } else diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizer.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizer.cs index 68c1b84bf5..1eed0803f5 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizer.cs +++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizer.cs @@ -42,7 +42,7 @@ public sealed class OpenNLPTokenizer : SegmentingTokenizerBase private int termNum = 0; private int sentenceStart = 0; - private readonly NLPSentenceDetectorOp sentenceOp = null; + //private readonly NLPSentenceDetectorOp sentenceOp = 
null; // LUCENENET: Never read private readonly NLPTokenizerOp tokenizerOp = null; /// @@ -59,7 +59,7 @@ public sealed class OpenNLPTokenizer : SegmentingTokenizerBase { throw new ArgumentException("OpenNLPTokenizer: both a Sentence Detector and a Tokenizer are required"); } - this.sentenceOp = sentenceOp; + //this.sentenceOp = sentenceOp; // LUCENENET: Never read this.tokenizerOp = tokenizerOp; this.termAtt = AddAttribute(); this.flagsAtt = AddAttribute(); diff --git a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizerFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizerFactory.cs index 20afc8b7ee..22ddd9b06d 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.OpenNLP/OpenNLPTokenizerFactory.cs @@ -51,7 +51,7 @@ public OpenNLPTokenizerFactory(IDictionary args) tokenizerModelFile = Require(args, TOKENIZER_MODEL); if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.OpenNLP/Tools/NLPLemmatizerOp.cs b/src/Lucene.Net.Analysis.OpenNLP/Tools/NLPLemmatizerOp.cs index 65a5509ce6..f5112964c2 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/Tools/NLPLemmatizerOp.cs +++ b/src/Lucene.Net.Analysis.OpenNLP/Tools/NLPLemmatizerOp.cs @@ -45,7 +45,7 @@ public NLPLemmatizerOp(Stream dictionary, LemmatizerModel lemmatizerModel) public virtual string[] Lemmatize(string[] words, string[] postags) { - string[] lemmas = null; + string[] lemmas; // LUCENENET: IDE0059: Remove unnecessary value assignment string[] maxEntLemmas = null; if (dictionaryLemmatizer != null) { diff --git a/src/Lucene.Net.Analysis.OpenNLP/Tools/OpenNLPOpsFactory.cs b/src/Lucene.Net.Analysis.OpenNLP/Tools/OpenNLPOpsFactory.cs index e4b3dc570e..7a4fda10f7 100644 --- a/src/Lucene.Net.Analysis.OpenNLP/Tools/OpenNLPOpsFactory.cs +++ 
b/src/Lucene.Net.Analysis.OpenNLP/Tools/OpenNLPOpsFactory.cs @@ -171,22 +171,20 @@ public static string GetLemmatizerDictionary(string dictionaryFile, IResourceLoa { if (!lemmaDictionaries.TryGetValue(dictionaryFile, out string dictionary) || dictionary == null) { - using (TextReader reader = new StreamReader(loader.OpenResource(dictionaryFile), Encoding.UTF8)) + using TextReader reader = new StreamReader(loader.OpenResource(dictionaryFile), Encoding.UTF8); + StringBuilder builder = new StringBuilder(); + char[] chars = new char[8092]; + int numRead = 0; + do { - StringBuilder builder = new StringBuilder(); - char[] chars = new char[8092]; - int numRead = 0; - do + numRead = reader.Read(chars, 0, chars.Length); + if (numRead > 0) { - numRead = reader.Read(chars, 0, chars.Length); - if (numRead > 0) - { - builder.Append(chars, 0, numRead); - } - } while (numRead > 0); - dictionary = builder.ToString(); - lemmaDictionaries[dictionaryFile] = dictionary; - } + builder.Append(chars, 0, numRead); + } + } while (numRead > 0); + dictionary = builder.ToString(); + lemmaDictionaries[dictionaryFile] = dictionary; } return dictionary; } diff --git a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilterFactory.cs b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilterFactory.cs index d4331bb7ea..e669c15660 100644 --- a/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Phonetic/BeiderMorseFilterFactory.cs @@ -57,9 +57,9 @@ public BeiderMorseFilterFactory(IDictionary args) // LanguageSet: defaults to automagic, otherwise a comma-separated list. ISet langs = GetSet(args, "languageSet"); languageSet = (null == langs || (1 == langs.Count && langs.Contains("auto"))) ? 
null : LanguageSet.From(langs); - if (!(args.Count == 0)) + if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilter.cs b/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilter.cs index 6065b011be..b4e2592275 100644 --- a/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilter.cs +++ b/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilter.cs @@ -96,7 +96,7 @@ public override bool IncrementToken() if (saveState) { remainingTokens.Enqueue(CaptureState()); - saveState = false; + //saveState = false; // LUCENENET: IDE0059: Remove unnecessary value assignment } posAtt.PositionIncrement = firstAlternativeIncrement; termAtt.SetEmpty().Append(alternatePhoneticValue); diff --git a/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilterFactory.cs b/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilterFactory.cs index d70fd41b55..8f4d014554 100644 --- a/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Phonetic/DoubleMetaphoneFilterFactory.cs @@ -53,9 +53,9 @@ public DoubleMetaphoneFilterFactory(IDictionary args) { inject = GetBoolean(args, INJECT, true); maxCodeLength = GetInt32(args, MAX_CODE_LENGTH, DEFAULT_MAX_CODE_LENGTH); - if (!(args.Count == 0)) + if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/AbstractCaverphone .cs b/src/Lucene.Net.Analysis.Phonetic/Language/AbstractCaverphone .cs index 4510d6f7a1..49ced5c601 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/AbstractCaverphone .cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/AbstractCaverphone .cs @@ -35,31 +35,12 @@ public abstract class 
AbstractCaverphone : IStringEncoder /// /// Creates an instance of the Caverphone encoder /// - public AbstractCaverphone() + protected AbstractCaverphone() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base() { } - // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway. - // /** - // * Encodes an Object using the caverphone algorithm. This method is provided in order to satisfy the requirements of - // * the Encoder interface, and will throw an EncoderException if the supplied object is not of type java.lang.String. - // * - // * @param source - // * Object to encode - // * @return An object (or type java.lang.String) containing the caverphone code which corresponds to the String - // * supplied. - // * @throws EncoderException - // * if the parameter supplied is not of type java.lang.String - // */ - // @Override - //public Object encode(final Object source) throws EncoderException - // { - // if (!(source instanceof String)) { - // throw new EncoderException("Parameter supplied to Caverphone encode is not of type java.lang.String"); - // } - // return this.encode((String) source); - // } + // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway. 
// LUCENENET specific - must provide implementation for IStringEncoder public abstract string Encode(string source); diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs index 26cfe1c877..201cd97214 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Lang.cs @@ -110,7 +110,7 @@ public bool Matches(string txt) } // LUCENENET specific - need to load this first for LoadLangs() to work - private static readonly string LANGUAGE_RULES_RN = "lang.txt"; + private const string LANGUAGE_RULES_RN = "lang.txt"; private static readonly IDictionary langs = LoadLangs(); @@ -131,8 +131,7 @@ private static IDictionary LoadLangs() // LUCENENET: Avoid stati /// A Lang encapsulating the language guessing rules for that name type. public static Lang GetInstance(NameType nameType) { - Lang result; - langs.TryGetValue(nameType, out result); + langs.TryGetValue(nameType, out Lang result); return result; } diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs index 2b33ebb01b..63d7886dc4 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Languages.cs @@ -82,8 +82,7 @@ private static IDictionary LoadLanguages() // LUCENENET: Av public static Languages GetInstance(NameType nameType) { - Languages result; - LANGUAGES.TryGetValue(nameType, out result); + LANGUAGES.TryGetValue(nameType, out Languages result); return result; } diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs index 9602d80e17..fedc8ded7e 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/PhoneticEngine.cs @@ -73,8 +73,10 @@ public static PhonemeBuilder Empty(LanguageSet languages) private 
PhonemeBuilder(Phoneme phoneme) { - this.phonemes = new JCG.LinkedHashSet(); - this.phonemes.Add(phoneme); + this.phonemes = new JCG.LinkedHashSet + { + phoneme + }; } internal PhonemeBuilder(ISet phonemes) @@ -212,11 +214,7 @@ private sealed class RulesApplication public RulesApplication(IDictionary> finalRules, string input, PhonemeBuilder phonemeBuilder, int i, int maxPhonemes) { - if (finalRules == null) - { - throw new ArgumentNullException("The finalRules argument must not be null"); - } - this.finalRules = finalRules; + this.finalRules = finalRules ?? throw new ArgumentNullException(nameof(finalRules), "The finalRules argument must not be null"); this.phonemeBuilder = phonemeBuilder; this.input = input; this.i = i; @@ -237,8 +235,7 @@ public RulesApplication Invoke() { this.found = false; int patternLength = 1; - IList rules; - if (this.finalRules.TryGetValue(input.Substring(i, patternLength), out rules) && rules != null) + if (this.finalRules.TryGetValue(input.Substring(i, patternLength), out IList rules) && rules != null) { foreach (Rule rule in rules) { @@ -269,13 +266,14 @@ public RulesApplication Invoke() private static IDictionary> LoadNamePrefixes() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) { - var namePrefixes = new Dictionary>(); - namePrefixes[NameType.ASHKENAZI] = new JCG.HashSet() { "bar", "ben", "da", "de", "van", "von" }.AsReadOnly(); - namePrefixes[NameType.SEPHARDIC] = new JCG.HashSet() { "al", "el", "da", "dal", "de", "del", "dela", "de la", - "della", "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly(); - namePrefixes[NameType.GENERIC] = new JCG.HashSet() { "da", "dal", "de", "del", "dela", "de la", "della", - "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly(); - return namePrefixes; + return new Dictionary> + { + [NameType.ASHKENAZI] = new JCG.HashSet() { "bar", "ben", "da", "de", "van", "von" }.AsReadOnly(), + [NameType.SEPHARDIC] = new 
JCG.HashSet() { "al", "el", "da", "dal", "de", "del", "dela", "de la", + "della", "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly(), + [NameType.GENERIC] = new JCG.HashSet() { "da", "dal", "de", "del", "dela", "de la", "della", + "des", "di", "do", "dos", "du", "van", "von" }.AsReadOnly() + }; } /// diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/ResourceConstants.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/ResourceConstants.cs index c70d40471e..bff6704494 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/ResourceConstants.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/ResourceConstants.cs @@ -29,9 +29,9 @@ namespace Lucene.Net.Analysis.Phonetic.Language.Bm /// internal class ResourceConstants { - public static readonly string CMT = "//"; + public const string CMT = "//"; public static readonly Encoding ENCODING = Encoding.UTF8; - public static readonly string EXT_CMT_END = "*/"; - public static readonly string EXT_CMT_START = "/*"; + public const string EXT_CMT_END = "*/"; + public const string EXT_CMT_START = "/*"; } } diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs index b93d9808ed..23eb7ec273 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs @@ -158,7 +158,9 @@ private static IDictionary> GetInstanceMap(NameType nameType, public static IDictionary> GetInstanceMap(NameType nameType, RuleType rt, string lang) { - IDictionary>>> nameTypes; - IDictionary>> ruleTypes; - IDictionary> rules = null; - - if (RULES.TryGetValue(nameType, out nameTypes) && nameTypes != null && - nameTypes.TryGetValue(rt, out ruleTypes) && ruleTypes != null && - ruleTypes.TryGetValue(lang, out rules) && rules != null) + if (RULES.TryGetValue(nameType, out var nameTypes) && nameTypes != null && + nameTypes.TryGetValue(rt, out var ruleTypes) && ruleTypes != null && + ruleTypes.TryGetValue(lang, out var 
rules) && rules != null) { } else @@ -500,8 +500,7 @@ private static IDictionary> ParseRules(TextReader reader, st Rule r = new RuleAnonymousHelper(pat, lCon, rCon, ph, cLine, location); string patternKey = r.pattern.Substring(0, 1 - 0); - IList rules; - if (!lines.TryGetValue(patternKey, out rules) || rules == null) + if (!lines.TryGetValue(patternKey, out IList rules) || rules == null) { rules = new List(); lines[patternKey] = rules; diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs b/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs index a4bb0005bc..88c92a4bde 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/DaitchMokotoffSoundex.cs @@ -6,6 +6,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Runtime.CompilerServices; using System.Text; using System.Text.RegularExpressions; @@ -200,11 +201,13 @@ public string[] GetReplacements(string context, bool atStart) return replacementDefault; } - private bool IsVowel(char ch) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool IsVowel(char ch) // LUCENENET: CA1822: Mark members as static { return ch == 'a' || ch == 'e' || ch == 'i' || ch == 'o' || ch == 'u'; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Matches(string context) { return context.StartsWith(pattern, StringComparison.Ordinal); @@ -240,6 +243,7 @@ public override string ToString() private class DaitchMokotoffRuleComparer : IComparer { + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int Compare(Rule rule1, Rule rule2) { return rule2.PatternLength - rule1.PatternLength; @@ -275,7 +279,7 @@ private static void ParseRules(TextReader scanner, string location, string rawLine; while ((rawLine = scanner.ReadLine()) != null) - { + { currentLine++; string line = rawLine; @@ -352,8 +356,7 @@ private static void ParseRules(TextReader scanner, string location, 
Rule r = new Rule(pattern, replacement1, replacement2, replacement3); char patternKey = r.Pattern[0]; - IList rules; - if (!ruleMapping.TryGetValue(patternKey, out rules) || rules == null) + if (!ruleMapping.TryGetValue(patternKey, out IList rules) || rules == null) { rules = new List(); ruleMapping[patternKey] = rules; @@ -437,34 +440,7 @@ private string Cleanup(string input) return sb.ToString(); } - // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway. - //** - // * Encodes an Object using the Daitch-Mokotoff soundex algorithm without branching. - // *

- // * This method is provided in order to satisfy the requirements of the Encoder interface, and will throw an - // * EncoderException if the supplied object is not of type java.lang.String. - // *

- // * - // * @see #soundex(String) - // * - // * @param obj - // * Object to encode - // * @return An object (of type java.lang.String) containing the DM soundex code, which corresponds to the String - // * supplied. - // * @throws EncoderException - // * if the parameter supplied is not of type java.lang.String - // * @throws IllegalArgumentException - // * if a character is not mapped - // */ - //@Override - // public Object encode(object obj) - //{ - // if (!(obj instanceof String)) { - // throw new EncoderException( - // "Parameter supplied to DaitchMokotoffSoundex encode is not of type java.lang.String"); - // } - // return encode((String) obj); - //} + // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway. /// /// Encodes a string using the Daitch-Mokotoff soundex algorithm without branching. @@ -473,6 +449,7 @@ private string Cleanup(string input) /// A DM Soundex code corresponding to the string supplied. /// If a character is not mapped. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual string Encode(string source) { if (source == null) @@ -532,8 +509,10 @@ private string[] GetSoundex(string source, bool branching) string input = Cleanup(source); // LinkedHashSet preserves input order. In .NET we can use List for that purpose. 
- IList currentBranches = new List(); - currentBranches.Add(new Branch()); + IList currentBranches = new List + { + new Branch() + }; char lastChar = '\0'; for (int index = 0; index < input.Length; index++) @@ -547,8 +526,7 @@ private string[] GetSoundex(string source, bool branching) } string inputContext = input.Substring(index); - IList rules; - if (!RULES.TryGetValue(ch, out rules) || rules == null) + if (!RULES.TryGetValue(ch, out IList rules) || rules == null) { continue; } diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/DoubleMetaphone.cs b/src/Lucene.Net.Analysis.Phonetic/Language/DoubleMetaphone.cs index 73eb64f549..17bd705f74 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/DoubleMetaphone.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/DoubleMetaphone.cs @@ -1,6 +1,7 @@ // commons-codec version compatibility level: 1.9 using System; using System.Globalization; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Analysis.Phonetic.Language @@ -40,7 +41,7 @@ public class DoubleMetaphone : IStringEncoder /// /// "Vowels" to test for /// - private static readonly string VOWELS = "AEIOUY"; + private const string VOWELS = "AEIOUY"; /// /// Prefixes when present which are not pronounced @@ -72,6 +73,7 @@ public DoubleMetaphone() /// /// String to encode. /// An encoded string. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual string GetDoubleMetaphone(string value) { return GetDoubleMetaphone(value, false); @@ -194,29 +196,14 @@ public virtual string GetDoubleMetaphone(string value, bool alternate) return alternate ? result.Alternate : result.Primary; } - // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway. - // /** - // * Encode the value using DoubleMetaphone. It will only work if - // * obj is a String (like Metaphone). 
- // * - // * @param obj Object to encode (should be of type String) - // * @return An encoded Object (will be of type String) - // * @throws EncoderException encode parameter is not of type String - // */ - - //public virtual object Encode(object obj) - // { - // if (!(obj is String)) { - // throw new EncoderException("DoubleMetaphone encode parameter is not of type String"); - // } - // return GetDoubleMetaphone((String) obj); - // } + // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway. /// /// Encode the value using DoubleMetaphone. /// /// String to encode. /// An encoded string. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual string Encode(string value) { return GetDoubleMetaphone(value); @@ -229,6 +216,7 @@ public virtual string Encode(string value) /// The left-hand side of the encoded . /// The right-hand side of the encoded . /// true if the encoded s are equal; false otherwise. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual bool IsDoubleMetaphoneEqual(string value1, string value2) { return IsDoubleMetaphoneEqual(value1, value2, false); @@ -242,6 +230,7 @@ public virtual bool IsDoubleMetaphoneEqual(string value1, string value2) /// The right-hand side of the encoded . /// Use the alternate value if true. /// true if the encoded s are equal; false otherwise. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual bool IsDoubleMetaphoneEqual(string value1, string value2, bool alternate) { return GetDoubleMetaphone(value1, alternate).Equals(GetDoubleMetaphone(value2, alternate), StringComparison.Ordinal); @@ -261,6 +250,7 @@ public virtual int MaxCodeLen /// /// Handles 'A', 'E', 'I', 'O', 'U', and 'Y' cases. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int HandleAEIOUY(DoubleMetaphoneResult result, int index) { if (index == 0) @@ -382,7 +372,7 @@ private int HandleCC(string value, DoubleMetaphoneResult result, int index) /// /// Handles 'CH' cases. /// - private int HandleCH(string value, DoubleMetaphoneResult result, int index) + private static int HandleCH(string value, DoubleMetaphoneResult result, int index) // LUCENENET: CA1822: Mark members as static { if (index > 0 && Contains(value, index, 4, "CHAE")) { // Michael @@ -1011,7 +1001,7 @@ private bool ConditionC0(string value, int index) /// /// Complex condition 0 for 'CH'. /// - private bool ConditionCH0(string value, int index) + private static bool ConditionCH0(string value, int index) // LUCENENET: CA1822: Mark members as static { if (index != 0) { @@ -1035,7 +1025,7 @@ private bool ConditionCH0(string value, int index) /// /// Complex condition 1 for 'CH'. /// - private bool ConditionCH1(string value, int index) + private static bool ConditionCH1(string value, int index) // LUCENENET: CA1822: Mark members as static { return ((Contains(value, 0, 4, "VAN ", "VON ") || Contains(value, 0, 3, "SCH")) || Contains(value, index - 2, 6, "ORCHES", "ARCHIT", "ORCHID") || @@ -1047,7 +1037,7 @@ private bool ConditionCH1(string value, int index) /// /// Complex condition 0 for 'L'. /// - private bool ConditionL0(string value, int index) + private static bool ConditionL0(string value, int index) // LUCENENET: CA1822: Mark members as static { if (index == value.Length - 3 && Contains(value, index - 1, 4, "ILLO", "ILLA", "ALLE")) @@ -1085,6 +1075,7 @@ private bool ConditionM0(string value, int index) /// Determines whether or not a value is of slavo-germanic origin. A value is /// of slavo-germanic origin if it contians any of 'W', 'K', 'CZ', or 'WITZ'. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool IsSlavoGermanic(string value) { return value.IndexOf('W') > -1 || value.IndexOf('K') > -1 || @@ -1094,7 +1085,8 @@ private bool IsSlavoGermanic(string value) /// /// Determines whether or not a character is a vowel or not /// - private bool IsVowel(char ch) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool IsVowel(char ch) // LUCENENET: CA1822: Mark members as static { return VOWELS.IndexOf(ch) != -1; } @@ -1193,18 +1185,21 @@ public DoubleMetaphoneResult(int maxLength) this.alternate = new StringBuilder(maxLength); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Append(char value) { AppendPrimary(value); AppendAlternate(value); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Append(char primary, char alternate) { AppendPrimary(primary); AppendAlternate(alternate); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void AppendPrimary(char value) { if (this.primary.Length < this.maxLength) @@ -1213,6 +1208,7 @@ public virtual void AppendPrimary(char value) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void AppendAlternate(char value) { if (this.alternate.Length < this.maxLength) @@ -1221,12 +1217,14 @@ public virtual void AppendAlternate(char value) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Append(string value) { AppendPrimary(value); AppendAlternate(value); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Append(string primary, string alternate) { AppendPrimary(primary); diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/MatchRatingApproachEncoder.cs b/src/Lucene.Net.Analysis.Phonetic/Language/MatchRatingApproachEncoder.cs index 4cc3b347d9..ce3ab003b5 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/MatchRatingApproachEncoder.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/MatchRatingApproachEncoder.cs 
@@ -98,26 +98,7 @@ internal string CleanName(string name) return upperName; } - // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway. - // ** - // * Encodes an Object using the Match Rating Approach algorithm. Method is here to satisfy the requirements of the - // * Encoder interface Throws an EncoderException if input object is not of type java.lang.string. - // * - // * @param pObject - // * Object to encode - // * @return An object (or type java.lang.string) containing the Match Rating Approach code which corresponds to the - // * string supplied. - // * @throws EncoderException - // * if the parameter supplied is not of type java.lang.string - // */ - //public Object encode(Object pObject) throws EncoderException - //{ - //if (!(pObject instanceof string)) { - // throw new EncoderException( - // "Parameter supplied to Match Rating Approach encoder is not of type java.lang.string"); - // } - //return encode((string) pObject); - //} + // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway. /// /// Encodes a string using the Match Rating Approach (MRA) algorithm. @@ -178,7 +159,7 @@ internal string GetFirst3Last3(string name) /// The min rating value. internal int GetMinRating(int sumLength) { - int minRating = 0; + int minRating; // LUCENENET: IDE0059: Remove unnecessary value assignment if (sumLength <= FOUR) { @@ -259,7 +240,7 @@ public virtual bool IsEncodeEquals(string name1, string name2) // 5. Obtain the minimum rating value by calculating the length sum of the // encoded strings and sending it down. int sumLength = Math.Abs(name1.Length + name2.Length); - int minRating = 0; + int minRating; // LUCENENET: IDE0059: Remove unnecessary value assignment minRating = GetMinRating(sumLength); // 6. 
Process the encoded strings from left to right and remove any @@ -277,7 +258,7 @@ public virtual bool IsEncodeEquals(string name1, string name2) /// /// The letter under investiagtion. /// true if a vowel, else false. - internal bool IsVowel(string letter) + internal static bool IsVowel(string letter) // LUCENENET: CA1822: Mark members as static { return letter.Equals("E", StringComparison.OrdinalIgnoreCase) || letter.Equals("A", StringComparison.OrdinalIgnoreCase) || letter.Equals("O", StringComparison.OrdinalIgnoreCase) || letter.Equals("I", StringComparison.OrdinalIgnoreCase) || letter.Equals("U", StringComparison.OrdinalIgnoreCase); @@ -298,11 +279,11 @@ internal int LeftToRightThenRightToLeftProcessing(string name1, string name2) int name1Size = name1.Length - 1; int name2Size = name2.Length - 1; - string name1LtRStart = EMPTY; - string name1LtREnd = EMPTY; + string name1LtRStart/* = EMPTY*/; // LUCENENET: IDE0059: Remove unnecessary value assignment + string name1LtREnd/* = EMPTY*/; // LUCENENET: IDE0059: Remove unnecessary value assignment - string name2RtLStart = EMPTY; - string name2RtLEnd = EMPTY; + string name2RtLStart/* = EMPTY*/; // LUCENENET: IDE0059: Remove unnecessary value assignment + string name2RtLEnd/* = EMPTY*/; // LUCENENET: IDE0059: Remove unnecessary value assignment for (int i = 0; i < name1Char.Length; i++) { @@ -353,7 +334,7 @@ internal int LeftToRightThenRightToLeftProcessing(string name1, string name2) ///
/// The word that may have accents in it. /// De-accented word. - internal string RemoveAccents(string accentedWord) + internal static string RemoveAccents(string accentedWord) // LUCENENET: CA1822: Mark members as static { if (accentedWord == null) { diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Metaphone.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Metaphone.cs index 5487368081..c10b9926f5 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/Metaphone.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/Metaphone.cs @@ -53,17 +53,17 @@ public class Metaphone : IStringEncoder /// /// Five values in the English language /// - private static readonly string VOWELS = "AEIOU"; + private const string VOWELS = "AEIOU"; /// /// Variable used in Metaphone algorithm /// - private static readonly string FRONTV = "EIY"; + private const string FRONTV = "EIY"; /// /// Variable used in Metaphone algorithm /// - private static readonly string VARSON = "CSPTG"; + private const string VARSON = "CSPTG"; /// /// The max code length for metaphone is 4 @@ -91,7 +91,7 @@ public Metaphone() /// A metaphone code corresponding to the string supplied. public virtual string GetMetaphone(string txt) { - bool hard = false; + bool hard; // LUCENENET: IDE0059: Remove unnecessary value assignment if (txt == null || txt.Length == 0) { return ""; @@ -442,27 +442,7 @@ private bool IsLastChar(int wdsz, int n) return n + 1 == wdsz; } - // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway. - // /** - // * Encodes an Object using the metaphone algorithm. This method - // * is provided in order to satisfy the requirements of the - // * Encoder interface, and will throw an EncoderException if the - // * supplied object is not of type java.lang.String. - // * - // * @param obj Object to encode - // * @return An object (or type java.lang.String) containing the - // * metaphone code which corresponds to the String supplied. 
- // * @throws EncoderException if the parameter supplied is not - // * of type java.lang.String - // */ - // @Override - //public object encode(object obj) - // { - // if (!(obj is String)) { - // throw new EncoderException("Parameter supplied to Metaphone encode is not of type java.lang.String"); - // } - // return GetMetaphone((String) obj); - // } + // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway. /// /// Encodes a string using the algorithm. diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Nysiis.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Nysiis.cs index d2b713e6ca..a09a70a1b8 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/Nysiis.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/Nysiis.cs @@ -119,8 +119,8 @@ public class Nysiis : IStringEncoder private static readonly Regex PAT_EE_IE = new Regex("(EE|IE)$", RegexOptions.Compiled); private static readonly Regex PAT_DT_ETC = new Regex("(DT|RT|RD|NT|ND)$", RegexOptions.Compiled); - private static readonly char SPACE = ' '; - private static readonly int TRUE_LENGTH = 6; + private const char SPACE = ' '; + private const int TRUE_LENGTH = 6; /// /// Tests if the given character is a vowel. @@ -234,28 +234,7 @@ public Nysiis(bool strict) this.strict = strict; } - // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway. - // /** - // * Encodes an Object using the NYSIIS algorithm. This method is provided in order to satisfy the requirements of the - // * Encoder interface, and will throw an if the supplied object is not of type - // * . - // * - // * @param obj - // * Object to encode - // * @return An object (or a ) containing the NYSIIS code which corresponds to the given String. 
- // * @throws EncoderException - // * if the parameter supplied is not of a - // * @throws IllegalArgumentException - // * if a character is not mapped - // */ - // @Override - //public object Encode(object obj) - // { - // if (!(obj is String)) { - // throw new EncoderException("Parameter supplied to Nysiis encode is not of type java.lang.String"); - // } - // return this.nysiis((String) obj); - // } + // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway. /// /// Encodes a string using the NYSIIS algorithm. diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/RefinedSoundex.cs b/src/Lucene.Net.Analysis.Phonetic/Language/RefinedSoundex.cs index e0f9071932..bf6383b452 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/RefinedSoundex.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/RefinedSoundex.cs @@ -107,28 +107,7 @@ public virtual int Difference(string s1, string s2) return SoundexUtils.Difference(this, s1, s2); } - // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway. - // /** - // * Encodes an Object using the refined soundex algorithm. This method is - // * provided in order to satisfy the requirements of the Encoder interface, - // * and will throw an EncoderException if the supplied object is not of type - // * java.lang.String. - // * - // * @param obj - // * Object to encode - // * @return An object (or type java.lang.String) containing the refined - // * soundex code which corresponds to the String supplied. 
- // * @throws EncoderException - // * if the parameter supplied is not of type java.lang.String - // */ - // @Override - //public virtual object Encode(object obj) - // { - // if (!(obj is String)) { - // throw new EncoderException("Parameter supplied to RefinedSoundex encode is not of type java.lang.String"); - // } - // return soundex((String) obj); - // } + // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway. /// /// Encodes a string using the refined soundex algorithm. diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Soundex.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Soundex.cs index aaea825341..570c21c174 100644 --- a/src/Lucene.Net.Analysis.Phonetic/Language/Soundex.cs +++ b/src/Lucene.Net.Analysis.Phonetic/Language/Soundex.cs @@ -213,27 +213,7 @@ public virtual int Difference(string s1, string s2) return SoundexUtils.Difference(this, s1, s2); } - // LUCENENET specific - in .NET we don't need an object overload, since strings are sealed anyway. - // /** - // * Encodes an Object using the soundex algorithm. This method is provided in order to satisfy the requirements of - // * the Encoder interface, and will throw an EncoderException if the supplied object is not of type java.lang.String. - // * - // * @param obj - // * Object to encode - // * @return An object (or type java.lang.String) containing the soundex code which corresponds to the String - // * supplied. - // * @throws EncoderException - // * if the parameter supplied is not of type java.lang.String - // * @throws IllegalArgumentException - // * if a character is not mapped - // */ - //public virtual Object encode(object obj) - // { - // if (!(obj is string)) { - // throw new EncoderException("Parameter supplied to Soundex encode is not of type java.lang.String"); - // } - // return soundex((string) obj); - // } + // LUCENENET specific - in .NET we don't need an object overload of Encode(), since strings are sealed anyway. 
/// /// Encodes a string using the soundex algorithm. diff --git a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs index bffc0e5ac1..a6664f302d 100644 --- a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs +++ b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilter.cs @@ -29,9 +29,9 @@ namespace Lucene.Net.Analysis.Phonetic public sealed class PhoneticFilter : TokenFilter { /// true if encoded tokens should be added as synonyms - private bool inject = true; + private readonly bool inject = true; // LUCENENET: marked readonly /// phonetic encoder - private IStringEncoder encoder = null; + private readonly IStringEncoder encoder = null; // LUCENENET: marked readonly /// captured state, non-null when inject=true and a token is buffered private State save = null; private readonly ICharTermAttribute termAtt; diff --git a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilterFactory.cs b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilterFactory.cs index c4c8880429..529e3159a3 100644 --- a/src/Lucene.Net.Analysis.Phonetic/PhoneticFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Phonetic/PhoneticFilterFactory.cs @@ -69,12 +69,12 @@ namespace Lucene.Net.Analysis.Phonetic public class PhoneticFilterFactory : TokenFilterFactory, IResourceLoaderAware { /// parameter name: either a short name or a full class name - public static readonly string ENCODER = "encoder"; + public const string ENCODER = "encoder"; /// parameter name: true if encoded tokens should be added as synonyms - public static readonly string INJECT = "inject"; // boolean + public const string INJECT = "inject"; // boolean /** parameter name: restricts the length of the phonetic code */ - public static readonly string MAX_CODE_LENGTH = "maxCodeLength"; - private static readonly string PACKAGE_CONTAINING_ENCODERS = "Lucene.Net.Analysis.Phonetic.Language."; + public const string MAX_CODE_LENGTH = "maxCodeLength"; + private const string PACKAGE_CONTAINING_ENCODERS = 
"Lucene.Net.Analysis.Phonetic.Language."; //Effectively constants; uppercase keys private static readonly IDictionary registry = new Dictionary // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) @@ -108,9 +108,9 @@ public PhoneticFilterFactory(IDictionary args) { maxCodeLength = null; } - if (!(args.Count == 0)) + if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs b/src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs index ae180143ca..a3a9456ebc 100644 --- a/src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs +++ b/src/Lucene.Net.Analysis.SmartCn/AnalyzerProfile.cs @@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis.Cn.Smart /// /// @lucene.experimental /// - public class AnalyzerProfile + public static class AnalyzerProfile // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// /// Global indicating the configured analysis data directory @@ -80,28 +80,6 @@ private static void Init() string currentPath = System.AppContext.BaseDirectory; #endif - //FileInfo[] cadidateFiles = new FileInfo[] { new FileInfo(currentPath + "/" + dirName), - // new FileInfo(currentPath + "/bin/" + dirName)/*, new FileInfo("./" + propName), - // new FileInfo("./lib/" + propName)*/ }; - //for (int i = 0; i < cadidateFiles.Length; i++) - //{ - // FileInfo file = cadidateFiles[i]; - // if (file.Exists) - // { - // ANALYSIS_DATA_DIR = file.FullName; - - // //if (file.isDirectory()) - // //{ - // // ANALYSIS_DATA_DIR = file.getAbsolutePath(); - // //} - // //else if (file.isFile() && GetAnalysisDataDir(file).Length != 0) - // //{ - // // ANALYSIS_DATA_DIR = GetAnalysisDataDir(file); - // //} - // break; - // } - //} - string candidatePath = System.IO.Path.Combine(currentPath, 
dirName); if (Directory.Exists(candidatePath)) { @@ -127,45 +105,6 @@ private static void Init() { // ignore security errors } - - - //for (int i = 0; i < cadidateDirectories.Count; i++) - //{ - // DirectoryInfo dir = cadidateDirectories[i]; - // if (dir.Exists) - // { - // ANALYSIS_DATA_DIR = dir.FullName; - // break; - // } - //} - - //if (ANALYSIS_DATA_DIR.Length == 0) - //{ - // // Dictionary directory cannot be found. - // throw new Exception("WARNING: Can not find lexical dictionary directory!" - // + " This will cause unpredictable exceptions in your application!" - // + " Please refer to the manual to download the dictionaries."); - //} - } - - //private static string GetAnalysisDataDir(FileInfo propFile) - //{ - // Properties prop = new Properties(); - // try - // { - // string dir; - // using (FileStream input = new FileStream(propFile.FullName, FileMode.Open, FileAccess.Read)) - // { - // prop.load(new StreamReader(input, Encoding.UTF8)); - // dir = prop.getProperty("analysis.data.dir", ""); - // } - // return dir; - // } - // catch (IOException e) - // { - // return ""; - // } - //} } } diff --git a/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizer.cs b/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizer.cs index 88cc069c83..8485fe28b3 100644 --- a/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizer.cs +++ b/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizer.cs @@ -89,7 +89,32 @@ protected override bool IncrementWord() public override void Reset() { base.Reset(); + tokens?.Dispose(); // LUCENENET specific: Dispose tokens before letting it go out of scope tokens = null; } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + tokens?.Dispose(); // LUCENENET specific - dispose tokens and set to null + tokens = null; + } + } + finally + { + base.Dispose(disposing); + } + } } } diff --git a/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizerFactory.cs b/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizerFactory.cs index 32e9885f71..abf93da0df 100644 --- a/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.SmartCn/HMMChineseTokenizerFactory.cs @@ -44,7 +44,7 @@ public HMMChineseTokenizerFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/AbstractDictionary.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/AbstractDictionary.cs index 83b4614445..67e9a843d8 100644 --- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/AbstractDictionary.cs +++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/AbstractDictionary.cs @@ -36,18 +36,18 @@ internal abstract class AbstractDictionary /// First Chinese Character in GB2312 (15 * 94) /// Characters in GB2312 are arranged in a grid of 94 * 94, 0-14 are unassigned or punctuation. /// - public static readonly int GB2312_FIRST_CHAR = 1410; + public const int GB2312_FIRST_CHAR = 1410; /// /// Last Chinese Character in GB2312 (87 * 94). /// Characters in GB2312 are arranged in a grid of 94 * 94, 88-94 are unassigned. /// - public static readonly int GB2312_CHAR_NUM = 87 * 94; + public const int GB2312_CHAR_NUM = 87 * 94; /// /// Dictionary data contains 6768 Chinese characters with frequency statistics. 
/// - public static readonly int CHAR_NUM_IN_FILE = 6768; + public const int CHAR_NUM_IN_FILE = 6768; // ===================================================== // code +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +A +B +C +D +E +F diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs index 9861a31eb7..fd012940e9 100644 --- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs +++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BiSegGraph.cs @@ -32,11 +32,11 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm /// internal class BiSegGraph { - private IDictionary> tokenPairListTable = new Dictionary>(); + private readonly IDictionary> tokenPairListTable = new Dictionary>(); // LUCENENET: marked readonly private IList segTokenList; - private static BigramDictionary bigramDict = BigramDictionary.GetInstance(); + private static readonly BigramDictionary bigramDict = BigramDictionary.GetInstance(); // LUCENENET: marked readonly public BiSegGraph(SegGraph segGraph) { @@ -50,7 +50,7 @@ public BiSegGraph(SegGraph segGraph) private void GenerateBiSegGraph(SegGraph segGraph) { double smooth = 0.1; - int wordPairFreq = 0; + int wordPairFreq; // LUCENENET: IDE0059: Remove unnecessary value assignment int maxStart = segGraph.MaxStart; double oneWordFreq, weight, tinyDouble = 1.0 / Utility.MAX_FREQUENCE; @@ -60,7 +60,7 @@ private void GenerateBiSegGraph(SegGraph segGraph) segTokenList = segGraph.MakeIndex(); // Because the beginning position of startToken is -1, therefore startToken can be obtained when key = -1 int key = -1; - IList nextTokens = null; + IList nextTokens; // LUCENENET: IDE0059: Remove unnecessary value assignment while (key < maxStart) { if (segGraph.IsStartExist(key)) @@ -140,8 +140,7 @@ public virtual bool IsToExist(int to) /// of token pairs. 
public virtual IList GetToList(int to) { - IList result; - tokenPairListTable.TryGetValue(to, out result); + tokenPairListTable.TryGetValue(to, out IList result); return result; } @@ -154,8 +153,10 @@ public virtual void AddSegTokenPair(SegTokenPair tokenPair) int to = tokenPair.To; if (!IsToExist(to)) { - List newlist = new List(); - newlist.Add(tokenPair); + List newlist = new List + { + tokenPair + }; tokenPairListTable[to] = newlist; } else diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs index 9ca0b579ce..ab7bb5d3eb 100644 --- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs +++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/BigramDictionary.cs @@ -35,11 +35,11 @@ private BigramDictionary() { } - public static readonly char WORD_SEGMENT_CHAR = '@'; + public const char WORD_SEGMENT_CHAR = '@'; private static BigramDictionary singleInstance; - public static readonly int PRIME_BIGRAM_LENGTH = 402137; + public const int PRIME_BIGRAM_LENGTH = 402137; /// /// The word associations are stored as FNV1 hashcodes, which have a small probability of collision, but save memory. 
@@ -50,11 +50,11 @@ private BigramDictionary() private int max = 0; - private int repeat = 0; + //private int repeat = 0; // LUCENENET: Never read // static Logger log = Logger.getLogger(BigramDictionary.class); - private static object syncLock = new object(); + private static readonly object syncLock = new object(); public static BigramDictionary GetInstance() { @@ -78,21 +78,6 @@ public static BigramDictionary GetInstance() { singleInstance.Load(dictRoot); } - - - //try - //{ - // singleInstance.Load(); - //} - //catch (IOException e) - //{ - // string dictRoot = AnalyzerProfile.ANALYSIS_DATA_DIR; - // singleInstance.Load(dictRoot); - //} - //catch (TypeLoadException e) - //{ - // throw new Exception(e.ToString(), e); - //} } return singleInstance; } @@ -173,30 +158,21 @@ private bool LoadFromObj(FileInfo serialObj) private void LoadFromInputStream(Stream serialObjectInputStream) { - //ObjectInputStream input = new ObjectInputStream(serialObjectInputStream); - //bigramHashTable = (long[])input.readObject(); - //frequencyTable = (int[])input.readObject(); - //// log.info("load bigram dict from serialization."); - //input.close(); - - using (var reader = new BinaryReader(serialObjectInputStream)) - //using (var reader = new DataInputStream(serialObjectInputStream)) + using var reader = new BinaryReader(serialObjectInputStream); + // Read bigramHashTable + int bhLen = reader.ReadInt32(); + bigramHashTable = new long[bhLen]; + for (int i = 0; i < bhLen; i++) { - // Read bigramHashTable - int bhLen = reader.ReadInt32(); - bigramHashTable = new long[bhLen]; - for (int i = 0; i < bhLen; i++) - { - bigramHashTable[i] = reader.ReadInt64(); - } + bigramHashTable[i] = reader.ReadInt64(); + } - // Read frequencyTable - int fLen = reader.ReadInt32(); - frequencyTable = new int[fLen]; - for (int i = 0; i < fLen; i++) - { - frequencyTable[i] = reader.ReadInt32(); - } + // Read frequencyTable + int fLen = reader.ReadInt32(); + frequencyTable = new int[fLen]; + for (int i = 0; i 
< fLen; i++) + { + frequencyTable[i] = reader.ReadInt32(); } // log.info("load bigram dict from serialization."); @@ -206,36 +182,26 @@ private void SaveToObj(FileInfo serialObj) { try { - //ObjectOutputStream output = new ObjectOutputStream(new FileStream( - // serialObj.FullName, FileMode.Create, FileAccess.Write)); - //output.writeObject(bigramHashTable); - //output.writeObject(frequencyTable); - //output.close(); - - using (Stream output = new FileStream(serialObj.FullName, FileMode.Create, FileAccess.Write)) + using Stream output = new FileStream(serialObj.FullName, FileMode.Create, FileAccess.Write); + using BinaryWriter writer = new BinaryWriter(output); + int bhLen = bigramHashTable.Length; + writer.Write(bhLen); + for (int i = 0; i < bhLen; i++) { - using (BinaryWriter writer = new BinaryWriter(output)) - { - int bhLen = bigramHashTable.Length; - writer.Write(bhLen); - for (int i = 0; i < bhLen; i++) - { - writer.Write(bigramHashTable[i]); - } + writer.Write(bigramHashTable[i]); + } - int fLen = frequencyTable.Length; - writer.Write(fLen); - for (int i = 0; i < fLen; i++) - { - writer.Write(frequencyTable[i]); - } - } + int fLen = frequencyTable.Length; + writer.Write(fLen); + for (int i = 0; i < fLen; i++) + { + writer.Write(frequencyTable[i]); } // log.info("serialize bigram dict."); } -#pragma warning disable 168 +#pragma warning disable 168, IDE0059 catch (Exception e) -#pragma warning restore 168 +#pragma warning restore 168, IDE0059 { // log.warn(e.getMessage()); } @@ -243,10 +209,8 @@ private void SaveToObj(FileInfo serialObj) private void Load() { - using (Stream input = this.GetType().FindAndGetManifestResourceStream("bigramdict.mem")) - { - LoadFromInputStream(input); - } + using Stream input = this.GetType().FindAndGetManifestResourceStream("bigramdict.mem"); + LoadFromInputStream(input); } private void Load(string dictRoot) @@ -296,64 +260,62 @@ public virtual void LoadFromFile(string dctFilePath) byte[] intBuffer = new byte[4]; string tmpword; 
//using (RandomAccessFile dctFile = new RandomAccessFile(dctFilePath, "r")) - using (var dctFile = new FileStream(dctFilePath, FileMode.Open, FileAccess.Read)) - { + using var dctFile = new FileStream(dctFilePath, FileMode.Open, FileAccess.Read); - // GB2312 characters 0 - 6768 - for (i = GB2312_FIRST_CHAR; i < GB2312_FIRST_CHAR + CHAR_NUM_IN_FILE; i++) + // GB2312 characters 0 - 6768 + for (i = GB2312_FIRST_CHAR; i < GB2312_FIRST_CHAR + CHAR_NUM_IN_FILE; i++) + { + string currentStr = GetCCByGB2312Id(i); + // if (i == 5231) + // System.out.println(i); + + dctFile.Read(intBuffer, 0, intBuffer.Length); + // the dictionary was developed for C, and byte order must be converted to work with Java + cnt = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian).GetInt32(); + if (cnt <= 0) + { + continue; + } + total += cnt; + int j = 0; + while (j < cnt) { - string currentStr = GetCCByGB2312Id(i); - // if (i == 5231) - // System.out.println(i); - dctFile.Read(intBuffer, 0, intBuffer.Length); - // the dictionary was developed for C, and byte order must be converted to work with Java - cnt = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian).GetInt32(); - if (cnt <= 0) - { - continue; - } - total += cnt; - int j = 0; - while (j < cnt) + buffer[0] = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian) + .GetInt32();// frequency + dctFile.Read(intBuffer, 0, intBuffer.Length); + buffer[1] = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian) + .GetInt32();// length + dctFile.Read(intBuffer, 0, intBuffer.Length); + // buffer[2] = ByteBuffer.wrap(intBuffer).order( + // ByteOrder.LITTLE_ENDIAN).getInt();// handle + + length = buffer[1]; + if (length > 0) { - dctFile.Read(intBuffer, 0, intBuffer.Length); - buffer[0] = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian) - .GetInt32();// frequency - dctFile.Read(intBuffer, 0, intBuffer.Length); - buffer[1] = ByteBuffer.Wrap(intBuffer).SetOrder(ByteOrder.LittleEndian) - .GetInt32();// length - 
dctFile.Read(intBuffer, 0, intBuffer.Length); - // buffer[2] = ByteBuffer.wrap(intBuffer).order( - // ByteOrder.LITTLE_ENDIAN).getInt();// handle - - length = buffer[1]; - if (length > 0) + byte[] lchBuffer = new byte[length]; + dctFile.Read(lchBuffer, 0, lchBuffer.Length); + //tmpword = new String(lchBuffer, "GB2312"); + tmpword = Encoding.GetEncoding("GB2312").GetString(lchBuffer); + //tmpword = Encoding.GetEncoding("hz-gb-2312").GetString(lchBuffer); + if (i != 3755 + GB2312_FIRST_CHAR) { - byte[] lchBuffer = new byte[length]; - dctFile.Read(lchBuffer, 0, lchBuffer.Length); - //tmpword = new String(lchBuffer, "GB2312"); - tmpword = Encoding.GetEncoding("GB2312").GetString(lchBuffer); - //tmpword = Encoding.GetEncoding("hz-gb-2312").GetString(lchBuffer); - if (i != 3755 + GB2312_FIRST_CHAR) - { - tmpword = currentStr + tmpword; - } - char[] carray = tmpword.ToCharArray(); - long hashId = Hash1(carray); - int index = GetAvaliableIndex(hashId, carray); - if (index != -1) + tmpword = currentStr + tmpword; + } + char[] carray = tmpword.ToCharArray(); + long hashId = Hash1(carray); + int index = GetAvaliableIndex(hashId, carray); + if (index != -1) + { + if (bigramHashTable[index] == 0) { - if (bigramHashTable[index] == 0) - { - bigramHashTable[index] = hashId; - // bigramStringTable[index] = tmpword; - } - frequencyTable[index] += buffer[0]; + bigramHashTable[index] = hashId; + // bigramStringTable[index] = tmpword; } + frequencyTable[index] += buffer[0]; } - j++; } + j++; } } // log.info("load dictionary done! 
" + dctFilePath + " total:" + total); @@ -400,13 +362,13 @@ private int GetBigramItemIndex(char[] carray) hash2 = PRIME_BIGRAM_LENGTH + hash2; int index = hash1; int i = 1; - repeat++; + //repeat++; // LUCENENET: Never read while (bigramHashTable[index] != 0 && bigramHashTable[index] != hashId && i < PRIME_BIGRAM_LENGTH) { index = (hash1 + i * hash2) % PRIME_BIGRAM_LENGTH; i++; - repeat++; + //repeat++; // LUCENENET: Never read if (i > max) max = i; } diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/HHMMSegmenter.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/HHMMSegmenter.cs index 4940dbaf37..610ce5880a 100644 --- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/HHMMSegmenter.cs +++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/HHMMSegmenter.cs @@ -28,7 +28,7 @@ namespace Lucene.Net.Analysis.Cn.Smart.Hhmm /// public class HHMMSegmenter { - private static WordDictionary wordDict = WordDictionary.GetInstance(); + private static readonly WordDictionary wordDict = WordDictionary.GetInstance(); // LUCENENET: marked readonly /// /// Create the for a sentence. @@ -43,7 +43,7 @@ private SegGraph CreateSegGraph(string sentence) CharType[] charTypeArray = GetCharTypes(sentence); StringBuilder wordBuf = new StringBuilder(); SegToken token; - int frequency = 0; // the number of times word appears. + int frequency; // the number of times word appears. 
// LUCENENET: IDE0059: Remove unnecessary value assignment bool hasFullWidth; WordType wordType; char[] charArray; diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs index af563a0a3b..a3323112ab 100644 --- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs +++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegGraph.cs @@ -33,7 +33,7 @@ internal class SegGraph /// /// Map of start offsets to of tokens at that position /// - private IDictionary> tokenListTable = new Dictionary>(); + private readonly IDictionary> tokenListTable = new Dictionary>(); // LUCENENET: marked readonly private int maxStart = -1; @@ -45,8 +45,7 @@ internal class SegGraph public virtual bool IsStartExist(int s) { //return tokenListTable.get(s) != null; - IList result; - return tokenListTable.TryGetValue(s, out result) && result != null; + return tokenListTable.TryGetValue(s, out IList result) && result != null; } /// @@ -56,8 +55,7 @@ public virtual bool IsStartExist(int s) /// of tokens at the specified start offset. 
public virtual IList GetStartList(int s) { - IList result; - tokenListTable.TryGetValue(s, out result); + tokenListTable.TryGetValue(s, out IList result); return result; } @@ -103,8 +101,10 @@ public virtual void AddToken(SegToken token) int s = token.StartOffset; if (!IsStartExist(s)) { - List newlist = new List(); - newlist.Add(token); + List newlist = new List + { + token + }; tokenListTable[s] = newlist; } else diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs index f0bdea4e82..85c1fec3d3 100644 --- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs +++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs @@ -1,5 +1,6 @@ // lucene version compatibility level: 4.8.1 using Lucene.Net.Support; +using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Analysis.Cn.Smart.Hhmm { @@ -31,6 +32,7 @@ public class SegToken /// Character array containing token text /// [WritableArray] + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")] public char[] CharArray { get; set; } /// diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs index 8b4bce38c1..2aadb8d3c6 100644 --- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs +++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/WordDictionary.cs @@ -40,7 +40,7 @@ private WordDictionary() /// /// Large prime number for hash function /// - public static readonly int PRIME_INDEX_LENGTH = 12071; + public const int PRIME_INDEX_LENGTH = 12071; /// /// wordIndexTable guarantees to hash all Chinese characters in Unicode into @@ -68,7 +68,7 @@ private WordDictionary() // static Logger log = Logger.getLogger(WordDictionary.class); - private static object syncLock = new object(); + private static readonly object syncLock = new object(); /// /// Get the singleton dictionary instance. 
@@ -96,21 +96,6 @@ public static WordDictionary GetInstance() { singleInstance.Load(dictRoot); } - - - //try - //{ - // singleInstance.Load(); - //} - //catch (IOException e) - //{ - // string wordDictRoot = AnalyzerProfile.ANALYSIS_DATA_DIR; - // singleInstance.Load(wordDictRoot); - //} - //catch (TypeLoadException e) - //{ - // throw new Exception(e.ToString(), e); - //} } return singleInstance; } @@ -165,10 +150,8 @@ public virtual void Load(string dctFileRoot) /// If there is a low-level I/O error. public virtual void Load() { - using (Stream input = this.GetType().FindAndGetManifestResourceStream("coredict.mem")) - { - LoadFromObjectInputStream(input); - } + using Stream input = this.GetType().FindAndGetManifestResourceStream("coredict.mem"); + LoadFromObjectInputStream(input); } private bool LoadFromObj(FileInfo serialObj) @@ -217,76 +200,65 @@ private bool LoadFromObj(FileInfo serialObj) private void LoadFromObjectInputStream(Stream serialObjectInputStream) { - //ObjectInputStream input = new ObjectInputStream(serialObjectInputStream); - //wordIndexTable = (short[])input.ReadObject(); - //charIndexTable = (char[])input.ReadObject(); - //wordItem_charArrayTable = (char[][][])input.ReadObject(); - //wordItem_frequencyTable = (int[][])input.ReadObject(); - //// log.info("load core dict from serialization."); - //input.close(); - - using (var reader = new BinaryReader(serialObjectInputStream)) - //using (var reader = new DataInputStream(serialObjectInputStream)) - { + using var reader = new BinaryReader(serialObjectInputStream); - // Read wordIndexTable - int wiLen = reader.ReadInt32(); - wordIndexTable = new short[wiLen]; - for (int i = 0; i < wiLen; i++) - { - wordIndexTable[i] = reader.ReadInt16(); - } + // Read wordIndexTable + int wiLen = reader.ReadInt32(); + wordIndexTable = new short[wiLen]; + for (int i = 0; i < wiLen; i++) + { + wordIndexTable[i] = reader.ReadInt16(); + } - // Read charIndexTable - int ciLen = reader.ReadInt32(); - charIndexTable = new 
char[ciLen]; - for (int i = 0; i < ciLen; i++) - { - charIndexTable[i] = reader.ReadChar(); - } + // Read charIndexTable + int ciLen = reader.ReadInt32(); + charIndexTable = new char[ciLen]; + for (int i = 0; i < ciLen; i++) + { + charIndexTable[i] = reader.ReadChar(); + } - // Read wordItem_charArrayTable - int caDim1 = reader.ReadInt32(); - if (caDim1 > -1) + // Read wordItem_charArrayTable + int caDim1 = reader.ReadInt32(); + if (caDim1 > -1) + { + wordItem_charArrayTable = new char[caDim1][][]; + for (int i = 0; i < caDim1; i++) { - wordItem_charArrayTable = new char[caDim1][][]; - for (int i = 0; i < caDim1; i++) + int caDim2 = reader.ReadInt32(); + if (caDim2 > -1) { - int caDim2 = reader.ReadInt32(); - if (caDim2 > -1) + wordItem_charArrayTable[i] = new char[caDim2][]; + for (int j = 0; j < caDim2; j++) { - wordItem_charArrayTable[i] = new char[caDim2][]; - for (int j = 0; j < caDim2; j++) + int caDim3 = reader.ReadInt32(); + if (caDim3 > -1) { - int caDim3 = reader.ReadInt32(); - if (caDim3 > -1) + wordItem_charArrayTable[i][j] = new char[caDim3]; + for (int k = 0; k < caDim3; k++) { - wordItem_charArrayTable[i][j] = new char[caDim3]; - for (int k = 0; k < caDim3; k++) - { - wordItem_charArrayTable[i][j][k] = reader.ReadChar(); - } + wordItem_charArrayTable[i][j][k] = reader.ReadChar(); } } } } } + } - // Read wordItem_frequencyTable - int fDim1 = reader.ReadInt32(); - if (fDim1 > -1) + // Read wordItem_frequencyTable + int fDim1 = reader.ReadInt32(); + if (fDim1 > -1) + { + wordItem_frequencyTable = new int[fDim1][]; + for (int i = 0; i < fDim1; i++) { - wordItem_frequencyTable = new int[fDim1][]; - for (int i = 0; i < fDim1; i++) + int fDim2 = reader.ReadInt32(); + if (fDim2 > -1) { - int fDim2 = reader.ReadInt32(); - if (fDim2 > -1) + wordItem_frequencyTable[i] = new int[fDim2]; + for (int j = 0; j < fDim2; j++) { - wordItem_frequencyTable[i] = new int[fDim2]; - for (int j = 0; j < fDim2; j++) - { - wordItem_frequencyTable[i][j] = reader.ReadInt32(); - } 
+ wordItem_frequencyTable[i][j] = reader.ReadInt32(); } } } @@ -299,73 +271,60 @@ private void SaveToObj(FileInfo serialObj) { try { - //ObjectOutputStream output = new ObjectOutputStream(new FileStream( - // serialObj.FullName, FileMode.Create, FileAccess.Write)); - //output.writeObject(wordIndexTable); - //output.writeObject(charIndexTable); - //output.writeObject(wordItem_charArrayTable); - //output.writeObject(wordItem_frequencyTable); - //output.close(); - //// log.info("serialize core dict."); - - using (Stream stream = new FileStream(serialObj.FullName, FileMode.Create, FileAccess.Write)) + using Stream stream = new FileStream(serialObj.FullName, FileMode.Create, FileAccess.Write); + using var writer = new BinaryWriter(stream); + // Write wordIndexTable + int wiLen = wordIndexTable.Length; + writer.Write(wiLen); + for (int i = 0; i < wiLen; i++) { - using (var writer = new BinaryWriter(stream)) - { - // Write wordIndexTable - int wiLen = wordIndexTable.Length; - writer.Write(wiLen); - for (int i = 0; i < wiLen; i++) - { - writer.Write(wordIndexTable[i]); - } + writer.Write(wordIndexTable[i]); + } - // Write charIndexTable - int ciLen = charIndexTable.Length; - writer.Write(ciLen); - for (int i = 0; i < ciLen; i++) - { - writer.Write(charIndexTable[i]); - } + // Write charIndexTable + int ciLen = charIndexTable.Length; + writer.Write(ciLen); + for (int i = 0; i < ciLen; i++) + { + writer.Write(charIndexTable[i]); + } - // Write wordItem_charArrayTable - int caDim1 = wordItem_charArrayTable == null ? -1 : wordItem_charArrayTable.Length; - writer.Write(caDim1); - for (int i = 0; i < caDim1; i++) + // Write wordItem_charArrayTable + int caDim1 = wordItem_charArrayTable == null ? -1 : wordItem_charArrayTable.Length; + writer.Write(caDim1); + for (int i = 0; i < caDim1; i++) + { + int caDim2 = wordItem_charArrayTable[i] == null ? 
-1 : wordItem_charArrayTable[i].Length; + writer.Write(caDim2); + for (int j = 0; j < caDim2; j++) + { + int caDim3 = wordItem_charArrayTable[i][j] == null ? -1 : wordItem_charArrayTable[i][j].Length; + writer.Write(caDim3); + for (int k = 0; k < caDim3; k++) { - int caDim2 = wordItem_charArrayTable[i] == null ? -1 : wordItem_charArrayTable[i].Length; - writer.Write(caDim2); - for (int j = 0; j < caDim2; j++) - { - int caDim3 = wordItem_charArrayTable[i][j] == null ? -1 : wordItem_charArrayTable[i][j].Length; - writer.Write(caDim3); - for (int k = 0; k < caDim3; k++) - { - writer.Write(wordItem_charArrayTable[i][j][k]); - } - } + writer.Write(wordItem_charArrayTable[i][j][k]); } + } + } - // Write wordItem_frequencyTable - int fDim1 = wordItem_frequencyTable == null ? -1 : wordItem_frequencyTable.Length; - writer.Write(fDim1); - for (int i = 0; i < fDim1; i++) - { - int fDim2 = wordItem_frequencyTable[i] == null ? -1 : wordItem_frequencyTable[i].Length; - writer.Write(fDim2); - for (int j = 0; j < fDim2; j++) - { - writer.Write(wordItem_frequencyTable[i][j]); - } - } + // Write wordItem_frequencyTable + int fDim1 = wordItem_frequencyTable == null ? -1 : wordItem_frequencyTable.Length; + writer.Write(fDim1); + for (int i = 0; i < fDim1; i++) + { + int fDim2 = wordItem_frequencyTable[i] == null ? 
-1 : wordItem_frequencyTable[i].Length; + writer.Write(fDim2); + for (int j = 0; j < fDim2; j++) + { + writer.Write(wordItem_frequencyTable[i][j]); } } // log.info("serialize core dict."); } -#pragma warning disable 168 +#pragma warning disable 168, IDE0059 catch (Exception e) -#pragma warning restore 168 +#pragma warning restore 168, IDE0059 { // log.warn(e.getMessage()); } @@ -386,7 +345,6 @@ private int LoadMainDataFromFile(string dctFilePath) buffer = new int[3]; byte[] intBuffer = new byte[4]; string tmpword; - //using (RandomAccessFile dctFile = new RandomAccessFile(dctFilePath, "r")) using (var dctFile = new FileStream(dctFilePath, FileMode.Open, FileAccess.Read)) { @@ -430,11 +388,7 @@ private int LoadMainDataFromFile(string dctFilePath) { byte[] lchBuffer = new byte[length]; dctFile.Read(lchBuffer, 0, lchBuffer.Length); - //tmpword = new String(lchBuffer, "GB2312"); tmpword = Encoding.GetEncoding("GB2312").GetString(lchBuffer); - //tmpword = Encoding.GetEncoding("hz-gb-2312").GetString(lchBuffer); - // indexTable[i].wordItems[j].word = tmpword; - // wordItemTable[i][j].charArray = tmpword.toCharArray(); wordItem_charArrayTable[i][j] = tmpword.ToCharArray(); } else diff --git a/src/Lucene.Net.Analysis.SmartCn/SentenceTokenizer.cs b/src/Lucene.Net.Analysis.SmartCn/SentenceTokenizer.cs index 08b5a317f9..791cd1dbba 100644 --- a/src/Lucene.Net.Analysis.SmartCn/SentenceTokenizer.cs +++ b/src/Lucene.Net.Analysis.SmartCn/SentenceTokenizer.cs @@ -36,7 +36,7 @@ public sealed class SentenceTokenizer : Tokenizer /// /// End of sentence punctuation: 。,!?;,!?; /// - private readonly static string PUNCTION = "。,!?;,!?;"; + private const string PUNCTION = "。,!?;,!?;"; private readonly StringBuilder buffer = new StringBuilder(); diff --git a/src/Lucene.Net.Analysis.SmartCn/SmartChineseAnalyzer.cs b/src/Lucene.Net.Analysis.SmartCn/SmartChineseAnalyzer.cs index a4c6cb1a0b..8e2e62c22b 100644 --- a/src/Lucene.Net.Analysis.SmartCn/SmartChineseAnalyzer.cs +++ 
b/src/Lucene.Net.Analysis.SmartCn/SmartChineseAnalyzer.cs @@ -137,7 +137,7 @@ public SmartChineseAnalyzer(LuceneVersion matchVersion, bool useDefaultStopWords /// of stopwords to use. public SmartChineseAnalyzer(LuceneVersion matchVersion, CharArraySet stopWords) { - this.stopWords = stopWords == null ? CharArraySet.EMPTY_SET : stopWords; + this.stopWords = stopWords ?? CharArraySet.EMPTY_SET; this.matchVersion = matchVersion; } diff --git a/src/Lucene.Net.Analysis.SmartCn/SmartChineseSentenceTokenizerFactory.cs b/src/Lucene.Net.Analysis.SmartCn/SmartChineseSentenceTokenizerFactory.cs index bd24d13225..52c5470aac 100644 --- a/src/Lucene.Net.Analysis.SmartCn/SmartChineseSentenceTokenizerFactory.cs +++ b/src/Lucene.Net.Analysis.SmartCn/SmartChineseSentenceTokenizerFactory.cs @@ -40,7 +40,7 @@ public SmartChineseSentenceTokenizerFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.SmartCn/SmartChineseWordTokenFilterFactory.cs b/src/Lucene.Net.Analysis.SmartCn/SmartChineseWordTokenFilterFactory.cs index b7a2385b81..771448f85b 100644 --- a/src/Lucene.Net.Analysis.SmartCn/SmartChineseWordTokenFilterFactory.cs +++ b/src/Lucene.Net.Analysis.SmartCn/SmartChineseWordTokenFilterFactory.cs @@ -43,7 +43,7 @@ public SmartChineseWordTokenFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.SmartCn/Utility.cs b/src/Lucene.Net.Analysis.SmartCn/Utility.cs index 0615bbfc32..2750386d3b 100644 --- a/src/Lucene.Net.Analysis.SmartCn/Utility.cs +++ b/src/Lucene.Net.Analysis.SmartCn/Utility.cs @@ -23,7 +23,7 @@ namespace Lucene.Net.Analysis.Cn.Smart 
/// /// @lucene.experimental /// - public class Utility + public static class Utility // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public static readonly char[] STRING_CHAR_ARRAY = "未##串".ToCharArray(); diff --git a/src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs b/src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs index 6ad831dee1..66bd89c76b 100644 --- a/src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs +++ b/src/Lucene.Net.Analysis.SmartCn/WordSegmenter.cs @@ -29,9 +29,9 @@ namespace Lucene.Net.Analysis.Cn.Smart /// internal class WordSegmenter { - private HHMMSegmenter hhmmSegmenter = new HHMMSegmenter(); + private readonly HHMMSegmenter hhmmSegmenter = new HHMMSegmenter(); // LUCENENET: marked readonly - private SegTokenFilter tokenFilter = new SegTokenFilter(); + private readonly SegTokenFilter tokenFilter = new SegTokenFilter(); // LUCENENET: marked readonly /// /// Segment a sentence into words with diff --git a/src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs b/src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs index 5af9a4e808..84c97d8fe4 100644 --- a/src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs +++ b/src/Lucene.Net.Analysis.SmartCn/WordTokenFilter.cs @@ -31,7 +31,7 @@ namespace Lucene.Net.Analysis.Cn.Smart [Obsolete("Use HMMChineseTokenizer instead.")] public sealed class WordTokenFilter : TokenFilter { - private WordSegmenter wordSegmenter; + private readonly WordSegmenter wordSegmenter; // LUCENENET: marked readonly private IEnumerator tokenIter; @@ -109,7 +109,32 @@ public override bool IncrementToken() public override void Reset() { base.Reset(); + tokenIter?.Dispose(); // LUCENENET specific tokenIter = null; } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + tokenIter?.Dispose(); // LUCENENET specific - dispose tokenIter and set to null + tokenIter = null; + } + } + finally + { + base.Dispose(disposing); + } + } } } diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs index afec5573ec..06ed0841fa 100644 --- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs +++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Compile.cs @@ -66,16 +66,11 @@ namespace Egothor.Stemmer /// /// The Compile class is used to compile a stemmer table. /// - public class Compile + public static class Compile // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - static bool backward; - static bool multi; - static Trie trie; - - /// - /// no instantiation - /// - private Compile() { } + private static bool backward; + private static bool multi; + private static Trie trie; /// /// Entry point to the Compile application. 
@@ -219,12 +214,10 @@ public static void Main(string[] args) trie.PrintInfo(Console.Out, prefix + " "); } - using (DataOutputStream os = new DataOutputStream( - new FileStream(stemmerTable + ".out", FileMode.OpenOrCreate, FileAccess.Write))) - { - os.WriteUTF(args[0]); - trie.Store(os); - } + using DataOutputStream os = new DataOutputStream( + new FileStream(stemmerTable + ".out", FileMode.OpenOrCreate, FileAccess.Write)); + os.WriteUTF(args[0]); + trie.Store(os); } } diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Diff.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Diff.cs index 556a8e5364..713062977d 100644 --- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Diff.cs +++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Diff.cs @@ -71,15 +71,15 @@ namespace Egothor.Stemmer /// public class Diff { - int sizex = 0; - int sizey = 0; - int[][] net; - int[][] way; + private int sizex = 0; + private int sizey = 0; + private int[][] net; + private int[][] way; - int INSERT; - int DELETE; - int REPLACE; - int NOOP; + private readonly int INSERT; // LUCENENET: marked readonly + private readonly int DELETE; // LUCENENET: marked readonly + private readonly int REPLACE; // LUCENENET: marked readonly + private readonly int NOOP; // LUCENENET: marked readonly /// /// Constructor for the Diff object. 
@@ -324,7 +324,7 @@ public string Exec(string a, string b) if (deletes != @base) { result.Append("D" + (deletes)); - deletes = @base; + //deletes = @base; // LUCENENET: IDE0059: Remove unnecessary value assignment } return result.ToString(); diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs index 21c5a3b02a..874ad53a23 100644 --- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs +++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/DiffIt.cs @@ -68,17 +68,11 @@ namespace Egothor.Stemmer /// The DiffIt class is a means generate patch commands from an already prepared /// stemmer table. /// - public class DiffIt + public static class DiffIt // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - /// - /// no instantiation - /// - private DiffIt() { } - internal static int Get(int i, string s) { - int result; - if (!int.TryParse(s.Substring(i, 1), NumberStyles.Integer, CultureInfo.InvariantCulture, out result)) + if (!int.TryParse(s.Substring(i, 1), NumberStyles.Integer, CultureInfo.InvariantCulture, out int result)) { return 1; } @@ -124,31 +118,29 @@ public static void Main(string[] args) // System.out.println("[" + args[i] + "]"); Diff diff = new Diff(ins, del, rep, nop); - using (TextReader input = new StreamReader(new FileStream(stemmerTable, FileMode.Open, FileAccess.Read), Encoding.GetEncoding(charset))) + using TextReader input = new StreamReader(new FileStream(stemmerTable, FileMode.Open, FileAccess.Read), Encoding.GetEncoding(charset)); + string line; + while ((line = input.ReadLine()) != null) { - string line; - while ((line = input.ReadLine()) != null) + try { - try + line = line.ToLowerInvariant(); + StringTokenizer st = new StringTokenizer(line); + st.MoveNext(); + string stem = st.Current; + Console.WriteLine(stem + " -a"); + while (st.MoveNext()) { - line = line.ToLowerInvariant(); - StringTokenizer st = new StringTokenizer(line); 
- st.MoveNext(); - string stem = st.Current; - Console.WriteLine(stem + " -a"); - while (st.MoveNext()) + string token = st.Current; + if (token.Equals(stem, StringComparison.Ordinal) == false) { - string token = st.Current; - if (token.Equals(stem, StringComparison.Ordinal) == false) - { - Console.WriteLine(stem + " " + diff.Exec(token, stem)); - } + Console.WriteLine(stem + " " + diff.Exec(token, stem)); } } - catch (InvalidOperationException /*x*/) - { - // no base token (stem) on a line - } + } + catch (InvalidOperationException /*x*/) + { + // no base token (stem) on a line } } } diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs index b8defb9957..05a32136c5 100644 --- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs +++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Gener.cs @@ -77,7 +77,7 @@ public Gener() { } public override Trie Optimize(Trie orig) { IList cmds = orig.cmds; - IList rows = new List(); + IList rows; // LUCENENET: IDE0059: Remove unnecessary value assignment IList orows = orig.rows; int[] remap = new int[orows.Count]; @@ -103,6 +103,7 @@ public override Trie Optimize(Trie orig) /// the Row to test /// Description of the Parameter /// true if the Row should remain; otherwise, false + [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This is a shipped public API")] public bool Eat(Row @in, int[] remap) { int sum = 0; diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs index d14a45c07c..d5d2282905 100644 --- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs +++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Lift.cs @@ -67,7 +67,7 @@ namespace Egothor.Stemmer /// public class Lift : Reduce { - bool changeSkip; + private readonly bool changeSkip; // LUCENENET: marked readonly /// /// Constructor for the Lift 
object. @@ -90,7 +90,7 @@ public Lift(bool changeSkip) public override Trie Optimize(Trie orig) { IList cmds = orig.cmds; - IList rows = new List(); + IList rows; // LUCENENET: IDE0059: Remove unnecessary value assignment IList orows = orig.rows; int[] remap = new int[orows.Count]; diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs index c69c6792be..80e152e7d9 100644 --- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs +++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/MultiTrie.cs @@ -72,7 +72,7 @@ public class MultiTrie : Trie protected List m_tries = new List(); - int BY = 1; + private readonly int BY = 1; // LUCENENET: marked readonly /// /// Constructor for the object. diff --git a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs index 0e45f78b88..f438c3cc1c 100644 --- a/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs +++ b/src/Lucene.Net.Analysis.Stempel/Egothor.Stemmer/Trie.cs @@ -248,7 +248,7 @@ public virtual string GetFully(string key) int cmd = -1; StrEnum e = new StrEnum(key, forward); char ch; - char aux; + //char aux; // LUCENENET: IDE0059: Remove unnecessary value assignment for (int i = 0; i < key.Length;) { @@ -267,7 +267,7 @@ public virtual string GetFully(string key) { if (i < key.Length) { - aux = e.Next(); + /*aux =*/e.Next(); // LUCENENET: IDE0059: Remove unnecessary value assignment } else { @@ -429,9 +429,9 @@ public virtual void PrintInfo(TextWriter @out, string prefix) /// internal class StrEnum { - private string s; + private readonly string s; // LUCENENET: marked readonly private int from; - private int by; + private readonly int by; // LUCENENET: marked readonly /// /// Constructor for the object diff --git a/src/Lucene.Net.Analysis.Stempel/Stempel/StempelPolishStemFilterFactory.cs b/src/Lucene.Net.Analysis.Stempel/Stempel/StempelPolishStemFilterFactory.cs index 
fad0cf5839..3cfe6b25d1 100644 --- a/src/Lucene.Net.Analysis.Stempel/Stempel/StempelPolishStemFilterFactory.cs +++ b/src/Lucene.Net.Analysis.Stempel/Stempel/StempelPolishStemFilterFactory.cs @@ -35,7 +35,7 @@ public StempelPolishStemFilterFactory(IDictionary args) { if (args.Count > 0) { - throw new ArgumentException("Unknown parameters: " + args); + throw new ArgumentException(string.Format(J2N.Text.StringFormatter.CurrentCulture, "Unknown parameters: {0}", args)); } } diff --git a/src/Lucene.Net.Analysis.Stempel/Stempel/StempelStemmer.cs b/src/Lucene.Net.Analysis.Stempel/Stempel/StempelStemmer.cs index 546ffb2197..8a6f6aed4b 100644 --- a/src/Lucene.Net.Analysis.Stempel/Stempel/StempelStemmer.cs +++ b/src/Lucene.Net.Analysis.Stempel/Stempel/StempelStemmer.cs @@ -33,8 +33,8 @@ namespace Lucene.Net.Analysis.Stempel /// public class StempelStemmer { - private Trie stemmer = null; - private StringBuilder buffer = new StringBuilder(); + private readonly Trie stemmer = null; // LUCENENET: marked readonly + private readonly StringBuilder buffer = new StringBuilder(); // LUCENENET: marked readonly /// /// Create a Stemmer using selected stemmer table diff --git a/src/Lucene.Net.Benchmark/ByTask/Benchmark.cs b/src/Lucene.Net.Benchmark/ByTask/Benchmark.cs index 6d54e007a3..3fb7a8f276 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Benchmark.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Benchmark.cs @@ -44,8 +44,8 @@ namespace Lucene.Net.Benchmarks.ByTask /// public class Benchmark { - private PerfRunData runData; - private Algorithm algorithm; + private readonly PerfRunData runData; // LUCENENET: marked readonly + private readonly Algorithm algorithm; // LUCENENET: marked readonly private bool executed; public Benchmark(TextReader algReader) diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs index 4bf4e8fc02..07f2a19ead 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs +++ 
b/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs @@ -229,8 +229,7 @@ public virtual DocData Parse(DocData docData, string name, DateTime? date, Input // properties IDictionary props = p.MetaTags; - string dateStr; - if (props.TryGetValue("date", out dateStr) && dateStr != null) + if (props.TryGetValue("date", out string dateStr) && dateStr != null) { DateTime? newDate = trecSrc.ParseDate(dateStr); if (newDate != null) diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs index e39ada646b..3868ba3d24 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/DirContentSource.cs @@ -82,7 +82,7 @@ which reverses again */ /* this seems silly ... there must be a better way ... not that this is good, but can it matter? */ - private Comparer c = new Comparer(); + private readonly Comparer c = new Comparer(); // LUCENENET: marked readonly private FileInfo current; @@ -151,8 +151,7 @@ public virtual void Reset() private DateTime? ParseDate(string dateStr) { - DateTime temp; - if (DateTime.TryParseExact(dateStr, "dd-MMM-yyyy hh:mm:ss.fff", CultureInfo.InvariantCulture, DateTimeStyles.None, out temp)) + if (DateTime.TryParseExact(dateStr, "dd-MMM-yyyy hh:mm:ss.fff", CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime temp)) { return temp; } @@ -164,10 +163,17 @@ public virtual void Reset() return null; } + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
protected override void Dispose(bool disposing) { if (disposing) { + inputFiles?.Dispose(); // LUCENENET specific - dispose inputFiles inputFiles = null; } } diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs index 2dc49e215b..1ac16f3bab 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/DocMaker.cs @@ -123,8 +123,7 @@ internal Field GetField(string name, FieldType ft) return new Field(name, "", ft); } - Field f; - if (!fields.TryGetValue(name, out f) || f == null) + if (!fields.TryGetValue(name, out Field f) || f == null) { f = new Field(name, "", ft); fields[name] = f; @@ -249,13 +248,12 @@ private Document CreateDocument(DocData docData, int size, int cnt) if (dateString != null) { // LUCENENET: TryParseExact needs a non-nullable DateTime to work. - DateTime temp; if (DateTime.TryParseExact(dateString, new string[] { // Original format from Java "dd-MMM-yyyy HH:mm:ss", // Actual format from the test files... "yyyyMMddHHmmss" - }, CultureInfo.InvariantCulture, DateTimeStyles.None, out temp)) + }, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime temp)) { date = temp; } @@ -294,7 +292,7 @@ private Document CreateDocument(DocData docData, int size, int cnt) // Set TITLE_FIELD string title = docData.Title; Field titleField = ds.GetField(TITLE_FIELD, m_valType); - titleField.SetStringValue(title == null ? "" : title); + titleField.SetStringValue(title ?? 
""); doc.Add(titleField); string body = docData.Body; @@ -385,7 +383,9 @@ protected virtual void Dispose(bool disposing) { if (disposing) { - m_source.Dispose(); + m_source?.Dispose(); + leftovr?.Dispose(); // LUCENENET specific + docState?.Dispose(); // LUCENENET specific } } diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs index a41899d205..07bb721a06 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiContentSource.cs @@ -329,8 +329,7 @@ public EnwikiContentSource() /// private static int GetElementType(string elem) { - int? val; - ELEMENTS.TryGetValue(elem, out val); + ELEMENTS.TryGetValue(elem, out int? val); return val == null ? -1 : val.Value; } diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs index 69179db618..8967321418 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/EnwikiQueryMaker.cs @@ -37,7 +37,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds public class EnwikiQueryMaker : AbstractQueryMaker, IQueryMaker { // common and a few uncommon queries from wikipedia search logs - private static string[] STANDARD_QUERIES = { "Images catbox gif", + private static readonly string[] STANDARD_QUERIES = { "Images catbox gif", // LUCENENET: marked readonly "Imunisasi haram", "Favicon ico", "Michael jackson", "Unknown artist", "Lily Thai", "Neda", "The Last Song", "Metallica", "Nicola Tesla", "Max B", "Skil Corporation", "\"The 100 Greatest Artists of All Time\"", @@ -98,18 +98,16 @@ private static Query[] CreateQueries(IList qs, Analyzer a) { try { - object query = qs[i]; Query q = null; - if (query is string) + if (query is string queryString) { - q = qp.Parse((string)query); + q = qp.Parse(queryString); } - else if (query is Query) + else if (query is Query 
queryObj) { - q = (Query)query; - + q = queryObj; } else { diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs index 219beb6c87..e3d2f0f7f9 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/LineDocSource.cs @@ -186,7 +186,7 @@ public abstract class LineParser /// Construct with the header /// /// header line found in the input file, or null if none. - public LineParser(string[] header) + protected LineParser(string[] header) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_header = header; } diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/LongToEnglishQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/LongToEnglishQueryMaker.cs index 78ac92427a..42f4a5ce7a 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/LongToEnglishQueryMaker.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/LongToEnglishQueryMaker.cs @@ -33,7 +33,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds /// public class Int64ToEnglishQueryMaker : IQueryMaker { - long counter = long.MinValue + 10; + private long counter = long.MinValue + 10; protected QueryParser m_parser; //// TODO: we could take param to specify locale... 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs index a98c0546a7..e7f38f73f6 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersContentSource.cs @@ -39,7 +39,7 @@ public class ReutersContentSource : ContentSource // LUCENENET specific: DateFormatInfo not used private DirectoryInfo dataDir = null; - private List inputFiles = new List(); + private readonly List inputFiles = new List(); // LUCENENET: marked readonly private int nextFile = 0; private int iteration = 0; @@ -48,7 +48,7 @@ public override void SetConfig(Config config) base.SetConfig(config); DirectoryInfo workDir = new DirectoryInfo(config.Get("work.dir", "work")); string d = config.Get("docs.dir", "reuters-out"); - dataDir = new DirectoryInfo(d); + dataDir = new DirectoryInfo(Path.Combine(workDir.FullName, d)); inputFiles.Clear(); CollectFiles(dataDir, inputFiles); if (inputFiles.Count == 0) @@ -61,8 +61,7 @@ public override void SetConfig(Config config) private DateTime? 
ParseDate(string dateStr) { - DateTime temp; - if (DateTime.TryParseExact(dateStr, "dd-MMM-yyyy hh:mm:ss.fff", CultureInfo.InvariantCulture, DateTimeStyles.None, out temp)) + if (DateTime.TryParseExact(dateStr, "dd-MMM-yyyy hh:mm:ss.fff", CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime temp)) { return temp; } @@ -99,33 +98,31 @@ public override DocData GetNextDocData(DocData docData) name = f.GetCanonicalPath() + "_" + iteration; } - using (TextReader reader = new StreamReader(new FileStream(f.FullName, FileMode.Open, FileAccess.Read), Encoding.UTF8)) + using TextReader reader = new StreamReader(new FileStream(f.FullName, FileMode.Open, FileAccess.Read), Encoding.UTF8); + // First line is the date, 3rd is the title, rest is body + string dateStr = reader.ReadLine(); + reader.ReadLine();// skip an empty line + string title = reader.ReadLine(); + reader.ReadLine();// skip an empty line + StringBuilder bodyBuf = new StringBuilder(1024); + string line = null; + while ((line = reader.ReadLine()) != null) { - // First line is the date, 3rd is the title, rest is body - string dateStr = reader.ReadLine(); - reader.ReadLine();// skip an empty line - string title = reader.ReadLine(); - reader.ReadLine();// skip an empty line - StringBuilder bodyBuf = new StringBuilder(1024); - string line = null; - while ((line = reader.ReadLine()) != null) - { - bodyBuf.Append(line).Append(' '); - } - reader.Dispose(); + bodyBuf.Append(line).Append(' '); + } + reader.Dispose(); - AddBytes(f.Length); + AddBytes(f.Length); - DateTime? date = ParseDate(dateStr.Trim()); + DateTime? 
date = ParseDate(dateStr.Trim()); - docData.Clear(); - docData.Name = name; - docData.Body = bodyBuf.ToString(); - docData.Title = title; - docData.SetDate(date); - return docData; - } + docData.Clear(); + docData.Name = name; + docData.Body = bodyBuf.ToString(); + docData.Title = title; + docData.SetDate(date); + return docData; } public override void ResetInputs() diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs index 63ba165fb0..238979d0a0 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/ReutersQueryMaker.cs @@ -35,7 +35,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds /// public class ReutersQueryMaker : AbstractQueryMaker, IQueryMaker { - private static string[] STANDARD_QUERIES = { + private static readonly string[] STANDARD_QUERIES = { // LUCENENET: marked readonly //Start with some short queries "Salomon", "Comex", "night trading", "Japan Sony", //Try some Phrase Queries @@ -79,18 +79,16 @@ private static Query[] CreateQueries(IList qs, Analyzer a) { try { - object query = qs[i]; Query q = null; - if (query is string) + if (query is string queryString) { - q = qp.Parse((string)query); + q = qp.Parse(queryString); } - else if (query is Query) + else if (query is Query queryObj) { - q = (Query)query; - + q = queryObj; } else { diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs index 130a3abf14..fa14e8bb7f 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SimpleQueryMaker.cs @@ -54,9 +54,11 @@ protected override Query[] PrepareQueries() qq.Add(q1); Query q2 = new TermQuery(new Term(DocMaker.BODY_FIELD, "simple")); qq.Add(q2); - BooleanQuery bq = new BooleanQuery(); - bq.Add(q1, Occur.MUST); - bq.Add(q2, Occur.MUST); + BooleanQuery bq = new BooleanQuery + { + { q1, 
Occur.MUST }, + { q2, Occur.MUST } + }; qq.Add(bq); qq.Add(qp.Parse("synthetic body")); qq.Add(qp.Parse("\"synthetic body\"")); diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SingleDocSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SingleDocSource.cs index 6b01faf8dc..6fed31496c 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SingleDocSource.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SingleDocSource.cs @@ -24,7 +24,7 @@ public class SingleDocSource : ContentSource { private int docID = 0; - internal static readonly string DOC_TEXT = + internal const string DOC_TEXT = "Well, this is just some plain text we use for creating the " + "test documents. It used to be a text from an online collection " + "devoted to first aid, but if there was there an (online) lawyers " + diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SortableSingleDocSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SortableSingleDocSource.cs index c83828cd80..1c91e81588 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SortableSingleDocSource.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SortableSingleDocSource.cs @@ -32,7 +32,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds /// public class SortableSingleDocSource : SingleDocSource { - private static string[] COUNTRIES = new string[] { + private static readonly string[] COUNTRIES = new string[] { // LUCENENET: marked readonly "European Union", "United States", "Japan", "Germany", "China (PRC)", "United Kingdom", "France", "Italy", "Spain", "Canada", "Brazil", "Russia", "India", "South Korea", "Australia", "Mexico", "Netherlands", "Turkey", @@ -79,10 +79,11 @@ public class SortableSingleDocSource : SingleDocSource public override DocData GetNextDocData(DocData docData) { docData = base.GetNextDocData(docData); - var props = new Dictionary(); - - // random int - props["sort_field"] = r.Next(sortRange).ToString(CultureInfo.InvariantCulture); + var props = new Dictionary + { + // random int + ["sort_field"] = 
r.Next(sortRange).ToString(CultureInfo.InvariantCulture) + }; // random string int len = NextInt32(2, 20); diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs index f44512a867..8953a474b1 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs @@ -43,7 +43,7 @@ public class SpatialDocMaker : DocMaker public static readonly string SPATIAL_FIELD = "spatial"; //cache spatialStrategy by round number - private static IDictionary spatialStrategyCache = new Dictionary(); + private static readonly IDictionary spatialStrategyCache = new Dictionary(); // LUCENENET: marked readonly private SpatialStrategy strategy; private IShapeConverter shapeConverter; @@ -55,8 +55,7 @@ public class SpatialDocMaker : DocMaker /// public static SpatialStrategy GetSpatialStrategy(int roundNumber) { - SpatialStrategy result; - if (!spatialStrategyCache.TryGetValue(roundNumber, out result) || result == null) + if (!spatialStrategyCache.TryGetValue(roundNumber, out SpatialStrategy result) || result == null) { throw new InvalidOperationException("Strategy should have been init'ed by SpatialDocMaker by now"); } @@ -121,8 +120,7 @@ public RecursivePrefixTreeStrategyAnonymousHelper(SpatialPrefixTree grid, string public override void SetConfig(Config config, ContentSource source) { base.SetConfig(config, source); - SpatialStrategy existing; - if (!spatialStrategyCache.TryGetValue(config.RoundNumber, out existing) || existing == null) + if (!spatialStrategyCache.TryGetValue(config.RoundNumber, out SpatialStrategy existing) || existing == null) { //new round; we need to re-initialize strategy = MakeSpatialStrategy(config); @@ -164,9 +162,8 @@ public ShapeConverterAnonymousHelper(SpatialStrategy spatialStrategy, double rad public IShape Convert(IShape shape) { - if (shape is IPoint && (radiusDegrees != 0.0 || plusMinus != 0.0)) + if ((radiusDegrees != 
0.0 || plusMinus != 0.0) && shape is IPoint point) { - IPoint point = (IPoint)shape; double radius = radiusDegrees; if (plusMinus > 0.0) { diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs index d583d222ee..9d12160ed0 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialFileQueryMaker.cs @@ -117,9 +117,9 @@ protected virtual Query MakeQueryFromShape(IShape shape) // makeFilter() and wrap Filter filter = m_strategy.MakeFilter(args); - if (filter is QueryWrapperFilter) + if (filter is QueryWrapperFilter queryWrapperFilter) { - return ((QueryWrapperFilter)filter).Query; + return queryWrapperFilter.Query; } else { diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs index 2e360f546e..7149191051 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecContentSource.cs @@ -89,7 +89,7 @@ public class TrecContentSource : ContentSource private readonly List inputFiles = new List(); private int nextFile = 0; // Use to synchronize threads on reading from the TREC documents. 
- private object @lock = new object(); + private readonly object @lock = new object(); // LUCENENET: marked readonly // Required for test internal TextReader reader; @@ -143,7 +143,7 @@ private void Read(StringBuilder buf, string lineStart, if (collectMatchLine) { buf.Append(sep).Append(line); - sep = NEW_LINE; + //sep = NEW_LINE; // LUCENENET: IDE0059: Remove unnecessary value assignment - this skips out of the loop } return; } @@ -158,7 +158,7 @@ private void Read(StringBuilder buf, string lineStart, internal virtual void OpenNextFile() { - Dispose(); + DoClose(); //currPathType = null; while (true) { @@ -216,7 +216,7 @@ internal virtual void OpenNextFile() return null; } - protected override void Dispose(bool disposing) + private void DoClose() // LUCENENET specific - separate disposing from closing so those tasks that "reopen" can continue { if (reader == null) { @@ -225,7 +225,7 @@ protected override void Dispose(bool disposing) try { - reader.Dispose(); + reader?.Dispose(); } catch (IOException e) { @@ -238,6 +238,21 @@ protected override void Dispose(bool disposing) reader = null; } + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ protected override void Dispose(bool disposing) + { + if (disposing) + { + DoClose(); + trecDocBuffer?.Dispose(); // LUCENENET specific + } + } + public override DocData GetNextDocData(DocData docData) { string name = null; @@ -293,7 +308,7 @@ public override void ResetInputs() lock (@lock) { base.ResetInputs(); - Dispose(); + DoClose(); nextFile = 0; iteration = 0; } @@ -305,7 +320,7 @@ public override void SetConfig(Config config) // dirs DirectoryInfo workDir = new DirectoryInfo(config.Get("work.dir", "work")); string d = config.Get("docs.dir", "trec"); - dataDir = new DirectoryInfo(d); + dataDir = new DirectoryInfo(Path.Combine(workDir.FullName, d)); // files CollectFiles(dataDir, inputFiles); if (inputFiles.Count == 0) diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs index 7f42706259..03e991a04a 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecDocParser.cs @@ -58,7 +58,7 @@ public enum ParsePathType { GOV2, FBIS, FT, FR94, LATIMES, UNKNOWN } /// max length of walk up from file to its ancestors when looking for a known path type. - private static readonly int MAX_PATH_LENGTH = 10; + private const int MAX_PATH_LENGTH = 10; /// /// Compute the path type of a file by inspecting name of file and its parents. @@ -66,8 +66,7 @@ public enum ParsePathType { GOV2, FBIS, FT, FR94, LATIMES, UNKNOWN } public static ParsePathType PathType(FileInfo f) { int pathLength = 0; - ParsePathType? ppt; - if (pathName2Type.TryGetValue(f.Name.ToUpperInvariant(), out ppt) && ppt != null) + if (pathName2Type.TryGetValue(f.Name.ToUpperInvariant(), out ParsePathType? 
ppt) && ppt != null) { return ppt.Value; } diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs index 530ccda458..3d7f3c6e1d 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFBISParser.cs @@ -26,15 +26,15 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds /// public class TrecFBISParser : TrecDocParser { - private static readonly string HEADER = "
"; - private static readonly string HEADER_END = "
"; + private const string HEADER = "
"; + private const string HEADER_END = "
"; private static readonly int HEADER_END_LENGTH = HEADER_END.Length; - private static readonly string DATE1 = ""; - private static readonly string DATE1_END = ""; + private const string DATE1 = ""; + private const string DATE1_END = ""; - private static readonly string TI = ""; - private static readonly string TI_END = ""; + private const string TI = ""; + private const string TI_END = ""; public override DocData Parse(DocData docData, string name, TrecContentSource trecSrc, StringBuilder docBuf, ParsePathType pathType) diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs index d4a022de2e..b02dc4e76c 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFR94Parser.cs @@ -26,17 +26,17 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds /// public class TrecFR94Parser : TrecDocParser { - private static readonly string TEXT = ""; + private const string TEXT = ""; private static readonly int TEXT_LENGTH = TEXT.Length; - private static readonly string TEXT_END = ""; + private const string TEXT_END = ""; - private static readonly string DATE = ""; + private const string DATE = ""; private static readonly string[] DATE_NOISE_PREFIXES = { "DATE:", "date:", //TODO improve date extraction for this format "t.c.", }; - private static readonly string DATE_END = ""; + private const string DATE_END = ""; //TODO can we also extract title for this format? 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFTParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFTParser.cs index 189f6cb964..49fc7c131a 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFTParser.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecFTParser.cs @@ -25,11 +25,11 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds /// public class TrecFTParser : TrecDocParser { - private static readonly string DATE = ""; - private static readonly string DATE_END = ""; + private const string DATE = ""; + private const string DATE_END = ""; - private static readonly string HEADLINE = ""; - private static readonly string HEADLINE_END = ""; + private const string HEADLINE = ""; + private const string HEADLINE_END = ""; public override DocData Parse(DocData docData, string name, TrecContentSource trecSrc, StringBuilder docBuf, ParsePathType pathType) diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs index 595b566b4a..d8af6e844d 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecGov2Parser.cs @@ -27,11 +27,11 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds /// public class TrecGov2Parser : TrecDocParser { - private static readonly string DATE = "Date: "; + private const string DATE = "Date: "; private static readonly string DATE_END = TrecContentSource.NEW_LINE; - private static readonly string DOCHDR = ""; - private static readonly string TERMINATING_DOCHDR = ""; + private const string DOCHDR = ""; + private const string TERMINATING_DOCHDR = ""; public override DocData Parse(DocData docData, string name, TrecContentSource trecSrc, StringBuilder docBuf, ParsePathType pathType) diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecLATimesParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecLATimesParser.cs index c5611e2701..6cd00541c5 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecLATimesParser.cs +++ 
b/src/Lucene.Net.Benchmark/ByTask/Feeds/TrecLATimesParser.cs @@ -25,14 +25,14 @@ namespace Lucene.Net.Benchmarks.ByTask.Feeds /// public class TrecLATimesParser : TrecDocParser { - private static readonly string DATE = ""; - private static readonly string DATE_END = ""; - private static readonly string DATE_NOISE = "day,"; // anything aftre the ',' + private const string DATE = ""; + private const string DATE_END = ""; + private const string DATE_NOISE = "day,"; // anything aftre the ',' - private static readonly string SUBJECT = ""; - private static readonly string SUBJECT_END = ""; - private static readonly string HEADLINE = ""; - private static readonly string HEADLINE_END = ""; + private const string SUBJECT = ""; + private const string SUBJECT_END = ""; + private const string HEADLINE = ""; + private const string HEADLINE_END = ""; public override DocData Parse(DocData docData, string name, TrecContentSource trecSrc, StringBuilder docBuf, ParsePathType pathType) diff --git a/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs b/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs index 007f74f1ec..2101448a5e 100644 --- a/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs +++ b/src/Lucene.Net.Benchmark/ByTask/PerfRunData.cs @@ -62,31 +62,35 @@ namespace Lucene.Net.Benchmarks.ByTask /// public class PerfRunData : IDisposable { - private Points points; + private readonly Points points; // LUCENENET: marked readonly // objects used during performance test run // directory, analyzer, docMaker - created at startup. - // reader, writer, searcher - maintained by basic tasks. + // reader, writer, searcher - maintained by basic tasks. 
+#pragma warning disable CA2213 // Disposable fields should be disposed private Store.Directory directory; - private IDictionary analyzerFactories = new Dictionary(); +#pragma warning restore CA2213 // Disposable fields should be disposed + private readonly IDictionary analyzerFactories = new Dictionary(); // LUCENENET: marked readonly private Analyzer analyzer; - private DocMaker docMaker; - private ContentSource contentSource; - private FacetSource facetSource; + private readonly DocMaker docMaker; // LUCENENET: marked readonly + private readonly ContentSource contentSource; // LUCENENET: marked readonly + private readonly FacetSource facetSource; // LUCENENET: marked readonly private CultureInfo locale; +#pragma warning disable CA2213 // Disposable fields should be disposed private Store.Directory taxonomyDir; +#pragma warning restore CA2213 // Disposable fields should be disposed private ITaxonomyWriter taxonomyWriter; private TaxonomyReader taxonomyReader; // we use separate (identical) instances for each "read" task type, so each can iterate the quries separately. 
- private IDictionary readTaskQueryMaker; - private Type qmkrClass; + private readonly IDictionary readTaskQueryMaker; + private readonly Type qmkrClass; private DirectoryReader indexReader; private IndexSearcher indexSearcher; private IndexWriter indexWriter; - private Config config; + private readonly Config config; private long startTimeMillis; private readonly IDictionary perfObjects = new Dictionary(); @@ -146,9 +150,9 @@ protected virtual void Dispose(bool disposing) List perfObjectsToClose = new List(); foreach (object obj in perfObjects.Values) { - if (obj is IDisposable) + if (obj is IDisposable disposable) { - perfObjectsToClose.Add((IDisposable)obj); + perfObjectsToClose.Add(disposable); } } IOUtils.Dispose(perfObjectsToClose); @@ -207,8 +211,7 @@ public virtual object GetPerfObject(string key) { lock (this) { - object result; - perfObjects.TryGetValue(key, out result); + perfObjects.TryGetValue(key, out object result); return result; } } @@ -443,8 +446,7 @@ public virtual IQueryMaker GetQueryMaker(ReadTask readTask) // mapping the query maker by task class allows extending/adding new search/read tasks // without needing to modify this class. Type readTaskClass = readTask.GetType(); - IQueryMaker qm; - if (!readTaskQueryMaker.TryGetValue(readTaskClass, out qm) || qm == null) + if (!readTaskQueryMaker.TryGetValue(readTaskClass, out IQueryMaker qm) || qm == null) { try { diff --git a/src/Lucene.Net.Benchmark/ByTask/Programmatic/Sample.cs b/src/Lucene.Net.Benchmark/ByTask/Programmatic/Sample.cs index 11f2a30074..7f0078fc40 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Programmatic/Sample.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Programmatic/Sample.cs @@ -25,8 +25,9 @@ namespace Lucene.Net.Benchmarks.ByTask.Programmatic /// /// Sample performance test written programmatically - no algorithm file is needed here. 
/// - public class Sample + public static class Sample // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Sample shows optional args[] parameter")] public static void Main(string[] args) { var p = InitProps(); @@ -69,22 +70,23 @@ public static void Main(string[] args) // Sample programmatic settings. Could also read from file. private static IDictionary InitProps() { - var p = new Dictionary(); - p["task.max.depth.log"] = "3"; - p["max.buffered"] = "buf:10:10:100:100:10:10:100:100"; - //p["doc.maker"] = "Lucene.Net.Benchmarks.ByTask.Feeds.ReutersContentSource, Lucene.Net.Benchmark"; - p["log.step"] = "2000"; - p["doc.delete.step"] = "8"; - p["analyzer"] = "Lucene.Net.Analysis.Standard.StandardAnalyzer, Lucene.Net.Analysis.Common"; - p["doc.term.vector"] = "false"; - p["directory"] = "FSDirectory"; - p["query.maker"] = "Lucene.Net.Benchmarks.ByTask.Feeds.ReutersQueryMaker, Lucene.Net.Benchmark"; - p["doc.stored"] = "true"; - p["docs.dir"] = "reuters-out"; - p["compound"] = "cmpnd:true:true:true:true:false:false:false:false"; - p["doc.tokenized"] = "true"; - p["merge.factor"] = "mrg:10:100:10:100:10:100:10:100"; - return p; + return new Dictionary + { + ["task.max.depth.log"] = "3", + ["max.buffered"] = "buf:10:10:100:100:10:10:100:100", + //p["doc.maker"] = "Lucene.Net.Benchmarks.ByTask.Feeds.ReutersContentSource, Lucene.Net.Benchmark"; + ["log.step"] = "2000", + ["doc.delete.step"] = "8", + ["analyzer"] = "Lucene.Net.Analysis.Standard.StandardAnalyzer, Lucene.Net.Analysis.Common", + ["doc.term.vector"] = "false", + ["directory"] = "FSDirectory", + ["query.maker"] = "Lucene.Net.Benchmarks.ByTask.Feeds.ReutersQueryMaker, Lucene.Net.Benchmark", + ["doc.stored"] = "true", + ["docs.dir"] = "reuters-out", + ["compound"] = "cmpnd:true:true:true:true:false:false:false:false", + ["doc.tokenized"] = "true", + ["merge.factor"] = 
"mrg:10:100:10:100:10:100:10:100" + }; } } } diff --git a/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs b/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs index f08408c1a5..b12985f3f6 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Stats/Points.cs @@ -29,7 +29,7 @@ public class Points // stat points ordered by their start time. // for now we collect points as TaskStats objects. // later might optimize to collect only native data. - private List points = new List(); + private readonly List points = new List(); // LUCENENET: marked readonly private int nextTaskRunNum = 0; @@ -38,7 +38,9 @@ public class Points /// /// Create a Points statistics object. /// +#pragma warning disable IDE0060 // Remove unused parameter public Points(Config config) +#pragma warning restore IDE0060 // Remove unused parameter { } diff --git a/src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs b/src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs index 49fc09619a..35760bb633 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Stats/Report.cs @@ -22,10 +22,10 @@ /// public class Report { - private string text; - private int size; - private int outOf; - private int reported; + private readonly string text; // LUCENENET: marked readonly + private readonly int size; // LUCENENET: marked readonly + private readonly int outOf; // LUCENENET: marked readonly + private readonly int reported; // LUCENENET: marked readonly public Report(string text, int size, int reported, int outOf) { diff --git a/src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs b/src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs index 7c6d1a394d..0010f97ff2 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Stats/TaskStats.cs @@ -35,13 +35,13 @@ public class TaskStats #endif { /// Task for which data was collected. 
- private PerfTask task; + private readonly PerfTask task; // LUCENENET: marked readonly /// Round in which task run started. private int round; - /// Task start time. - private long start; + ///// Task start time. + //private long start; // LUCENENET: Never read /// Task elapsed time. elapsed >= 0 indicates run completion! private long elapsed = -1; @@ -53,7 +53,7 @@ public class TaskStats private long maxUsedMem; /// Serial run number of this task run in the perf run. - private int taskRunNum; + private readonly int taskRunNum; // LUCENENET: marked readonly /// Number of other tasks that started to run while this task was still running. private int numParallelTasks; @@ -83,7 +83,7 @@ internal TaskStats(PerfTask task, int taskRunNum, int round) this.round = round; maxTotMem = GC.GetTotalMemory(false); //Runtime.getRuntime().totalMemory(); maxUsedMem = maxTotMem; // - Runtime.getRuntime().freeMemory(); // LUCENENET TODO: available RAM - start = Stopwatch.GetTimestamp(); + //start = Stopwatch.GetTimestamp(); // LUCENENET: Never read } /// diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/AddIndexesTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/AddIndexesTask.cs index f4e2556c15..723610061d 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/AddIndexesTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/AddIndexesTask.cs @@ -94,8 +94,33 @@ public override void SetParams(string @params) public override void TearDown() { - inputDir.Dispose(); + inputDir?.Dispose(); + inputDir = null; // LUCENENET specific base.TearDown(); } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + inputDir?.Dispose(); // LUCENENET specific - dispose tokens and set to null + inputDir = null; + } + } + finally + { + base.Dispose(disposing); + } + } } } diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs index cd730b61fb..50c0236bba 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/AnalyzerFactoryTask.cs @@ -72,16 +72,16 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks /// public class AnalyzerFactoryTask : PerfTask { - private static readonly string LUCENE_ANALYSIS_PACKAGE_PREFIX = "Lucene.Net.Analysis."; + private const string LUCENE_ANALYSIS_PACKAGE_PREFIX = "Lucene.Net.Analysis."; private static readonly Regex ANALYSIS_COMPONENT_SUFFIX_PATTERN = new Regex("(?s:(?:(?:Token|Char)?Filter|Tokenizer)(?:Factory)?)$", RegexOptions.Compiled); private static readonly Regex TRAILING_DOT_ZERO_PATTERN = new Regex(@"\.0$", RegexOptions.Compiled); private enum ArgType { ANALYZER_ARG, ANALYZER_ARG_OR_CHARFILTER_OR_TOKENIZER, TOKENFILTER } - string factoryName = null; - int? positionIncrementGap = null; - int? offsetGap = null; + private string factoryName = null; + private int? positionIncrementGap = null; + private int? 
offsetGap = null; private readonly IList charFilterFactories = new List(); private TokenizerFactory tokenizerFactory = null; private readonly IList tokenFilterFactories = new List(); @@ -481,10 +481,10 @@ private void CreateAnalysisPipelineComponent(StreamTokenizer stok, Type clazz) { throw new Exception("Line #" + GetLineNumber(stok) + ": ", e); } - if (instance is IResourceLoaderAware) + if (instance is IResourceLoaderAware resourceLoaderAware) { DirectoryInfo baseDir = new DirectoryInfo(RunData.Config.Get("work.dir", "work")); - ((IResourceLoaderAware)instance).Inform(new FilesystemResourceLoader(baseDir)); + resourceLoaderAware.Inform(new FilesystemResourceLoader(baseDir)); } if (typeof(CharFilterFactory).IsAssignableFrom(clazz)) { diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/CloseIndexTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/CloseIndexTask.cs index 2047494f90..a85683d827 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/CloseIndexTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/CloseIndexTask.cs @@ -34,7 +34,7 @@ public CloseIndexTask(PerfRunData runData) { } - bool doWait = true; + private bool doWait = true; public override int DoLogic() { diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/CommitIndexTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/CommitIndexTask.cs index 63859cde6c..9c7d080a8d 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/CommitIndexTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/CommitIndexTask.cs @@ -25,7 +25,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks /// public class CommitIndexTask : PerfTask { - IDictionary commitUserData; + private IDictionary commitUserData; public CommitIndexTask(PerfRunData runData) : base(runData) @@ -37,8 +37,10 @@ public CommitIndexTask(PerfRunData runData) public override void SetParams(string @params) { base.SetParams(@params); - commitUserData = new Dictionary(); - commitUserData[OpenReaderTask.USER_DATA] = @params; + commitUserData = new Dictionary + { + 
[OpenReaderTask.USER_DATA] = @params + }; } public override int DoLogic() diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ConsumeContentSourceTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ConsumeContentSourceTask.cs index d35687b8aa..50c39c8d06 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ConsumeContentSourceTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ConsumeContentSourceTask.cs @@ -44,5 +44,28 @@ public override int DoLogic() dd.Value = source.GetNextDocData(dd.Value); return 1; } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. + + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + dd.Dispose(); // LUCENENET specific - dispose dd + } + } + finally + { + base.Dispose(disposing); + } + } } } diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs index 470fbb3ab2..53a68a58ae 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs @@ -191,9 +191,8 @@ public static IndexWriterConfig CreateWriterConfig(Config config, PerfRunData ru throw new Exception("unable to instantiate class '" + mergePolicy + "' as merge policy", e); } iwConf.MergePolicy.NoCFSRatio = isCompound ? 
1.0 : 0.0; - if (iwConf.MergePolicy is LogMergePolicy) + if (iwConf.MergePolicy is LogMergePolicy logMergePolicy) { - LogMergePolicy logMergePolicy = (LogMergePolicy)iwConf.MergePolicy; logMergePolicy.MergeFactor = config.Get("merge.factor", OpenIndexTask.DEFAULT_MERGE_PFACTOR); } } diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ForceMergeTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ForceMergeTask.cs index 8226e1148c..61c49d173a 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ForceMergeTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ForceMergeTask.cs @@ -33,7 +33,7 @@ public ForceMergeTask(PerfRunData runData) { } - int maxNumSegments = -1; + private int maxNumSegments = -1; public override int DoLogic() { diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs index 28e95b3a63..d2a5dc8751 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewAnalyzerTask.cs @@ -73,8 +73,7 @@ public override int DoLogic() analyzerName = typeof(Lucene.Net.Analysis.Standard.StandardAnalyzer).AssemblyQualifiedName; } // First, lookup analyzerName as a named analyzer factory - AnalyzerFactory factory; - if (RunData.AnalyzerFactories.TryGetValue(analyzerName, out factory) && null != factory) + if (RunData.AnalyzerFactories.TryGetValue(analyzerName, out AnalyzerFactory factory) && null != factory) { analyzer = factory.Create(); } diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewLocaleTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewLocaleTask.cs index 34f4830e8c..1762b372a7 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/NewLocaleTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/NewLocaleTask.cs @@ -79,7 +79,7 @@ public override void SetParams(string @params) base.SetParams(@params); //language = country = variant = ""; culture = ""; - string ignore; + string _; StringTokenizer st = new StringTokenizer(@params, ","); if 
(st.MoveNext()) //language = st.nextToken(); @@ -87,7 +87,7 @@ public override void SetParams(string @params) if (st.MoveNext()) culture += "-" + st.Current; if (st.MoveNext()) - ignore = st.Current; + _ = st.Current; } public override bool SupportsParams => true; diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs index 28350cdd95..a7967e7684 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/OpenReaderTask.cs @@ -43,7 +43,7 @@ public OpenReaderTask(PerfRunData runData) public override int DoLogic() { Store.Directory dir = RunData.Directory; - DirectoryReader r = null; + DirectoryReader r; // LUCENENET: IDE0059: Remove unnecessary value assignment if (commitUserData != null) { r = DirectoryReader.Open(OpenReaderTask.FindIndexCommit(dir, commitUserData)); diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs index feb4fceba6..16214808fd 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/PerfTask.cs @@ -63,16 +63,16 @@ public abstract class PerfTask : IDisposable , System.ICloneable #endif { - internal static readonly int DEFAULT_LOG_STEP = 1000; + internal const int DEFAULT_LOG_STEP = 1000; - private PerfRunData runData; + private readonly PerfRunData runData; // propeties that all tasks have private string name; private int depth = 0; protected int m_logStep; private int logStepCount = 0; - private int maxDepthLogStart = 0; + private readonly int maxDepthLogStart = 0; // LUCENENET: marked readonly private bool disableCounting = false; protected string m_params = null; @@ -121,7 +121,7 @@ public virtual void StopNow() stopNow = true; } - public PerfTask(PerfRunData runData) + protected PerfTask(PerfRunData runData) : this() { this.runData = runData; diff --git 
a/src/Lucene.Net.Benchmark/ByTask/Tasks/PrintReaderTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/PrintReaderTask.cs index bd4f5f83c3..0e2f37f3a9 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/PrintReaderTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/PrintReaderTask.cs @@ -44,7 +44,7 @@ public override void SetParams(string @params) public override int DoLogic() { Directory dir = RunData.Directory; - IndexReader r = null; + IndexReader r; // LUCENENET: IDE0059: Remove unnecessary value assignment if (userData == null) r = DirectoryReader.Open(dir); else diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs index 43ea456337..02ea9afb53 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTask.cs @@ -48,7 +48,7 @@ public abstract class ReadTask : PerfTask { private readonly IQueryMaker queryMaker; - public ReadTask(PerfRunData runData) + protected ReadTask(PerfRunData runData) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(runData) { if (WithSearch) @@ -89,7 +89,7 @@ public override int DoLogic() // optionally warm and add num docs traversed to count if (WithWarm) { - Document doc = null; + Document doc; // LUCENENET: IDE0059: Remove unnecessary value assignment IBits liveDocs = MultiFields.GetLiveDocs(reader); for (int m = 0; m < reader.MaxDoc; m++) { @@ -260,7 +260,7 @@ protected virtual Document RetrieveDoc(IndexReader ir, int id) /// public virtual int TraversalSize => int.MaxValue; - internal static readonly int DEFAULT_SEARCH_NUM_HITS = 10; + internal const int DEFAULT_SEARCH_NUM_HITS = 10; private int numHits; public override void Setup() diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTokensTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTokensTask.cs index d2b60587b7..03665ddec1 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTokensTask.cs +++ 
b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReadTokensTask.cs @@ -76,19 +76,17 @@ field is SingleField || continue; } - using (TokenStream stream = field.GetTokenStream(analyzer)) - { - // reset the TokenStream to the first token - stream.Reset(); + using TokenStream stream = field.GetTokenStream(analyzer); + // reset the TokenStream to the first token + stream.Reset(); - ITermToBytesRefAttribute termAtt = stream.GetAttribute(); - while (stream.IncrementToken()) - { - termAtt.FillBytesRef(); - tokenCount++; - } - stream.End(); + ITermToBytesRefAttribute termAtt = stream.GetAttribute(); + while (stream.IncrementToken()) + { + termAtt.FillBytesRef(); + tokenCount++; } + stream.End(); } totalTokenCount += tokenCount; return tokenCount; @@ -103,9 +101,9 @@ field is SingleField || internal sealed class ReusableStringReader : TextReader { - int upto; - int left; - string s; + private int upto; + private int left; + private string s; internal void Init(string s) { this.s = s; @@ -156,5 +154,29 @@ private int Read(char[] c, int off, int len, bool returnZeroWhenComplete) protected override void Dispose(bool disposing) { } } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + stringReader?.Dispose(); // LUCENENET specific - dispose stringReader and set to null + stringReader = null; + } + } + finally + { + base.Dispose(disposing); + } + } } } diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameRoundTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameRoundTask.cs index b551bf62ac..357c6d9ee8 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameRoundTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameRoundTask.cs @@ -64,8 +64,7 @@ protected virtual Report ReportSumByNameRound(IList taskStats) reported++; string name = stat1.Task.GetName(); string rname = stat1.Round + "." + name; // group by round - TaskStats stat2; - if (!p2.TryGetValue(rname, out stat2) || stat2 == null) + if (!p2.TryGetValue(rname, out TaskStats stat2) || stat2 == null) { stat2 = (TaskStats)stat1.Clone(); diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameTask.cs index cb710b8f97..e340318209 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByNameTask.cs @@ -63,8 +63,7 @@ protected virtual Report ReportSumByName(IList taskStats) { // consider only tasks that ended reported++; string name = stat1.Task.GetName(); - TaskStats stat2; - if (!p2.TryGetValue(name, out stat2) || stat2 == null) + if (!p2.TryGetValue(name, out TaskStats stat2) || stat2 == null) { stat2 = (TaskStats)stat1.Clone(); p2[name] = stat2; diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefRoundTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefRoundTask.cs index 7b0d2607c1..04383d77e7 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefRoundTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefRoundTask.cs @@ -60,8 +60,7 @@ protected virtual Report 
ReportSumByPrefixRound(IList taskStats) reported++; string name = stat1.Task.GetName(); string rname = stat1.Round + "." + name; // group by round - TaskStats stat2; - if (!p2.TryGetValue(rname, out stat2) || stat2 == null) + if (!p2.TryGetValue(rname, out TaskStats stat2) || stat2 == null) { stat2 = (TaskStats)stat1.Clone(); diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefTask.cs index ca3a6ec827..83ea4e15a5 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/RepSumByPrefTask.cs @@ -61,8 +61,7 @@ protected virtual Report ReportSumByPrefix(IList taskStats) { // only ended tasks with proper name reported++; string name = stat1.Task.GetName(); - TaskStats stat2; - if (!p2.TryGetValue(name, out stat2) || stat2 == null) + if (!p2.TryGetValue(name, out TaskStats stat2) || stat2 == null) { stat2 = (TaskStats)stat1.Clone(); p2[name] = stat2; diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReportTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReportTask.cs index 642947f4e4..30b8004155 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/ReportTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/ReportTask.cs @@ -29,7 +29,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Tasks /// public abstract class ReportTask : PerfTask { - public ReportTask(PerfRunData runData) + protected ReportTask(PerfRunData runData) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(runData) { } diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchWithCollectorTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchWithCollectorTask.cs index cb02ede4a0..f7a7f4ad0f 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchWithCollectorTask.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchWithCollectorTask.cs @@ -47,7 +47,7 @@ public override void Setup() protected override ICollector CreateCollector() { - ICollector 
collector = null; + ICollector collector; // LUCENENET: IDE0059: Remove unnecessary value assignment if (m_clnName.Equals("topScoreDocOrdered", StringComparison.OrdinalIgnoreCase) == true) { collector = TopScoreDocCollector.Create(NumHits, true); diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/TaskSequence.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/TaskSequence.cs index afd9eac06e..cfa6ef063d 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Tasks/TaskSequence.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/TaskSequence.cs @@ -51,11 +51,11 @@ public class TaskSequence : PerfTask private double runTimeSec; // how long to run for private readonly long logByTimeMsec; - public TaskSequence(PerfRunData runData, String name, TaskSequence parent, bool parallel) + public TaskSequence(PerfRunData runData, string name, TaskSequence parent, bool parallel) : base(runData) { - collapsable = (name == null); - name = (name != null ? name : (parallel ? "Par" : "Seq")); + collapsable = name == null; + name = name ?? (parallel ? 
"Par" : "Seq"); SetName(name); SetSequenceName(); this.parent = parent; @@ -335,9 +335,8 @@ private void UpdateExhausted(PerfTask task) exhausted = false; resetExhausted = true; } - else if (task is TaskSequence) + else if (task is TaskSequence t) { - TaskSequence t = (TaskSequence)task; if (t.resetExhausted) { exhausted = false; @@ -407,7 +406,7 @@ public override void StopNow() } } - ParallelTask[] runningParallelTasks; + private ParallelTask[] runningParallelTasks; private int DoParallelTasks() { @@ -443,23 +442,19 @@ private int DoParallelTasks() { t[i].Join(); count += t[i].Count; - if (t[i].Task is TaskSequence) + if (t[i].Task is TaskSequence sub && sub.countsByTime != null) { - TaskSequence sub = (TaskSequence)t[i].Task; - if (sub.countsByTime != null) + if (countsByTime == null) { - if (countsByTime == null) - { - countsByTime = new int[sub.countsByTime.Length]; - } - else if (countsByTime.Length < sub.countsByTime.Length) - { - countsByTime = ArrayUtil.Grow(countsByTime, sub.countsByTime.Length); - } - for (int j = 0; j < sub.countsByTime.Length; j++) - { - countsByTime[j] += sub.countsByTime[j]; - } + countsByTime = new int[sub.countsByTime.Length]; + } + else if (countsByTime.Length < sub.countsByTime.Length) + { + countsByTime = ArrayUtil.Grow(countsByTime, sub.countsByTime.Length); + } + for (int j = 0; j < sub.countsByTime.Length; j++) + { + countsByTime[j] += sub.countsByTime[j]; } } } @@ -560,9 +555,9 @@ public virtual void SetNoChildReport() letChildReport = false; foreach (PerfTask task in tasks) { - if (task is TaskSequence) + if (task is TaskSequence taskSequence) { - ((TaskSequence)task).SetNoChildReport(); + taskSequence.SetNoChildReport(); } } } diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs index ba77fe4cd2..fcf86ef348 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Utils/Algorithm.cs @@ -35,7 +35,7 @@ namespace 
Lucene.Net.Benchmarks.ByTask.Utils /// public class Algorithm { - private TaskSequence sequence; + private readonly TaskSequence sequence; // LUCENENET: marked readonly private readonly string[] taskPackages; /// @@ -80,12 +80,12 @@ public Algorithm(PerfRunData runData) case StreamTokenizer.TokenType_Word: string s = stok.StringValue; - PerfTask task = (PerfTask)Activator.CreateInstance(TaskClass(config, s), runData); + PerfTask task = (PerfTask)Activator.CreateInstance(TaskClass(/*config, // LUCENENET: Not referenced */ s), runData); task.AlgLineNum = stok.LineNumber; task.DisableCounting = isDisableCountNextTask; isDisableCountNextTask = false; currSequence.AddTask(task); - if (task is RepSumByPrefTask) + if (task is RepSumByPrefTask repSumByPrefTask) { stok.NextToken(); string prefix = stok.StringValue; @@ -93,7 +93,7 @@ public Algorithm(PerfRunData runData) { throw new Exception("named report prefix problem - " + stok.ToString()); } - ((RepSumByPrefTask)task).SetPrefix(prefix); + repSumByPrefTask.SetPrefix(prefix); } // check for task param: '(' someParam ')' stok.NextToken(); @@ -185,7 +185,7 @@ public Algorithm(PerfRunData runData) case ':': if (!colonOk) throw new Exception("colon unexpexted: - " + stok.ToString()); - colonOk = false; + //colonOk = false; // LUCENENET: IDE0059: Remove unnecessary value assignment - this is assigned again below without being read // get repetitions number stok.NextToken(); if ((char)stok.TokenType == '*') @@ -346,9 +346,9 @@ public Algorithm(PerfRunData runData) if (t != null && t.Count == 1) { PerfTask p = t[0]; - if (p is TaskSequence) + if (p is TaskSequence taskSequence) { - sequence = (TaskSequence)p; + sequence = taskSequence; continue; } } @@ -384,7 +384,7 @@ private string[] InitTasksPackages(Config config) return result.ToArray(); } - private Type TaskClass(Config config, string taskName) + private Type TaskClass(/*Config config, // LUCENENET: Not referenced */ string taskName) { foreach (string pkg in taskPackages) 
{ @@ -398,7 +398,7 @@ private Type TaskClass(Config config, string taskName) throw new TypeLoadException(taskName + " not found in packages " + Collections.ToString(taskPackages)); } - private Type LoadType(string assemblyName, string typeName) + private static Type LoadType(string assemblyName, string typeName) // LUCENENET: CA1822: Mark members as static { return Assembly.Load(new AssemblyName(assemblyName)).GetTypes().FirstOrDefault(t => t.Name == typeName); } @@ -448,9 +448,9 @@ private void ExtractTasks(IList extrct, TaskSequence seq) return; foreach (PerfTask p in t) { - if (p is TaskSequence) + if (p is TaskSequence taskSequence) { - ExtractTasks(extrct, (TaskSequence)p); + ExtractTasks(extrct, taskSequence); } else { diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs index 8abb754566..2deee0b810 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Utils/Config.cs @@ -87,11 +87,10 @@ public Config(TextReader algReader) this.props = new Dictionary(); writer.Flush(); ms.Position = 0; - props.LoadProperties(ms); + props.LoadProperties(ms); // make sure work dir is set properly - string temp; - if (!props.TryGetValue("work.dir", out temp) || temp == null) + if (!props.TryGetValue("work.dir", out string temp) || temp == null) { // LUCENENET specific - reformatted with : props["work.dir"] = SystemProperties.GetProperty("benchmark:work:dir", "work"); @@ -126,8 +125,7 @@ public Config(TextReader algReader) public Config(IDictionary props) { this.props = props; - string temp; - if (props.TryGetValue("print.props", out temp)) + if (props.TryGetValue("print.props", out string temp)) { if (temp.Equals("true", StringComparison.OrdinalIgnoreCase)) { @@ -161,15 +159,13 @@ private void PrintProps() public virtual string Get(string name, string dflt) { string[] vals; - object temp; - if (valByRound.TryGetValue(name, out temp) && temp != null) + if 
(valByRound.TryGetValue(name, out object temp) && temp != null) { vals = (string[])temp; return vals[roundNumber % vals.Length]; } // done if not by round - string sval; - if (!props.TryGetValue(name, out sval)) + if (!props.TryGetValue(name, out string sval)) { sval = dflt; } @@ -206,8 +202,7 @@ public virtual string Get(string name, string dflt) /// Either single or multiple property value (multiple values are separated by ":") public virtual void Set(string name, string value) { - object temp; - if (valByRound.TryGetValue(name, out temp) && temp != null) + if (valByRound.TryGetValue(name, out object temp) && temp != null) { throw new Exception("Cannot modify a multi value property!"); } @@ -228,15 +223,13 @@ public virtual int Get(string name, int dflt) { // use value by round if already parsed int[] vals; - object temp; - if (valByRound.TryGetValue(name, out temp) && temp != null) + if (valByRound.TryGetValue(name, out object temp) && temp != null) { vals = (int[])temp; return vals[roundNumber % vals.Length]; } // done if not by round - string sval; - if (!props.TryGetValue(name, out sval)) + if (!props.TryGetValue(name, out string sval)) { sval = dflt.ToString(CultureInfo.InvariantCulture); } @@ -268,15 +261,13 @@ public virtual double Get(string name, double dflt) { // use value by round if already parsed double[] vals; - object temp; - if (valByRound.TryGetValue(name, out temp) && temp != null) + if (valByRound.TryGetValue(name, out object temp) && temp != null) { vals = (double[])temp; return vals[roundNumber % vals.Length]; } // done if not by round - string sval; - if (!props.TryGetValue(name, out sval)) + if (!props.TryGetValue(name, out string sval)) { sval = dflt.ToString(CultureInfo.InvariantCulture); } @@ -307,15 +298,13 @@ public virtual bool Get(string name, bool dflt) { // use value by round if already parsed bool[] vals; - object temp; - if (valByRound.TryGetValue(name, out temp) && temp != null) + if (valByRound.TryGetValue(name, out object 
temp) && temp != null) { vals = (bool[])temp; return vals[roundNumber % vals.Length]; } // done if not by round - string sval; - if (!props.TryGetValue(name, out sval)) + if (!props.TryGetValue(name, out string sval)) { sval = dflt.ToString(); // LUCENENET NOTE: bool ignores the IFormatProvider argument, it returns the values of constants } @@ -350,26 +339,23 @@ public virtual int NewRound() foreach (string name in valByRound.Keys) { object a = valByRound[name]; - if (a is int[]) + if (a is int[] ai) { - int[] ai = (int[])a; int n1 = (roundNumber - 1) % ai.Length; int n2 = roundNumber % ai.Length; sb.Append(" ").Append(name).Append(":").Append(ai[n1]).Append("-->").Append(ai[n2]); } - else if (a is double[]) + else if (a is double[] ad) { - double[] ad = (double[])a; int n1 = (roundNumber - 1) % ad.Length; int n2 = roundNumber % ad.Length; sb.Append(" ").Append(name).Append(":").Append(ad[n1]).Append("-->").Append(ad[n2]); } - else if (a is string[]) + else if (a is string[] astr) { - string[] ad = (string[])a; - int n1 = (roundNumber - 1) % ad.Length; - int n2 = roundNumber % ad.Length; - sb.Append(" ").Append(name).Append(":").Append(ad[n1]).Append("-->").Append(ad[n2]); + int n1 = (roundNumber - 1) % astr.Length; + int n2 = roundNumber % astr.Length; + sb.Append(" ").Append(name).Append(":").Append(astr[n1]).Append("-->").Append(astr[n2]); } else { @@ -514,25 +500,21 @@ public virtual string GetColsValuesForValsByRound(int roundNum) else { // append actual values, for that round - object a; - valByRound.TryGetValue(name, out a); - if (a is int[]) + valByRound.TryGetValue(name, out object a); + if (a is int[] ai) { - int[] ai = (int[])a; int n = roundNum % ai.Length; sb.Append(Formatter.Format(ai[n], template)); } - else if (a is double[]) + else if (a is double[] ad) { - double[] ad = (double[])a; int n = roundNum % ad.Length; sb.Append(Formatter.Format(2, ad[n], template)); } - else if (a is string[]) + else if (a is string[] astr) { - string[] ad = 
(string[])a; - int n = roundNum % ad.Length; - sb.Append(ad[n]); + int n = roundNum % astr.Length; + sb.Append(astr[n]); } else { diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/FileUtils.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/FileUtils.cs index 3d05db85fe..96bbf5c235 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Utils/FileUtils.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Utils/FileUtils.cs @@ -22,7 +22,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils /// /// File utilities. /// - public class FileUtils + public static class FileUtils // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// /// Delete files and directories, even if non-empty. diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/Format.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/Format.cs index 85f1fdd9d9..e9903df490 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Utils/Format.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Utils/Format.cs @@ -22,15 +22,15 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils /// /// Formatting utilities (for reports). /// - public class Formatter // LUCENENET specific - renamed from Format because of method name collision + public static class Formatter // LUCENENET specific - renamed from Format because of method name collision, CA1052 Static holder types should be Static or NotInheritable { - private static string[] numFormat = { + private static readonly string[] numFormat = { "N0", "N1", "N2" }; - private static readonly string padd = " "; + private const string padd = " "; /// /// Padd a number from left. diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/StreamUtils.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/StreamUtils.cs index f461cee90e..9aa90a9ac7 100644 --- a/src/Lucene.Net.Benchmark/ByTask/Utils/StreamUtils.cs +++ b/src/Lucene.Net.Benchmark/ByTask/Utils/StreamUtils.cs @@ -25,7 +25,7 @@ namespace Lucene.Net.Benchmarks.ByTask.Utils /// /// Stream utilities. 
/// - public class StreamUtils + public static class StreamUtils // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// Buffer size used across the benchmark package public static readonly int BUFFER_SIZE = 1 << 16; // 64K @@ -65,7 +65,7 @@ private static FileType GetFileType(FileInfo file) { extensionToType.TryGetValue(fileName.Substring(idx).ToLowerInvariant(), out type); } - return !type.HasValue ? FileType.PLAIN : type.Value; + return type ?? FileType.PLAIN ; } /// diff --git a/src/Lucene.Net.Benchmark/Constants.cs b/src/Lucene.Net.Benchmark/Constants.cs index 72bdbdcadc..0c3e860aad 100644 --- a/src/Lucene.Net.Benchmark/Constants.cs +++ b/src/Lucene.Net.Benchmark/Constants.cs @@ -20,14 +20,14 @@ /// /// Various benchmarking constants (mostly defaults) /// - public class Constants + public static class Constants // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - public static readonly int DEFAULT_RUN_COUNT = 5; - public static readonly int DEFAULT_SCALE_UP = 5; - public static readonly int DEFAULT_LOG_STEP = 1000; + public const int DEFAULT_RUN_COUNT = 5; + public const int DEFAULT_SCALE_UP = 5; + public const int DEFAULT_LOG_STEP = 1000; public static bool[] BOOLEANS = new bool[] { false, true }; - public static readonly int DEFAULT_MAXIMUM_DOCUMENTS = int.MaxValue; + public const int DEFAULT_MAXIMUM_DOCUMENTS = int.MaxValue; } } diff --git a/src/Lucene.Net.Benchmark/Quality/QualityQuery.cs b/src/Lucene.Net.Benchmark/Quality/QualityQuery.cs index 4e4af17ccb..f849b49c05 100644 --- a/src/Lucene.Net.Benchmark/Quality/QualityQuery.cs +++ b/src/Lucene.Net.Benchmark/Quality/QualityQuery.cs @@ -39,8 +39,8 @@ namespace Lucene.Net.Benchmarks.Quality /// public class QualityQuery : IComparable { - private string queryID; - private IDictionary nameValPairs; + private readonly string queryID; // LUCENENET: marked readonly + private readonly IDictionary nameValPairs; // LUCENENET: marked readonly /// 
/// Create a with given ID and name-value pairs. @@ -68,8 +68,7 @@ public virtual string[] GetNames() /// public virtual string GetValue(string name) { - string result; - nameValPairs.TryGetValue(name, out result); + nameValPairs.TryGetValue(name, out string result); return result; } diff --git a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs index 2485eabfc3..11c248838c 100644 --- a/src/Lucene.Net.Benchmark/Quality/QualityStats.cs +++ b/src/Lucene.Net.Benchmark/Quality/QualityStats.cs @@ -35,7 +35,7 @@ public class QualityStats private double maxGoodPoints; private double recall; - private double[] pAt; + private readonly double[] pAt; // LUCENENET: marked readonly private double pReleventSum = 0; private double numPoints = 0; private double numGoodPoints = 0; @@ -48,8 +48,8 @@ public class QualityStats /// public class RecallPoint { - private int rank; - private double recall; + private readonly int rank; // LUCENENET: marked readonly + private readonly double recall; // LUCENENET: marked readonly internal RecallPoint(int rank, double recall) { this.rank = rank; @@ -63,7 +63,7 @@ internal RecallPoint(int rank, double recall) public virtual double Recall => recall; } - private IList recallPoints; + private readonly IList recallPoints; // LUCENENET: marked readonly /// /// Construct a QualityStats object with anticipated maximal number of relevant hits. @@ -162,7 +162,7 @@ public virtual void Log(string title, int paddLines, TextWriter logger, string p { logger.WriteLine(title); } - prefix = prefix == null ? "" : prefix; + prefix = prefix ?? ""; string nf = "{0:F3}"; int M = 19; logger.WriteLine(prefix + Format("Search Seconds: ", M) + @@ -192,10 +192,10 @@ public virtual void Log(string title, int paddLines, TextWriter logger, string p } } - private static string padd = " "; + private const string padd = " "; private string Format(string s, int minLen) { - s = (s == null ? "" : s); + s = (s ?? 
""); int n = Math.Max(minLen, s.Length); return (s + padd).Substring(0, n-0); } diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/QueryDriver.cs b/src/Lucene.Net.Benchmark/Quality/Trec/QueryDriver.cs index bc44f5a0b4..ca045a42e7 100644 --- a/src/Lucene.Net.Benchmark/Quality/Trec/QueryDriver.cs +++ b/src/Lucene.Net.Benchmark/Quality/Trec/QueryDriver.cs @@ -32,7 +32,7 @@ namespace Lucene.Net.Benchmarks.Quality.Trec /// /// Command-line tool for doing a TREC evaluation run. /// - public class QueryDriver + public static class QueryDriver // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public static void Main(string[] args) { @@ -53,44 +53,42 @@ public static void Main(string[] args) FileInfo topicsFile = new FileInfo(args[0]); FileInfo qrelsFile = new FileInfo(args[1]); SubmissionReport submitLog = new SubmissionReport(new StreamWriter(new FileStream(args[2], FileMode.Create, FileAccess.Write), Encoding.UTF8 /* huh, no nio.Charset ctor? */), "lucene"); - using (Store.FSDirectory dir = Store.FSDirectory.Open(new DirectoryInfo(args[3]))) - using (IndexReader reader = DirectoryReader.Open(dir)) - { - string fieldSpec = args.Length == 5 ? args[4] : "T"; // default to Title-only if not specified. - IndexSearcher searcher = new IndexSearcher(reader); + using Store.FSDirectory dir = Store.FSDirectory.Open(new DirectoryInfo(args[3])); + using IndexReader reader = DirectoryReader.Open(dir); + string fieldSpec = args.Length == 5 ? args[4] : "T"; // default to Title-only if not specified. 
+ IndexSearcher searcher = new IndexSearcher(reader); - int maxResults = 1000; - string docNameField = "docname"; + int maxResults = 1000; + string docNameField = "docname"; - TextWriter logger = Console.Out; //new StreamWriter(Console, Encoding.GetEncoding(0)); + TextWriter logger = Console.Out; //new StreamWriter(Console, Encoding.GetEncoding(0)); - // use trec utilities to read trec topics into quality queries - TrecTopicsReader qReader = new TrecTopicsReader(); - QualityQuery[] qqs = qReader.ReadQueries(IOUtils.GetDecodingReader(topicsFile, Encoding.UTF8)); + // use trec utilities to read trec topics into quality queries + TrecTopicsReader qReader = new TrecTopicsReader(); + QualityQuery[] qqs = qReader.ReadQueries(IOUtils.GetDecodingReader(topicsFile, Encoding.UTF8)); - // prepare judge, with trec utilities that read from a QRels file - IJudge judge = new TrecJudge(IOUtils.GetDecodingReader(qrelsFile, Encoding.UTF8)); + // prepare judge, with trec utilities that read from a QRels file + IJudge judge = new TrecJudge(IOUtils.GetDecodingReader(qrelsFile, Encoding.UTF8)); - // validate topics & judgments match each other - judge.ValidateData(qqs, logger); + // validate topics & judgments match each other + judge.ValidateData(qqs, logger); - ISet fieldSet = new JCG.HashSet(); - if (fieldSpec.IndexOf('T') >= 0) fieldSet.Add("title"); - if (fieldSpec.IndexOf('D') >= 0) fieldSet.Add("description"); - if (fieldSpec.IndexOf('N') >= 0) fieldSet.Add("narrative"); + ISet fieldSet = new JCG.HashSet(); + if (fieldSpec.IndexOf('T') >= 0) fieldSet.Add("title"); + if (fieldSpec.IndexOf('D') >= 0) fieldSet.Add("description"); + if (fieldSpec.IndexOf('N') >= 0) fieldSet.Add("narrative"); - // set the parsing of quality queries into Lucene queries. - IQualityQueryParser qqParser = new SimpleQQParser(fieldSet.ToArray(), "body"); + // set the parsing of quality queries into Lucene queries. 
+ IQualityQueryParser qqParser = new SimpleQQParser(fieldSet.ToArray(), "body"); - // run the benchmark - QualityBenchmark qrun = new QualityBenchmark(qqs, qqParser, searcher, docNameField); - qrun.MaxResults = maxResults; - QualityStats[] stats = qrun.Execute(judge, submitLog, logger); + // run the benchmark + QualityBenchmark qrun = new QualityBenchmark(qqs, qqParser, searcher, docNameField); + qrun.MaxResults = maxResults; + QualityStats[] stats = qrun.Execute(judge, submitLog, logger); - // print an avarage sum of the results - QualityStats avg = QualityStats.Average(stats); - avg.Log("SUMMARY", 2, logger, " "); - } + // print an avarage sum of the results + QualityStats avg = QualityStats.Average(stats); + avg.Log("SUMMARY", 2, logger, " "); } } } diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/Trec1MQReader.cs b/src/Lucene.Net.Benchmark/Quality/Trec/Trec1MQReader.cs index d9be721bad..770b44ab9e 100644 --- a/src/Lucene.Net.Benchmark/Quality/Trec/Trec1MQReader.cs +++ b/src/Lucene.Net.Benchmark/Quality/Trec/Trec1MQReader.cs @@ -35,7 +35,7 @@ namespace Lucene.Net.Benchmarks.Quality.Trec /// public class Trec1MQReader { - private string name; + private readonly string name; // LUCENENET: marked readonly /// /// Constructor for Trec's 1MQ TopicsReader @@ -72,8 +72,10 @@ public virtual QualityQuery[] ReadQueries(TextReader reader) // qtext string qtext = line.Substring(k + 1).Trim(); // we got a topic! 
- IDictionary fields = new Dictionary(); - fields[name] = qtext; + IDictionary fields = new Dictionary + { + [name] = qtext + }; //System.out.println("id: "+id+" qtext: "+qtext+" line: "+line); QualityQuery topic = new QualityQuery(id, fields); res.Add(topic); diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs index bbf6dc2b0b..5d6a2c1a09 100644 --- a/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs +++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecJudge.cs @@ -30,7 +30,7 @@ namespace Lucene.Net.Benchmarks.Quality.Trec /// public class TrecJudge : IJudge { - IDictionary judgements; + private readonly IDictionary judgements; // LUCENENET: marked readonly /// /// Constructor from a reader. @@ -97,8 +97,7 @@ public TrecJudge(TextReader reader) // inherit javadocs public virtual bool IsRelevant(string docName, QualityQuery query) { - QRelJudgement qrj;// = judgements.get(query.getQueryID()); - judgements.TryGetValue(query.QueryID, out qrj); + judgements.TryGetValue(query.QueryID, out QRelJudgement qrj); return qrj != null && qrj.IsRelevant(docName); } @@ -108,7 +107,7 @@ public virtual bool IsRelevant(string docName, QualityQuery query) private class QRelJudgement { internal string queryID; - private IDictionary relevantDocs; + private readonly IDictionary relevantDocs; // LUCENENET: marked readonly internal QRelJudgement(string queryID) { @@ -171,8 +170,7 @@ public virtual bool ValidateData(QualityQuery[] qq, TextWriter logger) // inherit javadocs public virtual int MaxRecall(QualityQuery query) { - QRelJudgement qrj; - if (judgements.TryGetValue(query.QueryID, out qrj) && qrj != null) + if (judgements.TryGetValue(query.QueryID, out QRelJudgement qrj) && qrj != null) { return qrj.MaxRecall; } diff --git a/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs b/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs index eabc3ab9ee..6470750bf9 100644 --- 
a/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs +++ b/src/Lucene.Net.Benchmark/Quality/Trec/TrecTopicsReader.cs @@ -123,7 +123,7 @@ public virtual QualityQuery[] ReadQueries(TextReader reader) // read until finding a line that starts with the specified prefix private StringBuilder Read(TextReader reader, string prefix, StringBuilder sb, bool collectMatchLine, bool collectAll) { - sb = (sb == null ? new StringBuilder() : sb); + sb = sb ?? new StringBuilder(); string sep = ""; while (true) { @@ -137,7 +137,7 @@ private StringBuilder Read(TextReader reader, string prefix, StringBuilder sb, b if (collectMatchLine) { sb.Append(sep + line); - sep = newline; + //sep = newline; // LUCENENET: IDE0059: Remove unnecessary value assignment - this skips out of the loop } break; } diff --git a/src/Lucene.Net.Benchmark/Quality/Utils/QualityQueriesFinder.cs b/src/Lucene.Net.Benchmark/Quality/Utils/QualityQueriesFinder.cs index 2b7b8e739b..e958b02ecb 100644 --- a/src/Lucene.Net.Benchmark/Quality/Utils/QualityQueriesFinder.cs +++ b/src/Lucene.Net.Benchmark/Quality/Utils/QualityQueriesFinder.cs @@ -30,7 +30,7 @@ namespace Lucene.Net.Benchmarks.Quality.Utils public class QualityQueriesFinder { private static readonly string newline = Environment.NewLine; - private Store.Directory dir; + private readonly Store.Directory dir; // LUCENENET: marked readonly /// /// Constructor over a directory containing the index. @@ -63,7 +63,9 @@ public static void Main(string[] args) } } +#pragma warning disable IDE0060 // Remove unused parameter private string[] BestQueries(string field, int numQueries) +#pragma warning restore IDE0060 // Remove unused parameter { string[] words = BestTerms("body", 4 * numQueries); int n = words.Length; @@ -82,11 +84,11 @@ private static string FormatQueryAsTrecTopic(int qnum, string title, string desc return "" + newline + " Number: " + qnum + newline + newline + - " " + (title == null ? "" : title) + newline + newline + + "<title> " + (title ?? 
"") + newline + newline + "<desc> Description:" + newline + - (description == null ? "" : description) + newline + newline + + (description ?? "") + newline + newline + "<narr> Narrative:" + newline + - (narrative == null ? "" : narrative) + newline + newline + + (narrative ?? "") + newline + newline + "</top>"; } diff --git a/src/Lucene.Net.Benchmark/Quality/Utils/SubmissionReport.cs b/src/Lucene.Net.Benchmark/Quality/Utils/SubmissionReport.cs index c31eddca27..6f5d48ed37 100644 --- a/src/Lucene.Net.Benchmark/Quality/Utils/SubmissionReport.cs +++ b/src/Lucene.Net.Benchmark/Quality/Utils/SubmissionReport.cs @@ -31,9 +31,9 @@ namespace Lucene.Net.Benchmarks.Quality.Utils public class SubmissionReport { //private NumberFormat nf; - private string nf; - private TextWriter logger; - private string name; + private readonly string nf; // LUCENENET: marked readonly + private readonly TextWriter logger; // LUCENENET: marked readonly + private readonly string name; // LUCENENET: marked readonly /// <summary> /// Constructor for <see cref="SubmissionReport"/>. @@ -87,10 +87,10 @@ public virtual void Flush() } } - private static string padd = " "; + private const string padd = " "; private string Format(string s, int minLen) { - s = (s == null ? "" : s); + s = (s ?? 
""); int n = Math.Max(minLen, s.Length); return (s + padd).Substring(0, n - 0); } diff --git a/src/Lucene.Net.Benchmark/Support/Sax/Ext/Attributes2Impl.cs b/src/Lucene.Net.Benchmark/Support/Sax/Ext/Attributes2Impl.cs index 8c32a042a3..c0db09e244 100644 --- a/src/Lucene.Net.Benchmark/Support/Sax/Ext/Attributes2Impl.cs +++ b/src/Lucene.Net.Benchmark/Support/Sax/Ext/Attributes2Impl.cs @@ -178,14 +178,16 @@ public override void SetAttributes(IAttributes atts) declared = new bool[length]; specified = new bool[length]; - if (atts is Attributes2) { - Attributes2 a2 = (Attributes2)atts; + if (atts is Attributes2 a2) + { for (int i = 0; i < length; i++) { declared[i] = a2.IsDeclared(i); specified[i] = a2.IsSpecified(i); } - } else { + } + else + { for (int i = 0; i < length; i++) { declared[i] = !"CDATA".Equals(atts.GetType(i), StringComparison.Ordinal); diff --git a/src/Lucene.Net.Benchmark/Support/Sax/Ext/Locator2Impl.cs b/src/Lucene.Net.Benchmark/Support/Sax/Ext/Locator2Impl.cs index 32249d65f6..68053f47a5 100644 --- a/src/Lucene.Net.Benchmark/Support/Sax/Ext/Locator2Impl.cs +++ b/src/Lucene.Net.Benchmark/Support/Sax/Ext/Locator2Impl.cs @@ -43,9 +43,8 @@ public Locator2() { } public Locator2(ILocator locator) : base(locator) { - if (locator is Locator2) { - Locator2 l2 = (Locator2)locator; - + if (locator is Locator2 l2) + { version = l2.XMLVersion; encoding = l2.Encoding; } diff --git a/src/Lucene.Net.Benchmark/Support/Sax/Helpers/AttributesImpl.cs b/src/Lucene.Net.Benchmark/Support/Sax/Helpers/AttributesImpl.cs index 6a77008567..96062d7a9c 100644 --- a/src/Lucene.Net.Benchmark/Support/Sax/Helpers/AttributesImpl.cs +++ b/src/Lucene.Net.Benchmark/Support/Sax/Helpers/AttributesImpl.cs @@ -606,7 +606,7 @@ private void BadIndex(int index) // Internal state. 
//////////////////////////////////////////////////////////////////// - int length; - string[] data; + private int length; + private string[] data; } } diff --git a/src/Lucene.Net.Benchmark/Support/Sax/Helpers/NamespaceSupport.cs b/src/Lucene.Net.Benchmark/Support/Sax/Helpers/NamespaceSupport.cs index a401ba36e8..0ab40c797d 100644 --- a/src/Lucene.Net.Benchmark/Support/Sax/Helpers/NamespaceSupport.cs +++ b/src/Lucene.Net.Benchmark/Support/Sax/Helpers/NamespaceSupport.cs @@ -165,7 +165,7 @@ public void PushContext() { var newContexts = new Context[max * 2]; Array.Copy(contexts, 0, newContexts, 0, max); - max *= 2; + //max *= 2; // LUCENENET: IDE0059: Remove unnecessary value assignment contexts = newContexts; } diff --git a/src/Lucene.Net.Benchmark/Support/Sax/Helpers/XMLFilterImpl.cs b/src/Lucene.Net.Benchmark/Support/Sax/Helpers/XMLFilterImpl.cs index e360e57c25..e27b882c6a 100644 --- a/src/Lucene.Net.Benchmark/Support/Sax/Helpers/XMLFilterImpl.cs +++ b/src/Lucene.Net.Benchmark/Support/Sax/Helpers/XMLFilterImpl.cs @@ -349,7 +349,7 @@ public virtual void UnparsedEntityDecl(string name, string publicId, string syst /// <param name="locator">The document locator.</param> public virtual void SetDocumentLocator(ILocator locator) { - this.locator = locator; + //this.locator = locator; // LUCENENET: Never read if (contentHandler != null) { contentHandler.SetDocumentLocator(locator); @@ -579,7 +579,7 @@ private void SetupParse() //////////////////////////////////////////////////////////////////// private IXMLReader parent = null; - private ILocator locator = null; + //private ILocator locator = null; // LUCENENET: Never read private IEntityResolver entityResolver = null; private IDTDHandler dtdHandler = null; private IContentHandler contentHandler = null; diff --git a/src/Lucene.Net.Benchmark/Support/Sax/SAXException.cs b/src/Lucene.Net.Benchmark/Support/Sax/SAXException.cs index b307e89762..a49448752c 100644 --- a/src/Lucene.Net.Benchmark/Support/Sax/SAXException.cs +++ 
b/src/Lucene.Net.Benchmark/Support/Sax/SAXException.cs @@ -167,7 +167,7 @@ public override string ToString() /// The embedded exception if tunnelling, or null. /// </summary> [NonSerialized] - private Exception exception; + private readonly Exception exception; // LUCENENET: marked readonly } } diff --git a/src/Lucene.Net.Benchmark/Support/TagSoup/HTMLScanner.cs b/src/Lucene.Net.Benchmark/Support/TagSoup/HTMLScanner.cs index 3d91951602..62833aeb7b 100644 --- a/src/Lucene.Net.Benchmark/Support/TagSoup/HTMLScanner.cs +++ b/src/Lucene.Net.Benchmark/Support/TagSoup/HTMLScanner.cs @@ -96,7 +96,7 @@ public class HTMLScanner : IScanner, ILocator private const int A_STAGC = 30; private const int A_UNGET = 31; private const int A_UNSAVE_PCDATA = 32; - private static int[] statetable = { + private static readonly int[] statetable = { // LUCENENET: marked readonly S_ANAME, '/', A_ANAME_ADUP, S_EMPTYTAG, S_ANAME, '=', A_ANAME, S_AVAL, S_ANAME, '>', A_ANAME_ADUP_STAGC, S_PCDATA, @@ -247,8 +247,10 @@ public class HTMLScanner : IScanner, ILocator S_XNCR, -1, A_ENTITY, S_DONE, }; - private static readonly string[] debug_actionnames = { "", "A_ADUP", "A_ADUP_SAVE", "A_ADUP_STAGC", "A_ANAME", "A_ANAME_ADUP", "A_ANAME_ADUP_STAGC", "A_AVAL", "A_AVAL_STAGC", "A_CDATA", "A_CMNT", "A_DECL", "A_EMPTYTAG", "A_ENTITY", "A_ENTITY_START", "A_ETAG", "A_GI", "A_GI_STAGC", "A_LT", "A_LT_PCDATA", "A_MINUS", "A_MINUS2", "A_MINUS3", "A_PCDATA", "A_PI", "A_PITARGET", "A_PITARGET_PI", "A_SAVE", "A_SKIP", "A_SP", "A_STAGC", "A_UNGET", "A_UNSAVE_PCDATA" }; - private static readonly string[] debug_statenames = { "", "S_ANAME", "S_APOS", "S_AVAL", "S_BB", "S_BBC", "S_BBCD", "S_BBCDA", "S_BBCDAT", "S_BBCDATA", "S_CDATA", "S_CDATA2", "S_CDSECT", "S_CDSECT1", "S_CDSECT2", "S_COM", "S_COM2", "S_COM3", "S_COM4", "S_DECL", "S_DECL2", "S_DONE", "S_EMPTYTAG", "S_ENT", "S_EQ", "S_ETAG", "S_GI", "S_NCR", "S_PCDATA", "S_PI", "S_PITARGET", "S_QUOT", "S_STAGC", "S_TAG", "S_TAGWS", "S_XNCR" }; + + // LUCENENET: Never 
read + //private static readonly string[] debug_actionnames = { "", "A_ADUP", "A_ADUP_SAVE", "A_ADUP_STAGC", "A_ANAME", "A_ANAME_ADUP", "A_ANAME_ADUP_STAGC", "A_AVAL", "A_AVAL_STAGC", "A_CDATA", "A_CMNT", "A_DECL", "A_EMPTYTAG", "A_ENTITY", "A_ENTITY_START", "A_ETAG", "A_GI", "A_GI_STAGC", "A_LT", "A_LT_PCDATA", "A_MINUS", "A_MINUS2", "A_MINUS3", "A_PCDATA", "A_PI", "A_PITARGET", "A_PITARGET_PI", "A_SAVE", "A_SKIP", "A_SP", "A_STAGC", "A_UNGET", "A_UNSAVE_PCDATA" }; + //private static readonly string[] debug_statenames = { "", "S_ANAME", "S_APOS", "S_AVAL", "S_BB", "S_BBC", "S_BBCD", "S_BBCDA", "S_BBCDAT", "S_BBCDATA", "S_CDATA", "S_CDATA2", "S_CDSECT", "S_CDSECT1", "S_CDSECT2", "S_COM", "S_COM2", "S_COM3", "S_COM4", "S_DECL", "S_DECL2", "S_DONE", "S_EMPTYTAG", "S_ENT", "S_EQ", "S_ETAG", "S_GI", "S_NCR", "S_PCDATA", "S_PI", "S_PITARGET", "S_QUOT", "S_STAGC", "S_TAG", "S_TAGWS", "S_XNCR" }; // End of state table @@ -259,11 +261,11 @@ public class HTMLScanner : IScanner, ILocator private int theCurrentLine; private int theCurrentColumn; - int theState; // Current state - int theNextState; // Next state - char[] theOutputBuffer = new char[200]; // Output buffer - int theSize; // Current buffer size - int[] theWinMap = { // Windows chars map + private int theState; // Current state + private int theNextState; // Next state + private char[] theOutputBuffer = new char[200]; // Output buffer + private int theSize; // Current buffer size + private readonly int[] theWinMap = { // Windows chars map // LUCENENET: marked readonly 0x20AC, 0xFFFD, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0xFFFD, 0x017D, 0xFFFD, 0xFFFD, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, @@ -294,14 +296,14 @@ public class HTMLScanner : IScanner, ILocator /// next state = statetable[value + 3]. That is, the value points /// to the start of the answer 4-tuple in the statetable. 
/// </summary> - static short[][] statetableIndex; + private static short[][] statetableIndex; /// <summary> /// The highest character value seen in the statetable. /// See the doc comment for statetableIndex to see how this /// is used. /// </summary> - static int statetableIndexMaxChar; + private static int statetableIndexMaxChar; public HTMLScanner() { int maxState = -1; @@ -346,7 +348,7 @@ public HTMLScanner() else if (statetable[i + 1] == ch) { hit = i; - action = statetable[i + 2]; + //action = statetable[i + 2]; // LUCENENET: IDE0059: Remove unnecessary value assignment break; } } @@ -537,12 +539,12 @@ public virtual void Scan(TextReader r, IScanHandler h) if (ent < 0x20) { // Control becomes space - ent = 0x20; + //ent = 0x20; // LUCENENET: IDE0059: Remove unnecessary value assignment } else if (ent >= 0xD800 && ent <= 0xDFFF) { // Surrogates get dropped - ent = 0; + //ent = 0; // LUCENENET: IDE0059: Remove unnecessary value assignment } else if (ent <= 0xFFFF) { @@ -729,11 +731,11 @@ PYX to the standard output. 
// } - private static string NiceChar(int value) - { - if (value == '\n') return "\\n"; - if (value < 32) return "0x" + value.ToString("X", CultureInfo.InvariantCulture); - return "'" + ((char)value) + "'"; - } + //private static string NiceChar(int value) // LUCENENET: IDE0051: Remove unused private member + //{ + // if (value == '\n') return "\\n"; + // if (value < 32) return "0x" + value.ToString("X", CultureInfo.InvariantCulture); + // return "'" + ((char)value) + "'"; + //} } } diff --git a/src/Lucene.Net.Benchmark/Support/TagSoup/PYXScanner.cs b/src/Lucene.Net.Benchmark/Support/TagSoup/PYXScanner.cs index 18f6d5ec81..801324aa07 100644 --- a/src/Lucene.Net.Benchmark/Support/TagSoup/PYXScanner.cs +++ b/src/Lucene.Net.Benchmark/Support/TagSoup/PYXScanner.cs @@ -63,7 +63,7 @@ public virtual void Scan(TextReader br, IScanHandler h) if (instag) { h.STagC(buff, 0, 0); - instag = false; + //instag = false; // LUCENENET: IDE0059: Remove unnecessary value assignment } h.GI(buff, 1, size - 1); instag = true; diff --git a/src/Lucene.Net.Benchmark/Support/TagSoup/PYXWriter.cs b/src/Lucene.Net.Benchmark/Support/TagSoup/PYXWriter.cs index 45eebfef1d..b3e349e6ef 100644 --- a/src/Lucene.Net.Benchmark/Support/TagSoup/PYXWriter.cs +++ b/src/Lucene.Net.Benchmark/Support/TagSoup/PYXWriter.cs @@ -29,7 +29,7 @@ namespace TagSoup public class PYXWriter : IScanHandler, IContentHandler, ILexicalHandler { private readonly TextWriter theWriter; // where we Write to - private static char[] dummy = new char[1]; + //private static char[] dummy = new char[1]; // LUCENENET: Never read private string attrName; // saved attribute name // ScanHandler implementation diff --git a/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs b/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs index 2edb76e652..3308e130ca 100644 --- a/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs +++ b/src/Lucene.Net.Benchmark/Support/TagSoup/Parser.cs @@ -366,8 +366,7 @@ public virtual void SetProperty(string name, 
object value) } else { - var handler = value as ILexicalHandler; - if (handler != null) + if (value is ILexicalHandler handler) { theLexicalHandler = handler; } @@ -379,8 +378,7 @@ public virtual void SetProperty(string name, object value) } else if (name.Equals(SCANNER_PROPERTY, StringComparison.Ordinal)) { - var scanner = value as IScanner; - if (scanner != null) + if (value is IScanner scanner) { theScanner = scanner; } @@ -391,8 +389,7 @@ public virtual void SetProperty(string name, object value) } else if (name.Equals(SCHEMA_PROPERTY, StringComparison.Ordinal)) { - var schema = value as Schema; - if (schema != null) + if (value is Schema schema) { theSchema = schema; } @@ -403,8 +400,7 @@ public virtual void SetProperty(string name, object value) } else if (name.Equals(AUTO_DETECTOR_PROPERTY, StringComparison.Ordinal)) { - var detector = value as IAutoDetector; - if (detector != null) + if (value is IAutoDetector detector) { theAutoDetector = detector; } @@ -449,12 +445,11 @@ public virtual void Parse(InputSource input) TextReader r = GetReader(input); theContentHandler.StartDocument(); theScanner.ResetDocumentLocator(input.PublicId, input.SystemId); - var locator = theScanner as ILocator; - if (locator != null) + if (theScanner is ILocator locator) { theContentHandler.SetDocumentLocator(locator); } - if (!(theSchema.Uri.Equals("", StringComparison.Ordinal))) + if (theSchema.Uri.Length > 0) { theContentHandler.StartPrefixMapping(theSchema.Prefix, theSchema.Uri); } @@ -541,7 +536,9 @@ private TextReader GetReader(InputSource s) /// <param name="publicid"></param> /// <param name="systemid"></param> /// <returns></returns> - private Stream GetInputStream(string publicid, string systemid) +#pragma warning disable IDE0060 // Remove unused parameter + private static Stream GetInputStream(string publicid, string systemid) // LUCENENET: CA1822: Mark members as static +#pragma warning restore IDE0060 // Remove unused parameter { var basis = new Uri("file://" + 
Directory.GetCurrentDirectory() + Path.DirectorySeparatorChar); var url = new Uri(basis, systemid); @@ -720,7 +717,7 @@ public virtual void EOF(char[] buff, int offset, int length) { Pop(); } - if (!(theSchema.Uri.Equals("", StringComparison.Ordinal))) + if (theSchema.Uri.Length > 0) // LUCENENET: CA1820: Test for empty strings using string length { theContentHandler.EndPrefixMapping(theSchema.Prefix); } @@ -982,7 +979,7 @@ private static string PrefixOf(string name) private bool Foreign(string prefix, string ns) { // System.err.print("%% Testing " + prefix + " and " + namespace + " for foreignness -- "); - bool foreign = !(prefix.Equals("", StringComparison.Ordinal) || ns.Equals("", StringComparison.Ordinal) || ns.Equals(theSchema.Uri, StringComparison.Ordinal)); + bool foreign = !(prefix.Length == 0 || ns.Length == 0 || ns.Equals(theSchema.Uri, StringComparison.Ordinal)); // LUCENENET: CA1820: Test for empty strings using string length // System.err.println(foreign); return foreign; } @@ -1043,8 +1040,7 @@ public virtual void Decl(char[] buff, int offset, int length) theLexicalHandler.EndDTD(); theDoctypeName = name; theDoctypePublicId = publicid; - var locator = theScanner as ILocator; - if (locator != null) + if (theScanner is ILocator locator) { // Must resolve systemid theDoctypeSystemId = locator.SystemId; @@ -1098,7 +1094,7 @@ private static string[] Split(string val) } var l = new List<string>(); int s = 0; - int e = 0; + int e; // LUCENENET: IDE0059: Remove unnecessary value assignment bool sq = false; // single quote bool dq = false; // double quote var lastc = (char)0; @@ -1305,7 +1301,7 @@ public virtual void STagE(char[] buff, int offset, int length) ETagBasic(buff, offset, length); } - private char[] theCommentBuffer = new char[2000]; + //private char[] theCommentBuffer = new char[2000]; // LUCENENET: Never read public virtual void Cmnt(char[] buff, int offset, int length) { theLexicalHandler.Comment(buff, offset, length); diff --git 
a/src/Lucene.Net.Benchmark/Support/TagSoup/Schema.cs b/src/Lucene.Net.Benchmark/Support/TagSoup/Schema.cs index 1ed74555ab..a7ade2d45a 100644 --- a/src/Lucene.Net.Benchmark/Support/TagSoup/Schema.cs +++ b/src/Lucene.Net.Benchmark/Support/TagSoup/Schema.cs @@ -95,7 +95,9 @@ public virtual void Parent(string name, string parentName) } if (parent == null) { +#pragma warning disable IDE0016 // Use 'throw' expression throw new Exception("No parent " + parentName + " for child " + name); +#pragma warning restore IDE0016 // Use 'throw' expression } child.Parent = parent; } diff --git a/src/Lucene.Net.Benchmark/Support/TagSoup/XMLWriter.cs b/src/Lucene.Net.Benchmark/Support/TagSoup/XMLWriter.cs index f1f20fd0fc..d5b4f8f678 100644 --- a/src/Lucene.Net.Benchmark/Support/TagSoup/XMLWriter.cs +++ b/src/Lucene.Net.Benchmark/Support/TagSoup/XMLWriter.cs @@ -667,7 +667,7 @@ public override void StartElement(string uri, string localName, string qName, IA public override void EndElement(string uri, string localName, string qName) { if ( - !(htmlMode && (uri.Equals("http://www.w3.org/1999/xhtml", StringComparison.Ordinal) || uri.Equals("", StringComparison.Ordinal)) + !(htmlMode && (uri.Equals("http://www.w3.org/1999/xhtml", StringComparison.Ordinal) || uri.Length == 0) // LUCENENET: CA1820: Test for empty strings using string length && (qName.Equals("area", StringComparison.Ordinal) || qName.Equals("base", StringComparison.Ordinal) || qName.Equals("basefont", StringComparison.Ordinal) || qName.Equals("br", StringComparison.Ordinal) || qName.Equals("col", StringComparison.Ordinal) || qName.Equals("frame", StringComparison.Ordinal) || qName.Equals("hr", StringComparison.Ordinal) || qName.Equals("img", StringComparison.Ordinal) || qName.Equals("input", StringComparison.Ordinal) || qName.Equals("isindex", StringComparison.Ordinal) || qName.Equals("link", StringComparison.Ordinal) || qName.Equals("meta", StringComparison.Ordinal) diff --git 
a/src/Lucene.Net.Benchmark/Support/Util/EnglishNumberFormatExtensions.cs b/src/Lucene.Net.Benchmark/Support/Util/EnglishNumberFormatExtensions.cs index 0a410f3470..cd667b0c6b 100644 --- a/src/Lucene.Net.Benchmark/Support/Util/EnglishNumberFormatExtensions.cs +++ b/src/Lucene.Net.Benchmark/Support/Util/EnglishNumberFormatExtensions.cs @@ -59,7 +59,7 @@ private static StringBuilder ToWords(long value, StringBuilder builder) ToWords(Math.Abs(value), builder); } - long unit = 0; + long unit; if (value >= QUADRILLION) { diff --git a/src/Lucene.Net.Benchmark/Utils/ExtractReuters.cs b/src/Lucene.Net.Benchmark/Utils/ExtractReuters.cs index b7630da5e2..c8f62ad646 100644 --- a/src/Lucene.Net.Benchmark/Utils/ExtractReuters.cs +++ b/src/Lucene.Net.Benchmark/Utils/ExtractReuters.cs @@ -28,8 +28,8 @@ namespace Lucene.Net.Benchmarks.Utils /// </summary> public class ExtractReuters { - private DirectoryInfo reutersDir; - private DirectoryInfo outputDir; + private readonly DirectoryInfo reutersDir; // LUCENENET: marked readonly + private readonly DirectoryInfo outputDir; // LUCENENET: marked readonly private static readonly string LINE_SEPARATOR = Environment.NewLine; public ExtractReuters(DirectoryInfo reutersDir, DirectoryInfo outputDir) @@ -59,11 +59,11 @@ public virtual void Extract() } } - internal Regex EXTRACTION_PATTERN = new Regex("<TITLE>(.*?)|(.*?)|(.*?)", RegexOptions.Compiled); + internal static readonly Regex EXTRACTION_PATTERN = new Regex("(.*?)|(.*?)|(.*?)", RegexOptions.Compiled); - private static string[] META_CHARS = { "&", "<", ">", "\"", "'" }; + private static readonly string[] META_CHARS = { "&", "<", ">", "\"", "'" }; - private static string[] META_CHARS_SERIALIZATIONS = { "&", "<", + private static readonly string[] META_CHARS_SERIALIZATIONS = { "&", "<", ">", """, "'" }; /// @@ -73,58 +73,56 @@ protected virtual void ExtractFile(FileInfo sgmFile) { try { - using (TextReader reader = new StreamReader(new FileStream(sgmFile.FullName, FileMode.Open, 
FileAccess.Read), Encoding.UTF8)) + using TextReader reader = new StreamReader(new FileStream(sgmFile.FullName, FileMode.Open, FileAccess.Read), Encoding.UTF8); + StringBuilder buffer = new StringBuilder(1024); + StringBuilder outBuffer = new StringBuilder(1024); + + string line = null; + int docNumber = 0; + while ((line = reader.ReadLine()) != null) { - StringBuilder buffer = new StringBuilder(1024); - StringBuilder outBuffer = new StringBuilder(1024); + // when we see a closing reuters tag, flush the file - string line = null; - int docNumber = 0; - while ((line = reader.ReadLine()) != null) + if (line.IndexOf(" public class ExtractWikipedia { - private DirectoryInfo outputDir; + private readonly DirectoryInfo outputDir; // LUCENENET: marked readonly public static int count = 0; - internal static readonly int BASE = 10; + internal const int BASE = 10; protected DocMaker m_docMaker; public ExtractWikipedia(DocMaker docMaker, DirectoryInfo outputDir) @@ -88,8 +88,8 @@ public virtual void Create(string id, string title, string time, string body) try { - using (TextWriter writer = new StreamWriter(new FileStream(f.FullName, FileMode.Create, FileAccess.Write), Encoding.UTF8)) - writer.Write(contents.ToString()); + using TextWriter writer = new StreamWriter(new FileStream(f.FullName, FileMode.Create, FileAccess.Write), Encoding.UTF8); + writer.Write(contents.ToString()); } catch (IOException ioe) { @@ -99,7 +99,7 @@ public virtual void Create(string id, string title, string time, string body) public virtual void Extract() { - Document doc = null; + Document doc; // LUCENENET: IDE0059: Remove unnecessary value assignment Console.WriteLine("Starting Extraction"); long start = J2N.Time.CurrentTimeMilliseconds(); try @@ -143,10 +143,12 @@ public static void Main(string[] args) } } - IDictionary properties = new Dictionary(); - properties["docs.file"] = wikipedia.FullName; - properties["content.source.forever"] = "false"; - properties["keep.image.only.docs"] = 
keepImageOnlyDocs.ToString(); + IDictionary properties = new Dictionary + { + ["docs.file"] = wikipedia.FullName, + ["content.source.forever"] = "false", + ["keep.image.only.docs"] = keepImageOnlyDocs.ToString() + }; Config config = new Config(properties); ContentSource source = new EnwikiContentSource(); diff --git a/src/Lucene.Net.Classification/KNearestNeighborClassifier.cs b/src/Lucene.Net.Classification/KNearestNeighborClassifier.cs index 1f5e3c03ce..9f198e5139 100644 --- a/src/Lucene.Net.Classification/KNearestNeighborClassifier.cs +++ b/src/Lucene.Net.Classification/KNearestNeighborClassifier.cs @@ -41,8 +41,8 @@ public class KNearestNeighborClassifier : IClassifier private readonly int _k; private Query _query; - private int _minDocsFreq; - private int _minTermFreq; + private readonly int _minDocsFreq; // LUCENENET: marked readonly + private readonly int _minTermFreq; // LUCENENET: marked readonly /// Create a using kNN algorithm /// the number of neighbors to analyze as an diff --git a/src/Lucene.Net.Classification/SimpleNaiveBayesClassifier.cs b/src/Lucene.Net.Classification/SimpleNaiveBayesClassifier.cs index 1da697a49c..98f60d5f8c 100644 --- a/src/Lucene.Net.Classification/SimpleNaiveBayesClassifier.cs +++ b/src/Lucene.Net.Classification/SimpleNaiveBayesClassifier.cs @@ -96,8 +96,10 @@ private int CountDocsWithClass() if (docCount == -1) { // in case codec doesn't support getDocCount TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector(); - BooleanQuery q = new BooleanQuery(); - q.Add(new BooleanClause(new WildcardQuery(new Term(_classFieldName, WildcardQuery.WILDCARD_STRING.ToString())), Occur.MUST)); + BooleanQuery q = new BooleanQuery + { + new BooleanClause(new WildcardQuery(new Term(_classFieldName, WildcardQuery.WILDCARD_STRING.ToString())), Occur.MUST) + }; if (_query != null) { q.Add(_query, Occur.MUST); diff --git a/src/Lucene.Net.Codecs/Appending/AppendingPostingsFormat.cs 
b/src/Lucene.Net.Codecs/Appending/AppendingPostingsFormat.cs index 0eb57dd2a8..3890477240 100644 --- a/src/Lucene.Net.Codecs/Appending/AppendingPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Appending/AppendingPostingsFormat.cs @@ -42,21 +42,19 @@ public override FieldsConsumer FieldsConsumer(SegmentWriteState state) public override FieldsProducer FieldsProducer(SegmentReadState state) { #pragma warning disable 612, 618 - using (var postings = new Lucene40PostingsReader(state.Directory, state.FieldInfos, + using var postings = new Lucene40PostingsReader(state.Directory, state.FieldInfos, state.SegmentInfo, - state.Context, state.SegmentSuffix)) - { - var ret = new AppendingTermsReader( - state.Directory, - state.FieldInfos, - state.SegmentInfo, - postings, - state.Context, - state.SegmentSuffix, - state.TermsIndexDivisor); + state.Context, state.SegmentSuffix); + var ret = new AppendingTermsReader( + state.Directory, + state.FieldInfos, + state.SegmentInfo, + postings, + state.Context, + state.SegmentSuffix, + state.TermsIndexDivisor); - return ret; - } + return ret; #pragma warning restore 612, 618 } } diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs index a0edca65aa..f54f385157 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs @@ -235,8 +235,7 @@ public override Terms GetTerms(string field) { if (Debugging.AssertsEnabled) Debugging.Assert(field != null); - FieldReader result; - fields.TryGetValue(field, out result); + fields.TryGetValue(field, out FieldReader result); return result; } @@ -725,7 +724,9 @@ public override BytesRef Next() /// decode all metadata up to the current term /// /// +#pragma warning disable IDE1006 // Naming Styles private BytesRef _next() +#pragma warning restore IDE1006 // Naming Styles { //System.out.println("BTR._next seg=" + segment + " this=" + this + " termCount=" + state.termBlockOrd + " 
(vs " + blockTermCount + ")"); if (state.TermBlockOrd == blockTermCount && !NextBlock()) diff --git a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs index 87ecb55586..962f55ce49 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/BlockTermsWriter.cs @@ -36,19 +36,21 @@ namespace Lucene.Net.Codecs.BlockTerms /// public class BlockTermsWriter : FieldsConsumer { - internal readonly static string CODEC_NAME = "BLOCK_TERMS_DICT"; + internal const string CODEC_NAME = "BLOCK_TERMS_DICT"; // Initial format - public readonly static int VERSION_START = 0; - public readonly static int VERSION_APPEND_ONLY = 1; - public readonly static int VERSION_META_ARRAY = 2; - public readonly static int VERSION_CHECKSUM = 3; + public const int VERSION_START = 0; + public const int VERSION_APPEND_ONLY = 1; + public const int VERSION_META_ARRAY = 2; + public const int VERSION_CHECKSUM = 3; public readonly static int VERSION_CURRENT = VERSION_CHECKSUM; /// Extension of terms file public readonly static string TERMS_EXTENSION = "tib"; +#pragma warning disable CA2213 // Disposable fields should be disposed protected IndexOutput m_output; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly PostingsWriterBase postingsWriter; //private readonly FieldInfos fieldInfos; // LUCENENET: Not used private FieldInfo currentField; @@ -300,7 +302,7 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount } } - private int SharedPrefix(BytesRef term1, BytesRef term2) + private static int SharedPrefix(BytesRef term1, BytesRef term2) // LUCENENET: CA1822: Mark members as static { if (Debugging.AssertsEnabled) { diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs index f6dfac83dd..4e36e23170 100644 --- 
a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs @@ -58,7 +58,7 @@ public class FixedGapTermsIndexReader : TermsIndexReaderBase private readonly PagedBytes termBytes = new PagedBytes(PAGED_BYTES_BITS); private readonly PagedBytes.Reader termBytesReader; - readonly IDictionary fields = new Dictionary(); + private readonly IDictionary fields = new Dictionary(); // start of the field info data private long dirOffset; @@ -129,7 +129,8 @@ public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string seg throw new CorruptIndexException("invalid packedIndexStart: " + packedIndexStart + " indexStart: " + indexStart + "numIndexTerms: " + numIndexTerms + " (resource=" + input + ")"); } FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - FieldIndexData previous = fields.Put(fieldInfo, new FieldIndexData(this, fieldInfo, numIndexTerms, indexStart, termsStart, packedIndexStart, packedOffsetsStart)); + FieldIndexData previous = fields.Put(fieldInfo, new FieldIndexData(this, /* fieldInfo, // LUCENENET: Not referenced */ + numIndexTerms, indexStart, termsStart, packedIndexStart, packedOffsetsStart)); if (previous != null) { throw new CorruptIndexException("duplicate field: " + fieldInfo.Name + " (resource=" + input + ")"); @@ -276,7 +277,8 @@ private class FieldIndexData private readonly int numIndexTerms; - public FieldIndexData(FixedGapTermsIndexReader outerInstance, FieldInfo fieldInfo, int numIndexTerms, long indexStart, long termsStart, + public FieldIndexData(FixedGapTermsIndexReader outerInstance, /*FieldInfo fieldInfo, // LUCENENET: Not Referenced */ + int numIndexTerms, long indexStart, long termsStart, long packedIndexStart, long packedOffsetsStart) { this.outerInstance = outerInstance; @@ -444,8 +446,7 @@ public long RamBytesUsed() public override FieldIndexEnum GetFieldEnum(FieldInfo fieldInfo) { - FieldIndexData fieldData; - if (!fields.TryGetValue(fieldInfo, out 
fieldData) || fieldData == null || fieldData.coreIndex == null) + if (!fields.TryGetValue(fieldInfo, out FieldIndexData fieldData) || fieldData == null || fieldData.coreIndex == null) { return null; } diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs index 430365adeb..e20aca6644 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexWriter.cs @@ -37,15 +37,17 @@ namespace Lucene.Net.Codecs.BlockTerms /// public class FixedGapTermsIndexWriter : TermsIndexWriterBase { +#pragma warning disable CA2213 // Disposable fields should be disposed protected IndexOutput m_output; +#pragma warning restore CA2213 // Disposable fields should be disposed /// Extension of terms index file - internal readonly static string TERMS_INDEX_EXTENSION = "tii"; - internal readonly static string CODEC_NAME = "SIMPLE_STANDARD_TERMS_INDEX"; - internal readonly static int VERSION_START = 0; - internal readonly static int VERSION_APPEND_ONLY = 1; - internal readonly static int VERSION_CHECKSUM = 1000; // 4.x "skipped" trunk's monotonic addressing: give any user a nice exception - internal readonly static int VERSION_CURRENT = VERSION_CHECKSUM; + internal const string TERMS_INDEX_EXTENSION = "tii"; + internal const string CODEC_NAME = "SIMPLE_STANDARD_TERMS_INDEX"; + internal const int VERSION_START = 0; + internal const int VERSION_APPEND_ONLY = 1; + internal const int VERSION_CHECKSUM = 1000; // 4.x "skipped" trunk's monotonic addressing: give any user a nice exception + internal const int VERSION_CURRENT = VERSION_CHECKSUM; private readonly int termIndexInterval; diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs index 7dc7f86268..90d721a8e4 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs +++ 
b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexReader.cs @@ -81,7 +81,7 @@ public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string int field = input.ReadVInt32(); long indexStart = input.ReadVInt64(); FieldInfo fieldInfo = fieldInfos.FieldInfo(field); - FieldIndexData previous = fields.Put(fieldInfo, new FieldIndexData(this, fieldInfo, indexStart)); + FieldIndexData previous = fields.Put(fieldInfo, new FieldIndexData(this, /* fieldInfo, // LUCENENET: Not referenced */ indexStart)); if (previous != null) { throw new CorruptIndexException("duplicate field: " + fieldInfo.Name + " (resource=" + input + ")"); @@ -197,7 +197,7 @@ private class FieldIndexData // Set only if terms index is loaded: internal volatile FST fst; - public FieldIndexData(VariableGapTermsIndexReader outerInstance, FieldInfo fieldInfo, long indexStart) + public FieldIndexData(VariableGapTermsIndexReader outerInstance, /*FieldInfo fieldInfo, // LUCENENET: Not referenced */ long indexStart) { this.outerInstance = outerInstance; diff --git a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs index 80fa33b505..f857bfde95 100644 --- a/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs +++ b/src/Lucene.Net.Codecs/BlockTerms/VariableGapTermsIndexWriter.cs @@ -39,13 +39,13 @@ public class VariableGapTermsIndexWriter : TermsIndexWriterBase protected IndexOutput m_output; /// Extension of terms index file. 
- internal readonly static string TERMS_INDEX_EXTENSION = "tiv"; + internal const string TERMS_INDEX_EXTENSION = "tiv"; - internal readonly static string CODEC_NAME = "VARIABLE_GAP_TERMS_INDEX"; - internal readonly static int VERSION_START = 0; - internal readonly static int VERSION_APPEND_ONLY = 1; - internal readonly static int VERSION_CHECKSUM = 2; - internal readonly static int VERSION_CURRENT = VERSION_CHECKSUM; + internal const string CODEC_NAME = "VARIABLE_GAP_TERMS_INDEX"; + internal const int VERSION_START = 0; + internal const int VERSION_APPEND_ONLY = 1; + internal const int VERSION_CHECKSUM = 2; + internal const int VERSION_CURRENT = VERSION_CHECKSUM; private readonly IList fields = new List(); diff --git a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs index bebdb63538..621850fde8 100644 --- a/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Bloom/BloomFilteringPostingsFormat.cs @@ -193,8 +193,7 @@ protected override void Dispose(bool disposing) public override Terms GetTerms(string field) { - FuzzySet filter; - if (!_bloomsByFieldName.TryGetValue(field, out filter) || filter == null) + if (!_bloomsByFieldName.TryGetValue(field, out FuzzySet filter) || filter == null) { return _delegateFieldsProducer.GetTerms(field); } @@ -234,18 +233,12 @@ public override TermsEnum GetEnumerator() public override TermsEnum GetEnumerator(TermsEnum reuse) { -#pragma warning disable IDE0038 // Use pattern matching - if (!(reuse is null) && reuse is BloomFilteredTermsEnum) -#pragma warning restore IDE0038 // Use pattern matching + if (!(reuse is null) && reuse is BloomFilteredTermsEnum bfte && bfte.filter == _filter) { - BloomFilteredTermsEnum bfte = (BloomFilteredTermsEnum)reuse; - if (bfte.filter == _filter) - { - // recycle the existing BloomFilteredTermsEnum by asking the delegate - // to recycle its contained TermsEnum - bfte.Reset(_delegateTerms, 
bfte.delegateTermsEnum); - return bfte; - } + // recycle the existing BloomFilteredTermsEnum by asking the delegate + // to recycle its contained TermsEnum + bfte.Reset(_delegateTerms, bfte.delegateTermsEnum); + return bfte; } // We have been handed something we cannot reuse (either wrong diff --git a/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs b/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs index c3cb2ce41f..a1bcb9c3d5 100644 --- a/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs +++ b/src/Lucene.Net.Codecs/Bloom/FuzzySet.cs @@ -84,7 +84,7 @@ public enum ContainsResult // a large bitset and then mapped to a smaller set can be looked up using a single // AND operation of the query term's hash rather than needing to perform a 2-step // translation of the query term that mirrors the stored content's reprojections. - private static int[] _usableBitSetSizes = LoadUsableBitSetSizes(); + private static readonly int[] _usableBitSetSizes = LoadUsableBitSetSizes(); // LUCENENET: marked readonly private static int[] LoadUsableBitSetSizes() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) { var usableBitSetSizes = new int[30]; diff --git a/src/Lucene.Net.Codecs/DiskDV/DiskDocValuesFormat.cs b/src/Lucene.Net.Codecs/DiskDV/DiskDocValuesFormat.cs index 4ff48ab23a..a5a59495ef 100644 --- a/src/Lucene.Net.Codecs/DiskDV/DiskDocValuesFormat.cs +++ b/src/Lucene.Net.Codecs/DiskDV/DiskDocValuesFormat.cs @@ -38,17 +38,14 @@ public DiskDocValuesFormat() public override DocValuesConsumer FieldsConsumer(SegmentWriteState state) { - return new Lucene45DocValuesConsumerAnonymousHelper(this, state); + return new Lucene45DocValuesConsumerAnonymousHelper(state); } private class Lucene45DocValuesConsumerAnonymousHelper : Lucene45DocValuesConsumer { - private readonly DiskDocValuesFormat outerInstance; - - public Lucene45DocValuesConsumerAnonymousHelper(DiskDocValuesFormat outerInstance, SegmentWriteState state) + public 
Lucene45DocValuesConsumerAnonymousHelper(SegmentWriteState state) : base(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION) { - this.outerInstance = outerInstance; } protected override void AddTermsDict(FieldInfo field, IEnumerable values) diff --git a/src/Lucene.Net.Codecs/DiskDV/DiskNormsFormat.cs b/src/Lucene.Net.Codecs/DiskDV/DiskNormsFormat.cs index 01a19491cc..e148c577ae 100644 --- a/src/Lucene.Net.Codecs/DiskDV/DiskNormsFormat.cs +++ b/src/Lucene.Net.Codecs/DiskDV/DiskNormsFormat.cs @@ -35,9 +35,9 @@ public override DocValuesProducer NormsProducer(SegmentReadState state) return new DiskDocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } - private static readonly string DATA_CODEC = "DiskNormsData"; - private static readonly string DATA_EXTENSION = "dnvd"; - private static readonly string META_CODEC = "DiskNormsMetadata"; - private static readonly string META_EXTENSION = "dnvm"; + private const string DATA_CODEC = "DiskNormsData"; + private const string DATA_EXTENSION = "dnvd"; + private const string META_CODEC = "DiskNormsMetadata"; + private const string META_EXTENSION = "dnvm"; } } \ No newline at end of file diff --git a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs index d201869523..357ef3d2af 100644 --- a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs +++ b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs @@ -45,7 +45,7 @@ public abstract class FixedInt32BlockIndexInput : Int32IndexInput private readonly IndexInput input; protected readonly int m_blockSize; - public FixedInt32BlockIndexInput(IndexInput @in) + protected FixedInt32BlockIndexInput(IndexInput @in) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { input = @in; m_blockSize = @in.ReadVInt32(); diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs index 
9e0d307bce..66538ee0f4 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs @@ -34,7 +34,9 @@ namespace Lucene.Net.Codecs.Memory /// internal class DirectDocValuesConsumer : DocValuesConsumer { +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput data, meta; +#pragma warning restore CA2213 // Disposable fields should be disposed //private readonly int maxDoc; // LUCENENET: Not used internal DirectDocValuesConsumer(SegmentWriteState state, string dataCodec, string dataExtension, diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesFormat.cs index fec83ed676..b871db357a 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesFormat.cs @@ -79,9 +79,9 @@ public override DocValuesProducer FieldsProducer(SegmentReadState state) return new DirectDocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION); } - internal static readonly string DATA_CODEC = "DirectDocValuesData"; - internal static readonly string DATA_EXTENSION = "dvdd"; - internal static readonly string METADATA_CODEC = "DirectDocValuesMetadata"; - internal static readonly string METADATA_EXTENSION = "dvdm"; + internal const string DATA_CODEC = "DirectDocValuesData"; + internal const string DATA_EXTENSION = "dvdd"; + internal const string METADATA_CODEC = "DirectDocValuesMetadata"; + internal const string METADATA_EXTENSION = "dvdm"; } } \ No newline at end of file diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs index 8df0270d34..9028f835a0 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs @@ -117,7 +117,7 @@ internal DirectDocValuesProducer(SegmentReadState state, string dataCodec, strin 
} } - private NumericEntry ReadNumericEntry(IndexInput meta) + private static NumericEntry ReadNumericEntry(IndexInput meta) // LUCENENET: CA1822: Mark members as static { var entry = new NumericEntry { offset = meta.ReadInt64(), count = meta.ReadInt32(), missingOffset = meta.ReadInt64() }; if (entry.missingOffset != -1) @@ -133,7 +133,7 @@ private NumericEntry ReadNumericEntry(IndexInput meta) return entry; } - private BinaryEntry ReadBinaryEntry(IndexInput meta) + private static BinaryEntry ReadBinaryEntry(IndexInput meta) // LUCENENET: CA1822: Mark members as static { var entry = new BinaryEntry(); entry.offset = meta.ReadInt64(); @@ -152,7 +152,7 @@ private BinaryEntry ReadBinaryEntry(IndexInput meta) return entry; } - private SortedEntry ReadSortedEntry(IndexInput meta) + private static SortedEntry ReadSortedEntry(IndexInput meta) // LUCENENET: CA1822: Mark members as static { var entry = new SortedEntry(); entry.docToOrd = ReadNumericEntry(meta); @@ -160,7 +160,7 @@ private SortedEntry ReadSortedEntry(IndexInput meta) return entry; } - private SortedSetEntry ReadSortedSetEntry(IndexInput meta) + private static SortedSetEntry ReadSortedSetEntry(IndexInput meta) // LUCENENET: CA1822: Mark members as static { var entry = new SortedSetEntry(); entry.docToOrdAddress = ReadNumericEntry(meta); @@ -213,8 +213,7 @@ public override NumericDocValues GetNumeric(FieldInfo field) { lock (this) { - NumericDocValues instance; - if (!numericInstances.TryGetValue(field.Number, out instance)) + if (!numericInstances.TryGetValue(field.Number, out NumericDocValues instance)) { // Lazy load instance = LoadNumeric(numerics[field.Number]); @@ -246,7 +245,7 @@ private NumericDocValues LoadNumeric(NumericEntry entry) values[i] = data.ReadInt16(); } ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values)); - return new NumericDocValuesAnonymousInnerClassHelper2(this, values); + return new NumericDocValuesAnonymousInnerClassHelper2(values); } case 4: @@ -295,7 +294,7 @@ private class 
NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues { private readonly short[] values; - public NumericDocValuesAnonymousInnerClassHelper2(DirectDocValuesProducer outerInstance, short[] values) + public NumericDocValuesAnonymousInnerClassHelper2(short[] values) { this.values = values; } @@ -340,8 +339,7 @@ public override BinaryDocValues GetBinary(FieldInfo field) { lock (this) { - BinaryDocValues instance; - if (!binaryInstances.TryGetValue(field.Number, out instance)) + if (!binaryInstances.TryGetValue(field.Number, out BinaryDocValues instance)) { // Lazy load instance = LoadBinary(binaries[field.Number]); @@ -393,8 +391,7 @@ public override SortedDocValues GetSorted(FieldInfo field) { lock (this) { - SortedDocValues instance; - if (!sortedInstances.TryGetValue(field.Number, out instance)) + if (!sortedInstances.TryGetValue(field.Number, out SortedDocValues instance)) { // Lazy load instance = LoadSorted(field); @@ -410,20 +407,17 @@ private SortedDocValues LoadSorted(FieldInfo field) NumericDocValues docToOrd = LoadNumeric(entry.docToOrd); BinaryDocValues values = LoadBinary(entry.values); - return new SortedDocValuesAnonymousInnerClassHelper(this, entry, docToOrd, values); + return new SortedDocValuesAnonymousInnerClassHelper(entry, docToOrd, values); } private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues { - private readonly DirectDocValuesProducer outerInstance; - private readonly SortedEntry entry; private readonly NumericDocValues docToOrd; private readonly BinaryDocValues values; - public SortedDocValuesAnonymousInnerClassHelper(DirectDocValuesProducer outerInstance, SortedEntry entry, NumericDocValues docToOrd, BinaryDocValues values) + public SortedDocValuesAnonymousInnerClassHelper(SortedEntry entry, NumericDocValues docToOrd, BinaryDocValues values) { - this.outerInstance = outerInstance; this.entry = entry; this.docToOrd = docToOrd; this.values = values; @@ -452,8 +446,7 @@ public override SortedSetDocValues 
GetSortedSet(FieldInfo field) lock (this) { var entry = sortedSets[field.Number]; - SortedSetRawValues instance; - if (!sortedSetInstances.TryGetValue(field.Number, out instance)) + if (!sortedSetInstances.TryGetValue(field.Number, out SortedSetRawValues instance)) { // Lazy load instance = LoadSortedSet(entry); @@ -465,7 +458,7 @@ public override SortedSetDocValues GetSortedSet(FieldInfo field) var values = instance.values; // Must make a new instance since the iterator has state: - return new RandomAccessOrdsAnonymousInnerClassHelper(this, entry, docToOrdAddress, ords, values); + return new RandomAccessOrdsAnonymousInnerClassHelper(entry, docToOrdAddress, ords, values); } } @@ -476,7 +469,7 @@ private class RandomAccessOrdsAnonymousInnerClassHelper : RandomAccessOrds private readonly NumericDocValues ords; private readonly BinaryDocValues values; - public RandomAccessOrdsAnonymousInnerClassHelper(DirectDocValuesProducer outerInstance, SortedSetEntry entry, NumericDocValues docToOrdAddress, NumericDocValues ords, BinaryDocValues values) + public RandomAccessOrdsAnonymousInnerClassHelper(SortedSetEntry entry, NumericDocValues docToOrdAddress, NumericDocValues ords, BinaryDocValues values) { this.entry = entry; this.docToOrdAddress = docToOrdAddress; diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs index 4f3b825773..c31aa8ec3f 100644 --- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs @@ -152,8 +152,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - DirectField result; - fields.TryGetValue(field, out result); + fields.TryGetValue(field, out DirectField result); return result; } @@ -987,44 +986,26 @@ public override void SeekExact(BytesRef term, TermState state) public override long Ord => termOrd; - public override int DocFreq - { - get - { - if (outerInstance.terms[termOrd] is 
LowFreqTerm) - { - return ((LowFreqTerm)outerInstance.terms[termOrd]).DocFreq; - } - else - { - return ((HighFreqTerm)outerInstance.terms[termOrd]).DocIDs.Length; - } - } - } + public override int DocFreq => + outerInstance.terms[termOrd] is LowFreqTerm lowFreqTerm ? + lowFreqTerm.DocFreq : + ((HighFreqTerm)outerInstance.terms[termOrd]).DocIDs.Length; + + + public override long TotalTermFreq => + outerInstance.terms[termOrd] is LowFreqTerm lowFreqTerm ? + lowFreqTerm.TotalTermFreq : + ((HighFreqTerm)outerInstance.terms[termOrd]).TotalTermFreq; - public override long TotalTermFreq - { - get - { - if (outerInstance.terms[termOrd] is LowFreqTerm) - { - return ((LowFreqTerm)outerInstance.terms[termOrd]).TotalTermFreq; - } - else - { - return ((HighFreqTerm)outerInstance.terms[termOrd]).TotalTermFreq; - } - } - } public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { // TODO: implement reuse, something like Pulsing: // it's hairy! - if (outerInstance.terms[termOrd] is LowFreqTerm) + if (outerInstance.terms[termOrd] is LowFreqTerm lowFreqTerm) { - int[] postings = ((LowFreqTerm) outerInstance.terms[termOrd]).Postings; + int[] postings = lowFreqTerm.Postings; if (outerInstance.hasFreq) { if (outerInstance.hasPos) @@ -1042,10 +1023,8 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { posLen++; } - LowFreqDocsEnum docsEnum; - if (reuse is LowFreqDocsEnum) + if (reuse is LowFreqDocsEnum docsEnum) { - docsEnum = (LowFreqDocsEnum) reuse; if (!docsEnum.CanReuse(liveDocs, posLen)) { docsEnum = new LowFreqDocsEnum(liveDocs, posLen); @@ -1060,10 +1039,8 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) } else { - LowFreqDocsEnumNoPos docsEnum; - if (reuse is LowFreqDocsEnumNoPos) + if (reuse is LowFreqDocsEnumNoPos docsEnum) { - docsEnum = (LowFreqDocsEnumNoPos) reuse; if (!docsEnum.CanReuse(liveDocs)) { docsEnum = new LowFreqDocsEnumNoPos(liveDocs); @@ -1079,10 +1056,8 @@ public override 
DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) } else { - LowFreqDocsEnumNoTF docsEnum; - if (reuse is LowFreqDocsEnumNoTF) + if (reuse is LowFreqDocsEnumNoTF docsEnum) { - docsEnum = (LowFreqDocsEnumNoTF) reuse; if (!docsEnum.CanReuse(liveDocs)) { docsEnum = new LowFreqDocsEnumNoTF(liveDocs); @@ -1100,10 +1075,8 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { HighFreqTerm term = (HighFreqTerm) outerInstance.terms[termOrd]; - HighFreqDocsEnum docsEnum; - if (reuse is HighFreqDocsEnum) + if (reuse is HighFreqDocsEnum docsEnum) { - docsEnum = (HighFreqDocsEnum) reuse; if (!docsEnum.CanReuse(liveDocs)) { docsEnum = new HighFreqDocsEnum(liveDocs); @@ -1130,11 +1103,10 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos // TODO: implement reuse, something like Pulsing: // it's hairy! - if (outerInstance.terms[termOrd] is LowFreqTerm) + if (outerInstance.terms[termOrd] is LowFreqTerm lowFreqTerm) { - LowFreqTerm term = ((LowFreqTerm) outerInstance.terms[termOrd]); - int[] postings = term.Postings; - byte[] payloads = term.Payloads; + int[] postings = lowFreqTerm.Postings; + byte[] payloads = lowFreqTerm.Payloads; return (new LowFreqDocsAndPositionsEnum(liveDocs, outerInstance.hasOffsets, outerInstance.hasPayloads)).Reset(postings, payloads); @@ -1159,13 +1131,6 @@ private sealed class DirectIntersectTermsEnum : TermsEnum private sealed class State { - private readonly DirectPostingsFormat.DirectField.DirectIntersectTermsEnum outerInstance; - - public State(DirectPostingsFormat.DirectField.DirectIntersectTermsEnum outerInstance) - { - this.outerInstance = outerInstance; - } - internal int changeOrd; internal int state; internal Transition[] transitions; @@ -1185,7 +1150,7 @@ public DirectIntersectTermsEnum(DirectPostingsFormat.DirectField outerInstance, compiledAutomaton = compiled; termOrd = -1; states = new State[1]; - states[0] = new State(this); + states[0] = new State(); 
states[0].changeOrd = outerInstance.terms.Length; states[0].state = runAutomaton.InitialState; states[0].transitions = compiledAutomaton.SortedTransitions[states[0].state]; @@ -1380,7 +1345,7 @@ private void Grow() { State[] newStates = new State[states.Length + 1]; Array.Copy(states, 0, newStates, 0, states.Length); - newStates[states.Length] = new State(this); + newStates[states.Length] = new State(); states = newStates; } } @@ -1698,44 +1663,25 @@ public override TermState GetTermState() public override long Ord => termOrd; - public override int DocFreq - { - get - { - if (outerInstance.terms[termOrd] is LowFreqTerm) - { - return ((LowFreqTerm)outerInstance.terms[termOrd]).DocFreq; - } - else - { - return ((HighFreqTerm)outerInstance.terms[termOrd]).DocIDs.Length; - } - } - } + public override int DocFreq => + outerInstance.terms[termOrd] is LowFreqTerm lowFreqTerm ? + lowFreqTerm.DocFreq : + ((HighFreqTerm)outerInstance.terms[termOrd]).DocIDs.Length; + + public override long TotalTermFreq => + outerInstance.terms[termOrd] is LowFreqTerm lowFreqTerm ? + lowFreqTerm.TotalTermFreq : + ((HighFreqTerm)outerInstance.terms[termOrd]).TotalTermFreq; - public override long TotalTermFreq - { - get - { - if (outerInstance.terms[termOrd] is LowFreqTerm) - { - return ((LowFreqTerm)outerInstance.terms[termOrd]).TotalTermFreq; - } - else - { - return ((HighFreqTerm)outerInstance.terms[termOrd]).TotalTermFreq; - } - } - } public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { // TODO: implement reuse, something like Pulsing: // it's hairy! 
- if (outerInstance.terms[termOrd] is LowFreqTerm) + if (outerInstance.terms[termOrd] is LowFreqTerm lowFreqTerm) { - int[] postings = ((LowFreqTerm) outerInstance.terms[termOrd]).Postings; + int[] postings = lowFreqTerm.Postings; if (outerInstance.hasFreq) { if (outerInstance.hasPos) diff --git a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs index 6c4da9fc05..b7b19d3c8c 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs @@ -1,4 +1,5 @@ -using Lucene.Net.Index; +using Lucene.Net.Diagnostics; +using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Support; using Lucene.Net.Util; @@ -6,10 +7,9 @@ using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; -using System.Diagnostics; -using JCG = J2N.Collections.Generic; +using System.Runtime.CompilerServices; using BitSet = Lucene.Net.Util.OpenBitSet; -using Lucene.Net.Diagnostics; +using JCG = J2N.Collections.Generic; namespace Lucene.Net.Codecs.Memory { @@ -46,7 +46,7 @@ public class FSTOrdTermsReader : FieldsProducer // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java private readonly IDictionary fields = new JCG.SortedDictionary(StringComparer.Ordinal); private readonly PostingsReaderBase postingsReader; - private int version; + private readonly int version; // LUCENENET: marked readonly //static final boolean TEST = false; public FSTOrdTermsReader(SegmentReadState state, PostingsReaderBase postingsReader) @@ -86,10 +86,9 @@ public FSTOrdTermsReader(SegmentReadState state, PostingsReaderBase postingsRead var index = new FST(indexIn, PositiveInt32Outputs.Singleton); var current = new TermsReader(this, fieldInfo, blockIn, numTerms, sumTotalTermFreq, sumDocFreq, docCount, longsSize, index); - TermsReader previous; // LUCENENET NOTE: This simulates a put operation in Java, // getting the prior value first before setting it. 
- fields.TryGetValue(fieldInfo.Name, out previous); + fields.TryGetValue(fieldInfo.Name, out TermsReader previous); fields[fieldInfo.Name] = current; CheckFieldSummary(state.SegmentInfo, indexIn, blockIn, current, previous); } @@ -167,8 +166,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { if (Debugging.AssertsEnabled) Debugging.Assert(field != null); - TermsReader result; - fields.TryGetValue(field, out result); + fields.TryGetValue(field, out TermsReader result); return result; } @@ -226,7 +224,7 @@ internal TermsReader(FSTOrdTermsReader outerInstance, FieldInfo fieldInfo, Index this.metaLongsBlock = new byte[(int)blockIn.ReadVInt64()]; this.metaBytesBlock = new byte[(int)blockIn.ReadVInt64()]; - int last = 0, next = 0; + int last = 0, next; // LUCENENET: IDE0059: Remove unnecessary value assignment for (int i = 1; i < numBlocks; i++) { next = numSkipInfo * i; @@ -294,13 +292,13 @@ internal abstract class BaseTermsEnum : TermsEnum private int metaBlockOrd; /* Current buffered metadata (long[] & byte[]) */ - private long[][] longs; - private int[] bytesStart; - private int[] bytesLength; + private readonly long[][] longs; // LUCENENET: marked readonly + private readonly int[] bytesStart; // LUCENENET: marked readonly + private readonly int[] bytesLength; // LUCENENET: marked readonly /* Current buffered stats (df & ttf) */ - private int[] docFreq; - private long[] totalTermFreq; + private readonly int[] docFreq; // LUCENENET: marked readonly + private readonly long[] totalTermFreq; // LUCENENET: marked readonly internal BaseTermsEnum(TermsReader outerInstance) { @@ -559,8 +557,6 @@ public override void SeekExact(BytesRef target, TermState otherState) // Iterates intersect result with automaton (cannot seek!) private sealed class IntersectTermsEnum : BaseTermsEnum { - private readonly FSTOrdTermsReader.TermsReader outerInstance; - /// True when current term's metadata is decoded. 
private bool decoded; @@ -607,7 +603,6 @@ public override string ToString() internal IntersectTermsEnum(TermsReader outerInstance, CompiledAutomaton compiled, BytesRef startTerm) : base(outerInstance) { //if (TEST) System.out.println("Enum init, startTerm=" + startTerm); - this.outerInstance = outerInstance; this.fst = outerInstance.index; this.fstReader = fst.GetBytesReader(); this.fstOutputs = outerInstance.index.Outputs; @@ -620,7 +615,7 @@ internal IntersectTermsEnum(TermsReader outerInstance, CompiledAutomaton compile } Frame frame; - frame = LoadVirtualFrame(NewFrame()); + /*frame = */LoadVirtualFrame(NewFrame()); // LUCENENET: IDE0059: Remove unnecessary value assignment this.level++; frame = LoadFirstFrame(NewFrame()); PushFrame(frame); @@ -839,22 +834,26 @@ private Frame LoadCeilFrame(int label, Frame top, Frame frame) return frame; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool IsAccept(Frame frame) // reach a term both fst&fsa accepts { return fsa.IsAccept(frame.state) && frame.arc.IsFinal; } - private bool IsValid(Frame frame) // reach a prefix both fst&fsa won't reject + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool IsValid(Frame frame) // reach a prefix both fst&fsa won't reject // LUCENENET: CA1822: Mark members as static { return frame.state != -1; //frame != null && } - private bool CanGrow(Frame frame) // can walk forward on both fst&fsa + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool CanGrow(Frame frame) // can walk forward on both fst&fsa // LUCENENET: CA1822: Mark members as static { return frame.state != -1 && FST.TargetHasArcs(frame.arc); } - private bool CanRewind(Frame frame) // can jump to sibling + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool CanRewind(Frame frame) // can jump to sibling // LUCENENET: CA1822: Mark members as static { return !frame.arc.IsLast; } @@ -926,43 +925,7 @@ private BytesRef Shrink() } } - private static void 
Walk(FST fst) // LUCENENET NOTE: Not referenced anywhere - { - var queue = new List>(); - - // Java version was BitSet(), but in .NET we don't have a zero contructor BitSet. - // Couldn't find the default size in BitSet, so went with zero here. - var seen = new BitSet(); - var reader = fst.GetBytesReader(); - var startArc = fst.GetFirstArc(new FST.Arc()); - queue.Add(startArc); - while (queue.Count > 0) - { - //FST.Arc arc = queue.Remove(0); - var arc = queue[0]; - queue.RemoveAt(0); - - long node = arc.Target; - //System.out.println(arc); - if (FST.TargetHasArcs(arc) && !seen.Get((int)node)) - { - seen.Set((int)node); - fst.ReadFirstRealTargetArc(node, arc, reader); - while (true) - { - queue.Add((new FST.Arc()).CopyFrom(arc)); - if (arc.IsLast) - { - break; - } - else - { - fst.ReadNextRealArc(arc, reader); - } - } - } - } - } + // LUCENENET specific - removed Walk(FST fst) because it is dead code public override long RamBytesUsed() { diff --git a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs index dad29ecbfb..bf86ba3732 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsWriter.cs @@ -148,10 +148,12 @@ public class FSTOrdTermsWriter : FieldsConsumer public const int SKIP_INTERVAL = 8; private readonly PostingsWriterBase postingsWriter; - private readonly FieldInfos fieldInfos; + //private readonly FieldInfos fieldInfos; // LUCENENET: Never read private readonly IList _fields = new List(); +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput blockOut = null; - private IndexOutput indexOut = null; + private readonly IndexOutput indexOut = null; // LUCENENET: marked readonly +#pragma warning restore CA2213 // Disposable fields should be disposed public FSTOrdTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter) { @@ -161,7 +163,7 @@ public FSTOrdTermsWriter(SegmentWriteState state, 
PostingsWriterBase postingsWri TERMS_BLOCK_EXTENSION); this.postingsWriter = postingsWriter; - fieldInfos = state.FieldInfos; + //fieldInfos = state.FieldInfos; // LUCENENET: Never read var success = false; try diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs index 74dba55d3c..cf2154b81d 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTTermsReader.cs @@ -3,8 +3,8 @@ using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Codecs.Memory @@ -95,10 +95,9 @@ public FSTTermsReader(SegmentReadState state, PostingsReaderBase postingsReader) int docCount = @in.ReadVInt32(); int longsSize = @in.ReadVInt32(); TermsReader current = new TermsReader(this, fieldInfo, @in, numTerms, sumTotalTermFreq, sumDocFreq, docCount, longsSize); - TermsReader previous; // LUCENENET NOTE: This simulates a put operation in Java, // getting the prior value first before setting it. 
- fields.TryGetValue(fieldInfo.Name, out previous); + fields.TryGetValue(fieldInfo.Name, out TermsReader previous); fields[fieldInfo.Name] = current; CheckFieldSummary(state.SegmentInfo, @in, current, previous); } @@ -134,8 +133,8 @@ private void SeekDir(IndexInput @in) } @in.Seek(@in.ReadInt64()); } - - + + private void CheckFieldSummary(SegmentInfo info, IndexInput @in, TermsReader field, TermsReader previous) { // #docs with field must be <= #docs @@ -167,8 +166,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { if (Debugging.AssertsEnabled) Debugging.Assert(field != null); - TermsReader result; - fields.TryGetValue(field, out result); + fields.TryGetValue(field, out TermsReader result); return result; } @@ -273,7 +271,7 @@ internal BaseTermsEnum(FSTTermsReader.TermsReader outerInstance) public override TermState GetTermState() { DecodeMetaData(); - return (TermState) state.Clone(); + return (TermState)state.Clone(); } public override BytesRef Term => term; @@ -320,7 +318,7 @@ private sealed class SegmentTermsEnum : BaseTermsEnum /// True when current enum is 'positioned' by . private bool seekPending; - internal SegmentTermsEnum(FSTTermsReader.TermsReader outerInstance) + internal SegmentTermsEnum(FSTTermsReader.TermsReader outerInstance) : base(outerInstance) { this.outerInstance = outerInstance; @@ -439,7 +437,7 @@ private sealed class IntersectTermsEnum : BaseTermsEnum /// True when there is pending term when calling . 
private bool pending; - + /// /// stack to record how current term is constructed, /// used to accumulate metadata or rewind term: @@ -499,7 +497,7 @@ internal IntersectTermsEnum(FSTTermsReader.TermsReader outerInstance, CompiledAu } Frame frame; - frame = LoadVirtualFrame(NewFrame()); + /*frame = */LoadVirtualFrame(NewFrame()); // LUCENENET: IDE0059: Remove unnecessary value assignment this.level++; frame = LoadFirstFrame(NewFrame()); PushFrame(frame); @@ -752,19 +750,26 @@ private Frame LoadCeilFrame(int label, Frame top, Frame frame) return frame; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool IsAccept(Frame frame) // reach a term both fst&fsa accepts { return fsa.IsAccept(frame.fsaState) && frame.fstArc.IsFinal; } - private bool IsValid(Frame frame) // reach a prefix both fst&fsa won't reject + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool IsValid(Frame frame) // reach a prefix both fst&fsa won't reject // LUCENENET: CA1822: Mark members as static { return frame.fsaState != -1; //frame != null && } - private bool CanGrow(Frame frame) // can walk forward on both fst&fsa + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool CanGrow(Frame frame) // can walk forward on both fst&fsa // LUCENENET: CA1822: Mark members as static { return frame.fsaState != -1 && FST.TargetHasArcs(frame.fstArc); } - private bool CanRewind(Frame frame) // can jump to sibling + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool CanRewind(Frame frame) // can jump to sibling // LUCENENET: CA1822: Mark members as static { return !frame.fstArc.IsLast; } @@ -837,39 +842,7 @@ private BytesRef Shrink() } } - //internal static void Walk(FST fst) // LUCENENET NOTE: Not referenced - //{ - // List> queue = new List>(); - // FST.BytesReader reader = fst.GetBytesReader(); - // FST.Arc startArc = fst.GetFirstArc(new FST.Arc()); - // queue.Add(startArc); - // BitSet seen = new BitSet(queue.Count); - // while 
(queue.Count > 0) - // { - // FST.Arc arc = queue[0]; - // queue.RemoveAt(0); - - // long node = arc.Target; - // //System.out.println(arc); - // if (FST.TargetHasArcs(arc) && !seen.Get((int)node)) - // { - // seen.Set((int)node); - // fst.ReadFirstRealTargetArc(node, arc, reader); - // while (true) - // { - // queue.Add((new FST.Arc()).CopyFrom(arc)); - // if (arc.IsLast) - // { - // break; - // } - // else - // { - // fst.ReadNextRealArc(arc, reader); - // } - // } - // } - // } - //} + // LUCENENET specific - removed Walk(FST fst) because it is dead code public override long RamBytesUsed() { diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs b/src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs index f28707dfc9..f9a08b54c1 100644 --- a/src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs +++ b/src/Lucene.Net.Codecs/Memory/FSTTermsWriter.cs @@ -121,8 +121,10 @@ public class FSTTermsWriter : FieldsConsumer public const int TERMS_VERSION_CURRENT = TERMS_VERSION_CHECKSUM; private readonly PostingsWriterBase _postingsWriter; - private readonly FieldInfos _fieldInfos; + //private readonly FieldInfos _fieldInfos; // LUCENENET: Never read +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput _output; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly IList _fields = new List(); public FSTTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter) @@ -131,7 +133,7 @@ public FSTTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter TERMS_EXTENSION); _postingsWriter = postingsWriter; - _fieldInfos = state.FieldInfos; + //_fieldInfos = state.FieldInfos; // LUCENENET: Never read _output = state.Directory.CreateOutput(termsFileName, state.Context); var success = false; @@ -243,7 +245,7 @@ internal sealed class TermsWriter : TermsConsumer private long _numTerms; private readonly Int32sRef _scratchTerm = new Int32sRef(); - private readonly RAMOutputStream _statsWriter = new 
RAMOutputStream(); + //private readonly RAMOutputStream _statsWriter = new RAMOutputStream(); // LUCENENET: Never read private readonly RAMOutputStream _metaWriter = new RAMOutputStream(); internal TermsWriter(FSTTermsWriter outerInstance, FieldInfo fieldInfo) diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs index cb6dc17fb1..5776bcf76d 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs @@ -49,7 +49,9 @@ namespace Lucene.Net.Codecs.Memory /// internal class MemoryDocValuesConsumer : DocValuesConsumer { +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput data, meta; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly int maxDoc; private readonly float acceptableOverheadRatio; @@ -401,7 +403,7 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va IEnumerable docToOrdCount, IEnumerable ords) { // write the ordinals as a binary field - AddBinaryField(field, new IterableAnonymousInnerClassHelper(this, docToOrdCount, ords)); + AddBinaryField(field, new IterableAnonymousInnerClassHelper(docToOrdCount, ords)); // write the values as FST WriteFST(field, values); @@ -412,8 +414,7 @@ private class IterableAnonymousInnerClassHelper : IEnumerable private readonly IEnumerable _docToOrdCount; private readonly IEnumerable _ords; - public IterableAnonymousInnerClassHelper(MemoryDocValuesConsumer outerInstance, - IEnumerable docToOrdCount, IEnumerable ords) + public IterableAnonymousInnerClassHelper(IEnumerable docToOrdCount, IEnumerable ords) { _docToOrdCount = docToOrdCount; _ords = ords; @@ -434,8 +435,8 @@ IEnumerator IEnumerable.GetEnumerator() internal class SortedSetIterator : IEnumerator { private byte[] buffer = new byte[10]; - private ByteArrayDataOutput @out = new ByteArrayDataOutput(); - private BytesRef _current = new 
BytesRef(); + private readonly ByteArrayDataOutput @out = new ByteArrayDataOutput(); // LUCENENET: marked readonly + private readonly BytesRef _current = new BytesRef(); // LUCENENET: marked readonly private readonly IEnumerator counts; private readonly IEnumerator ords; diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs index 905193ad44..a1c261846b 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs @@ -83,7 +83,7 @@ internal MemoryDocValuesProducer(SegmentReadState state, string dataCodec, strin numerics = new Dictionary(); binaries = new Dictionary(); fsts = new Dictionary(); - ReadFields(@in, state.FieldInfos); + ReadFields(@in /*, state.FieldInfos // LUCENENET: Not referenced */); if (version >= VERSION_CHECKSUM) { CodecUtil.CheckFooter(@in); @@ -133,7 +133,7 @@ internal MemoryDocValuesProducer(SegmentReadState state, string dataCodec, strin } } - private void ReadFields(IndexInput meta, FieldInfos infos) + private void ReadFields(IndexInput meta /*, FieldInfos infos // LUCENENET: Not referenced */) { int fieldNumber = meta.ReadVInt32(); while (fieldNumber != -1) @@ -209,8 +209,7 @@ public override NumericDocValues GetNumeric(FieldInfo field) { lock (this) { - NumericDocValues instance; - if (!numericInstances.TryGetValue(field.Number, out instance)) + if (!numericInstances.TryGetValue(field.Number, out NumericDocValues instance)) { instance = LoadNumeric(field); numericInstances[field.Number] = instance; @@ -252,7 +251,7 @@ private NumericDocValues LoadNumeric(FieldInfo field) var ordsReader = PackedInt32s.GetReaderNoHeader(data, PackedInt32s.Format.ById(formatID), entry.packedIntsVersion, maxDoc, bitsPerValue); ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(decode) + ordsReader.RamBytesUsed()); - return new NumericDocValuesAnonymousInnerClassHelper(this, decode, ordsReader); + return new 
NumericDocValuesAnonymousInnerClassHelper(decode, ordsReader); case DELTA_COMPRESSED: int blockSize = data.ReadVInt32(); var reader = new BlockPackedReader(data, entry.packedIntsVersion, blockSize, maxDoc, @@ -264,7 +263,7 @@ private NumericDocValues LoadNumeric(FieldInfo field) data.ReadBytes(bytes, 0, bytes.Length); ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes)); // LUCENENET: IMPORTANT - some bytes are negative here, so we need to pass as sbyte - return new NumericDocValuesAnonymousInnerClassHelper2(this, (sbyte[])(Array)bytes); + return new NumericDocValuesAnonymousInnerClassHelper2((sbyte[])(Array)bytes); case GCD_COMPRESSED: long min = data.ReadInt64(); long mult = data.ReadInt64(); @@ -272,7 +271,7 @@ private NumericDocValues LoadNumeric(FieldInfo field) var quotientReader = new BlockPackedReader(data, entry.packedIntsVersion, quotientBlockSize, maxDoc, false); ramBytesUsed.AddAndGet(quotientReader.RamBytesUsed()); - return new NumericDocValuesAnonymousInnerClassHelper3(this, min, mult, quotientReader); + return new NumericDocValuesAnonymousInnerClassHelper3(min, mult, quotientReader); default: throw new InvalidOperationException(); } @@ -280,15 +279,11 @@ private NumericDocValues LoadNumeric(FieldInfo field) private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues { - private readonly MemoryDocValuesProducer outerInstance; - private readonly long[] decode; private readonly PackedInt32s.Reader ordsReader; - public NumericDocValuesAnonymousInnerClassHelper(MemoryDocValuesProducer outerInstance, long[] decode, - PackedInt32s.Reader ordsReader) + public NumericDocValuesAnonymousInnerClassHelper(long[] decode, PackedInt32s.Reader ordsReader) { - this.outerInstance = outerInstance; this.decode = decode; this.ordsReader = ordsReader; } @@ -301,12 +296,10 @@ public override long Get(int docID) private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues { - private readonly MemoryDocValuesProducer outerInstance; private 
readonly sbyte[] bytes; - public NumericDocValuesAnonymousInnerClassHelper2(MemoryDocValuesProducer outerInstance, sbyte[] bytes) + public NumericDocValuesAnonymousInnerClassHelper2(sbyte[] bytes) { - this.outerInstance = outerInstance; this.bytes = bytes; } @@ -322,7 +315,7 @@ private class NumericDocValuesAnonymousInnerClassHelper3 : NumericDocValues private readonly long mult; private readonly BlockPackedReader quotientReader; - public NumericDocValuesAnonymousInnerClassHelper3(MemoryDocValuesProducer outerInstance, long min, long mult, + public NumericDocValuesAnonymousInnerClassHelper3(long min, long mult, BlockPackedReader quotientReader) { this.min = min; @@ -340,8 +333,7 @@ public override BinaryDocValues GetBinary(FieldInfo field) { lock (this) { - BinaryDocValues instance; - if (!binaryInstances.TryGetValue(field.Number, out instance)) + if (!binaryInstances.TryGetValue(field.Number, out BinaryDocValues instance)) { instance = LoadBinary(field); binaryInstances[field.Number] = instance; @@ -361,7 +353,7 @@ private BinaryDocValues LoadBinary(FieldInfo field) { int fixedLength = entry.minLength; ramBytesUsed.AddAndGet(bytes.RamBytesUsed()); - return new BinaryDocValuesAnonymousInnerClassHelper(this, bytesReader, fixedLength); + return new BinaryDocValuesAnonymousInnerClassHelper(bytesReader, fixedLength); } else { @@ -369,7 +361,7 @@ private BinaryDocValues LoadBinary(FieldInfo field) var addresses = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false); ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + addresses.RamBytesUsed()); - return new BinaryDocValuesAnonymousInnerClassHelper2(this, bytesReader, addresses); + return new BinaryDocValuesAnonymousInnerClassHelper2(bytesReader, addresses); } } @@ -378,8 +370,7 @@ private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues private readonly PagedBytes.Reader bytesReader; private readonly int fixedLength; - public 
BinaryDocValuesAnonymousInnerClassHelper(MemoryDocValuesProducer outerInstance, - PagedBytes.Reader bytesReader, int fixedLength) + public BinaryDocValuesAnonymousInnerClassHelper(PagedBytes.Reader bytesReader, int fixedLength) { this.bytesReader = bytesReader; this.fixedLength = fixedLength; @@ -393,15 +384,11 @@ public override void Get(int docID, BytesRef result) private class BinaryDocValuesAnonymousInnerClassHelper2 : BinaryDocValues { - private readonly MemoryDocValuesProducer outerInstance; - private readonly PagedBytes.Reader bytesReader; private readonly MonotonicBlockPackedReader addresses; - public BinaryDocValuesAnonymousInnerClassHelper2(MemoryDocValuesProducer outerInstance, - PagedBytes.Reader bytesReader, MonotonicBlockPackedReader addresses) + public BinaryDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, MonotonicBlockPackedReader addresses) { - this.outerInstance = outerInstance; this.bytesReader = bytesReader; this.addresses = addresses; } diff --git a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs index aa434ee5f0..a53cfb4d10 100644 --- a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs @@ -108,8 +108,8 @@ private sealed class TermsWriter : TermsConsumer private readonly FieldInfo field; private readonly Builder builder; private readonly ByteSequenceOutputs outputs = ByteSequenceOutputs.Singleton; - private readonly bool doPackFST; - private readonly float acceptableOverheadRatio; + //private readonly bool doPackFST; // LUCENENET: Never read + //private readonly float acceptableOverheadRatio; // LUCENENET: Never read private int termCount; public TermsWriter(IndexOutput @out, FieldInfo field, bool doPackFST, float acceptableOverheadRatio) @@ -118,8 +118,8 @@ public TermsWriter(IndexOutput @out, FieldInfo field, bool doPackFST, float acce this.@out = @out; this.field = field; - this.doPackFST = doPackFST; - 
this.acceptableOverheadRatio = acceptableOverheadRatio; + //this.doPackFST = doPackFST; // LUCENENET: Never read + //this.acceptableOverheadRatio = acceptableOverheadRatio; // LUCENENET: Never read builder = new Builder(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, int.MaxValue, outputs, null, doPackFST, acceptableOverheadRatio, true, 15); } @@ -310,7 +310,7 @@ public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount public override IComparer Comparer => BytesRef.UTF8SortedAsUnicodeComparer; } - private static string EXTENSION = "ram"; + private const string EXTENSION = "ram"; // LUCENENET specific - made into const private const string CODEC_NAME = "MemoryPostings"; private const int VERSION_START = 0; private const int VERSION_CURRENT = VERSION_START; @@ -341,7 +341,7 @@ private class FieldsConsumerAnonymousInnerClassHelper : FieldsConsumer { private readonly MemoryPostingsFormat outerInstance; - private IndexOutput @out; + private readonly IndexOutput @out; public FieldsConsumerAnonymousInnerClassHelper(MemoryPostingsFormat outerInstance, IndexOutput @out) { @@ -378,7 +378,7 @@ private sealed class FSTDocsEnum : DocsEnum private readonly IndexOptions indexOptions; private readonly bool storePayloads; private byte[] buffer = new byte[16]; - private ByteArrayDataInput @in; + private readonly ByteArrayDataInput @in; // LUCENENET: marked readonly private IBits liveDocs; private int docUpto; @@ -524,7 +524,7 @@ private sealed class FSTDocsAndPositionsEnum : DocsAndPositionsEnum { private readonly bool storePayloads; private byte[] buffer = new byte[16]; - private ByteArrayDataInput @in; + private readonly ByteArrayDataInput @in; // LUCENENET: marked readonly private IBits liveDocs; private int docUpto; @@ -735,7 +735,7 @@ private sealed class FSTTermsEnum : TermsEnum private int docFreq; private long totalTermFreq; private BytesRefFSTEnum.InputOutput current; - private BytesRef postingsSpare = new BytesRef(); + private readonly BytesRef 
postingsSpare = new BytesRef(); // LUCENENET: marked readonly public FSTTermsEnum(FieldInfo field, FST fst) { @@ -807,20 +807,10 @@ public override SeekStatus SeekCeil(BytesRef text) public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { DecodeMetaData(); - FSTDocsEnum docsEnum; - if (reuse == null || !(reuse is FSTDocsEnum)) - { + if (reuse is null || !(reuse is FSTDocsEnum docsEnum) || !docsEnum.CanReuse(field.IndexOptions, field.HasPayloads)) docsEnum = new FSTDocsEnum(field.IndexOptions, field.HasPayloads); - } - else - { - docsEnum = (FSTDocsEnum)reuse; - if (!docsEnum.CanReuse(field.IndexOptions, field.HasPayloads)) - { - docsEnum = new FSTDocsEnum(field.IndexOptions, field.HasPayloads); - } - } + return docsEnum.Reset(this.postingsSpare, liveDocs, docFreq); } @@ -833,19 +823,9 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos return null; } DecodeMetaData(); - FSTDocsAndPositionsEnum docsAndPositionsEnum; - if (reuse == null || !(reuse is FSTDocsAndPositionsEnum)) - { + if (reuse is null || !(reuse is FSTDocsAndPositionsEnum docsAndPositionsEnum) || !docsAndPositionsEnum.CanReuse(field.HasPayloads, hasOffsets)) docsAndPositionsEnum = new FSTDocsAndPositionsEnum(field.HasPayloads, hasOffsets); - } - else - { - docsAndPositionsEnum = (FSTDocsAndPositionsEnum)reuse; - if (!docsAndPositionsEnum.CanReuse(field.HasPayloads, hasOffsets)) - { - docsAndPositionsEnum = new FSTDocsAndPositionsEnum(field.HasPayloads, hasOffsets); - } - } + //System.out.println("D&P reset this=" + this); return docsAndPositionsEnum.Reset(postingsSpare, liveDocs, docFreq); } @@ -1014,8 +994,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - TermsReader result; - _fields.TryGetValue(field, out result); + _fields.TryGetValue(field, out TermsReader result); return result; } diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs 
b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs index f91fb849a7..f2a22e0597 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsFormat.cs @@ -36,15 +36,15 @@ public abstract class PulsingPostingsFormat : PostingsFormat private readonly int _maxBlockSize; private readonly PostingsBaseFormat _wrappedPostingsBaseFormat; - public PulsingPostingsFormat(PostingsBaseFormat wrappedPostingsBaseFormat, int freqCutoff) + protected PulsingPostingsFormat(PostingsBaseFormat wrappedPostingsBaseFormat, int freqCutoff) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(wrappedPostingsBaseFormat, freqCutoff, BlockTreeTermsWriter.DEFAULT_MIN_BLOCK_SIZE, BlockTreeTermsWriter.DEFAULT_MAX_BLOCK_SIZE) { } /// Terms with freq less than or equal are inlined into terms dict. - public PulsingPostingsFormat(PostingsBaseFormat wrappedPostingsBaseFormat, int freqCutoff, - int minBlockSize, int maxBlockSize) + protected PulsingPostingsFormat(PostingsBaseFormat wrappedPostingsBaseFormat, int freqCutoff, + int minBlockSize, int maxBlockSize) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base() { if (Debugging.AssertsEnabled) Debugging.Assert(minBlockSize > 1); diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs index cbbdf8adad..7c46920a9f 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs @@ -228,10 +228,8 @@ public override DocsEnum Docs(FieldInfo field, BlockTermState termState, IBits l var termState2 = (PulsingTermState) termState; if (termState2.PostingsSize != -1) { - PulsingDocsEnum postings; - if (reuse is PulsingDocsEnum) + if (reuse is PulsingDocsEnum postings) { - postings = (PulsingDocsEnum) reuse; if (!postings.CanReuse(field)) { postings = new PulsingDocsEnum(field); @@ -276,10 
+274,8 @@ public override DocsAndPositionsEnum DocsAndPositions(FieldInfo field, BlockTerm if (termState2.PostingsSize != -1) { - PulsingDocsAndPositionsEnum postings; - if (reuse is PulsingDocsAndPositionsEnum) + if (reuse is PulsingDocsAndPositionsEnum postings) { - postings = (PulsingDocsAndPositionsEnum) reuse; if (!postings.CanReuse(field)) { postings = new PulsingDocsAndPositionsEnum(field); @@ -653,8 +649,7 @@ private DocsEnum GetOther(DocsEnum de) return null; var atts = de.Attributes; - DocsEnum result; - atts.AddAttribute().Enums.TryGetValue(this, out result); + atts.AddAttribute().Enums.TryGetValue(this, out DocsEnum result); return result; } diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs index a5e83d4b07..1824dc1c8d 100644 --- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsWriter.cs @@ -42,17 +42,17 @@ namespace Lucene.Net.Codecs.Pulsing /// public sealed class PulsingPostingsWriter : PostingsWriterBase { - internal static readonly string CODEC = "PulsedPostingsWriter"; - internal static readonly string SUMMARY_EXTENSION = "smy"; // recording field summary + internal const string CODEC = "PulsedPostingsWriter"; + internal const string SUMMARY_EXTENSION = "smy"; // recording field summary // To add a new version, increment from the last one, and // change VERSION_CURRENT to point to your new version: - internal static readonly int VERSION_START = 0; - internal static readonly int VERSION_META_ARRAY = 1; - internal static readonly int VERSION_CURRENT = VERSION_META_ARRAY; + internal const int VERSION_START = 0; + internal const int VERSION_META_ARRAY = 1; + internal const int VERSION_CURRENT = VERSION_META_ARRAY; private readonly SegmentWriteState _segmentState; - private IndexOutput _termsOut; + //private IndexOutput _termsOut; // LUCENENET: Never read private readonly List _fields; private IndexOptions _indexOptions; 
private bool _storePayloads; @@ -136,7 +136,7 @@ public PulsingPostingsWriter(SegmentWriteState state, int maxPositions, Postings public override void Init(IndexOutput termsOut) { - _termsOut = termsOut; + //_termsOut = termsOut; // LUCENENET: Never read CodecUtil.WriteHeader(termsOut, CODEC, VERSION_CURRENT); termsOut.WriteVInt32(_pending.Length); // encode maxPositions in header _wrappedPostingsWriter.Init(termsOut); @@ -434,35 +434,41 @@ public override void EncodeTerm(long[] empty, DataOutput output, FieldInfo field } } + private bool disposed = false; // LUCENENET specific protected override void Dispose(bool disposing) { - _wrappedPostingsWriter.Dispose(); - - if (_wrappedPostingsWriter is PulsingPostingsWriter || - VERSION_CURRENT < VERSION_META_ARRAY) + if (disposing && !disposed) { - return; - } + disposed = true; + _wrappedPostingsWriter.Dispose(); + _buffer.Dispose(); // LUCENENET specific - var summaryFileName = IndexFileNames.SegmentFileName(_segmentState.SegmentInfo.Name, - _segmentState.SegmentSuffix, SUMMARY_EXTENSION); - IndexOutput output = null; - try - { - output = - _segmentState.Directory.CreateOutput(summaryFileName, _segmentState.Context); - CodecUtil.WriteHeader(output, CODEC, VERSION_CURRENT); - output.WriteVInt32(_fields.Count); - foreach (var field in _fields) + if (_wrappedPostingsWriter is PulsingPostingsWriter || + VERSION_CURRENT < VERSION_META_ARRAY) { - output.WriteVInt32(field.FieldNumber); - output.WriteVInt32(field.Int64sSize); + return; + } + + var summaryFileName = IndexFileNames.SegmentFileName(_segmentState.SegmentInfo.Name, + _segmentState.SegmentSuffix, SUMMARY_EXTENSION); + IndexOutput output = null; + try + { + output = + _segmentState.Directory.CreateOutput(summaryFileName, _segmentState.Context); + CodecUtil.WriteHeader(output, CODEC, VERSION_CURRENT); + output.WriteVInt32(_fields.Count); + foreach (var field in _fields) + { + output.WriteVInt32(field.FieldNumber); + output.WriteVInt32(field.Int64sSize); + } + 
output.Dispose(); + } + finally + { + IOUtils.DisposeWhileHandlingException(output); } - output.Dispose(); - } - finally - { - IOUtils.DisposeWhileHandlingException(output); } } diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs index b6565cff8c..c87d3c34ab 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs @@ -35,11 +35,13 @@ namespace Lucene.Net.Codecs.Sep /// public class SepPostingsReader : PostingsReaderBase { +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly Int32IndexInput freqIn; private readonly Int32IndexInput docIn; private readonly Int32IndexInput posIn; private readonly IndexInput payloadIn; private readonly IndexInput skipIn; +#pragma warning restore CA2213 // Disposable fields should be disposed private int skipInterval; private int maxSkipLevels; @@ -240,22 +242,12 @@ public override DocsEnum Docs(FieldInfo fieldInfo, BlockTermState termState, IBi DocsFlags flags) { SepTermState termState_ = (SepTermState)termState; - SepDocsEnum docsEnum; - if (reuse == null || !(reuse is SepDocsEnum)) - { + + // If you are using ParellelReader, and pass in a + // reused DocsAndPositionsEnum, it could have come + // from another reader also using sep codec + if (reuse is null || !(reuse is SepDocsEnum docsEnum) || docsEnum.startDocIn != docIn) docsEnum = new SepDocsEnum(this); - } - else - { - docsEnum = (SepDocsEnum)reuse; - if (docsEnum.startDocIn != docIn) - { - // If you are using ParellelReader, and pass in a - // reused DocsAndPositionsEnum, it could have come - // from another reader also using sep codec - docsEnum = new SepDocsEnum(this); - } - } return docsEnum.Init(fieldInfo, termState_, liveDocs); } @@ -265,22 +257,12 @@ public override DocsAndPositionsEnum DocsAndPositions(FieldInfo fieldInfo, Block { if (Debugging.AssertsEnabled) Debugging.Assert(fieldInfo.IndexOptions == 
IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); SepTermState termState_ = (SepTermState)termState; - SepDocsAndPositionsEnum postingsEnum; - if (reuse == null || !(reuse is SepDocsAndPositionsEnum)) - { + + // If you are using ParellelReader, and pass in a + // reused DocsAndPositionsEnum, it could have come + // from another reader also using sep codec + if (reuse is null || !(reuse is SepDocsAndPositionsEnum postingsEnum) || postingsEnum.startDocIn != docIn) postingsEnum = new SepDocsAndPositionsEnum(this); - } - else - { - postingsEnum = (SepDocsAndPositionsEnum)reuse; - if (postingsEnum.startDocIn != docIn) - { - // If you are using ParellelReader, and pass in a - // reused DocsAndPositionsEnum, it could have come - // from another reader also using sep codec - postingsEnum = new SepDocsAndPositionsEnum(this); - } - } return postingsEnum.Init(fieldInfo, termState_, liveDocs); } diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs index 2e64ffb8ce..59e18bda1c 100644 --- a/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs +++ b/src/Lucene.Net.Codecs/Sep/SepPostingsWriter.cs @@ -32,30 +32,32 @@ namespace Lucene.Net.Codecs.Sep /// public sealed class SepPostingsWriter : PostingsWriterBase { - internal readonly static string CODEC = "SepPostingsWriter"; + internal const string CODEC = "SepPostingsWriter"; - internal readonly static string DOC_EXTENSION = "doc"; - internal readonly static string SKIP_EXTENSION = "skp"; - internal readonly static string FREQ_EXTENSION = "frq"; - internal readonly static string POS_EXTENSION = "pos"; - internal readonly static string PAYLOAD_EXTENSION = "pyl"; + internal const string DOC_EXTENSION = "doc"; + internal const string SKIP_EXTENSION = "skp"; + internal const string FREQ_EXTENSION = "frq"; + internal const string POS_EXTENSION = "pos"; + internal const string PAYLOAD_EXTENSION = "pyl"; // Increment version to change it: - internal readonly static int VERSION_START = 0; - 
internal readonly static int VERSION_CURRENT = VERSION_START; + internal const int VERSION_START = 0; + internal const int VERSION_CURRENT = VERSION_START; - private Int32IndexOutput freqOut; - private Int32IndexOutput.Index freqIndex; +#pragma warning disable CA2213 // Disposable fields should be disposed + private readonly Int32IndexOutput freqOut; // LUCENENET: marked readonly + private readonly Int32IndexOutput.Index freqIndex; // LUCENENET: marked readonly - private Int32IndexOutput posOut; - private Int32IndexOutput.Index posIndex; + private readonly Int32IndexOutput posOut; // LUCENENET: marked readonly + private readonly Int32IndexOutput.Index posIndex; // LUCENENET: marked readonly - private Int32IndexOutput docOut; - private Int32IndexOutput.Index docIndex; + private readonly Int32IndexOutput docOut; // LUCENENET: marked readonly + private readonly Int32IndexOutput.Index docIndex; // LUCENENET: marked readonly - private IndexOutput payloadOut; + private readonly IndexOutput payloadOut; // LUCENENET: marked readonly - private IndexOutput skipOut; + private readonly IndexOutput skipOut; // LUCENENET: marked readonly +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly SepSkipListWriter skipListWriter; @@ -67,7 +69,7 @@ public sealed class SepPostingsWriter : PostingsWriterBase /// accelerable cases. More detailed experiments would be useful here. /// private readonly int skipInterval; - private static readonly int DEFAULT_SKIP_INTERVAL = 16; + private const int DEFAULT_SKIP_INTERVAL = 16; /// /// Expert: minimum docFreq to write any skip data at all. 
@@ -85,7 +87,7 @@ public sealed class SepPostingsWriter : PostingsWriterBase private bool storePayloads; private IndexOptions indexOptions; - private FieldInfo fieldInfo; + //private FieldInfo fieldInfo; // LUCENENET: Never read private int lastPayloadLength; private int lastPosition; @@ -195,7 +197,7 @@ public override void StartTerm() // our parent calls setField whenever the field changes public override int SetField(FieldInfo fieldInfo) { - this.fieldInfo = fieldInfo; + //this.fieldInfo = fieldInfo; // LUCENENET: Never read this.indexOptions = fieldInfo.IndexOptions; // LUCENENET specific - to avoid boxing, changed from CompareTo() to IndexOptionsComparer.Compare() if (IndexOptionsComparer.Default.Compare(indexOptions, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0) diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs index ab7af414ee..3f38673d2b 100644 --- a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs +++ b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs @@ -35,11 +35,11 @@ namespace Lucene.Net.Codecs.Sep internal class SepSkipListReader : MultiLevelSkipListReader { private bool currentFieldStoresPayloads; - private Int32IndexInput.Index[] freqIndex; - private Int32IndexInput.Index[] docIndex; - private Int32IndexInput.Index[] posIndex; - private long[] payloadPointer; - private int[] payloadLength; + private readonly Int32IndexInput.Index[] freqIndex; // LUCENENET: marked readonly + private readonly Int32IndexInput.Index[] docIndex; // LUCENENET: marked readonly + private readonly Int32IndexInput.Index[] posIndex; // LUCENENET: marked readonly + private readonly long[] payloadPointer; // LUCENENET: marked readonly + private readonly int[] payloadLength; // LUCENENET: marked readonly private readonly Int32IndexInput.Index lastFreqIndex; private readonly Int32IndexInput.Index lastDocIndex; diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs 
b/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs index f65b92244e..76cb522246 100644 --- a/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs +++ b/src/Lucene.Net.Codecs/Sep/SepSkipListWriter.cs @@ -35,15 +35,15 @@ namespace Lucene.Net.Codecs.Sep /// internal class SepSkipListWriter : MultiLevelSkipListWriter { - private int[] lastSkipDoc; - private int[] lastSkipPayloadLength; - private long[] lastSkipPayloadPointer; + private readonly int[] lastSkipDoc; // LUCENENET: marked readonly + private readonly int[] lastSkipPayloadLength; // LUCENENET: marked readonly + private readonly long[] lastSkipPayloadPointer; // LUCENENET: marked readonly - private Int32IndexOutput.Index[] docIndex; - private Int32IndexOutput.Index[] freqIndex; - private Int32IndexOutput.Index[] posIndex; + private readonly Int32IndexOutput.Index[] docIndex; // LUCENENET: marked readonly + private readonly Int32IndexOutput.Index[] freqIndex; // LUCENENET: marked readonly + private readonly Int32IndexOutput.Index[] posIndex; // LUCENENET: marked readonly - private Int32IndexOutput freqOutput; + private readonly Int32IndexOutput freqOutput; // LUCENENET: marked readonly // TODO: -- private again internal Int32IndexOutput posOutput; // TODO: -- private again @@ -90,7 +90,7 @@ internal SepSkipListWriter(int skipInterval, int numberOfSkipLevels, int docCoun } } - IndexOptions indexOptions; + private IndexOptions indexOptions; internal void SetIndexOptions(IndexOptions v) { diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs index aa70804d43..f9c1a64006 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs @@ -385,8 +385,6 @@ private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues private readonly OneField _field; private readonly IndexInput _input; private readonly BytesRef _scratch; - private readonly string 
_decoderFormat; - private readonly string _ordDecoderFormat; public SortedDocValuesAnonymousInnerClassHelper(SimpleTextDocValuesReader outerInstance, OneField field, IndexInput input, BytesRef scratch) @@ -395,8 +393,6 @@ public SortedDocValuesAnonymousInnerClassHelper(SimpleTextDocValuesReader outerI _field = field; _input = input; _scratch = scratch; - _decoderFormat = field.Pattern; - _ordDecoderFormat = field.OrdPattern; } public override int GetOrd(int docId) @@ -490,7 +486,6 @@ private class SortedSetDocValuesAnonymousInnerClassHelper : SortedSetDocValues private readonly OneField _field; private readonly IndexInput _input; private readonly BytesRef _scratch; - private readonly string _decoderFormat; public SortedSetDocValuesAnonymousInnerClassHelper(SimpleTextDocValuesReader outerInstance, OneField field, IndexInput input, BytesRef scratch) @@ -499,7 +494,6 @@ public SortedSetDocValuesAnonymousInnerClassHelper(SimpleTextDocValuesReader out _field = field; _input = input; _scratch = scratch; - _decoderFormat = field.Pattern; _currentOrds = Arrays.Empty(); _currentIndex = 0; } diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs index faedde2579..3d5cca875d 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs @@ -51,7 +51,9 @@ public class SimpleTextDocValuesWriter : DocValuesConsumer // LUCENENET NOTE: ch internal static readonly BytesRef NUMVALUES = new BytesRef(" numvalues "); internal static readonly BytesRef ORDPATTERN = new BytesRef(" ordpattern "); +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput data; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly BytesRef scratch = new BytesRef(); private readonly int numDocs; private readonly ISet _fieldsSeen = new JCG.HashSet(); // for asserting diff --git 
a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs index f78809ad32..949461120f 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsReader.cs @@ -193,17 +193,10 @@ public override void SeekExact(long ord) public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { - SimpleTextDocsEnum docsEnum; - if (reuse != null && reuse is SimpleTextDocsEnum && ((SimpleTextDocsEnum) reuse).CanReuse(_outerInstance._input)) - { - docsEnum = (SimpleTextDocsEnum) reuse; - } - else - { + if (reuse is null || !(reuse is SimpleTextDocsEnum docsEnum) || !docsEnum.CanReuse(_outerInstance._input)) docsEnum = new SimpleTextDocsEnum(_outerInstance); - } - return docsEnum.Reset(_docsStart, liveDocs, _indexOptions == IndexOptions.DOCS_ONLY, - _docFreq); + + return docsEnum.Reset(_docsStart, liveDocs, _indexOptions == IndexOptions.DOCS_ONLY, _docFreq); } public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) @@ -215,15 +208,9 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos return null; } - SimpleTextDocsAndPositionsEnum docsAndPositionsEnum; - if (reuse != null && reuse is SimpleTextDocsAndPositionsEnum && ((SimpleTextDocsAndPositionsEnum) reuse).CanReuse(_outerInstance._input)) - { - docsAndPositionsEnum = (SimpleTextDocsAndPositionsEnum) reuse; - } - else - { + if (reuse is null || !(reuse is SimpleTextDocsAndPositionsEnum docsAndPositionsEnum) || !docsAndPositionsEnum.CanReuse(_outerInstance._input)) docsAndPositionsEnum = new SimpleTextDocsAndPositionsEnum(_outerInstance); - } + return docsAndPositionsEnum.Reset(_docsStart, liveDocs, _indexOptions, _docFreq); } @@ -692,11 +679,9 @@ public override Terms GetTerms(string field) { lock (this) { - SimpleTextTerms terms; - if (!_termsCache.TryGetValue(field, out terms) || 
terms == null) + if (!_termsCache.TryGetValue(field, out SimpleTextTerms terms) || terms == null) { - long? fp; - if (!_fields.TryGetValue(field, out fp) || !fp.HasValue) + if (!_fields.TryGetValue(field, out long? fp) || !fp.HasValue) { return null; } diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs index 1af8ea38c9..4ff5ff78a7 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextFieldsWriter.cs @@ -75,12 +75,10 @@ public override TermsConsumer AddField(FieldInfo field) private class SimpleTextTermsWriter : TermsConsumer { - private readonly SimpleTextFieldsWriter _outerInstance; private readonly SimpleTextPostingsWriter _postingsWriter; public SimpleTextTermsWriter(SimpleTextFieldsWriter outerInstance, FieldInfo field) { - _outerInstance = outerInstance; _postingsWriter = new SimpleTextPostingsWriter(outerInstance, field); } diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs index d7afaa0ec8..e9c38f5b51 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsReader.cs @@ -51,7 +51,9 @@ namespace Lucene.Net.Codecs.SimpleText public class SimpleTextStoredFieldsReader : StoredFieldsReader { private long[] _offsets; // docid -> offset in .fld file +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexInput _input; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly BytesRef _scratch = new BytesRef(); private readonly CharsRef _scratchUtf16 = new CharsRef(); private readonly FieldInfos _fieldInfos; diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsWriter.cs index 0ea977628c..449e961f6b 100644 
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextStoredFieldsWriter.cs @@ -43,7 +43,9 @@ public class SimpleTextStoredFieldsWriter : StoredFieldsWriter private int _numDocsWritten; private readonly Directory _directory; private readonly string _segment; +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput _output; +#pragma warning restore CA2213 // Disposable fields should be disposed internal const string FIELDS_EXTENSION = "fld"; diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs index 099439b52e..25b58ca9d8 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsReader.cs @@ -55,7 +55,9 @@ namespace Lucene.Net.Codecs.SimpleText public class SimpleTextTermVectorsReader : TermVectorsReader { private long[] _offsets; // docid -> offset in .vec file +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexInput _input; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly BytesRef _scratch = new BytesRef(); private readonly CharsRef _scratchUtf16 = new CharsRef(); @@ -226,7 +228,7 @@ public override Fields Get(int doc) } } } - return new SimpleTVFields(this, fields); + return new SimpleTVFields(fields); } public override object Clone() @@ -275,12 +277,10 @@ private string ReadString(int offset, BytesRef scratch) private class SimpleTVFields : Fields { - private readonly SimpleTextTermVectorsReader _outerInstance; private readonly IDictionary _fields; - internal SimpleTVFields(SimpleTextTermVectorsReader outerInstance, IDictionary fields) + internal SimpleTVFields(IDictionary fields) { - _outerInstance = outerInstance; _fields = fields; } @@ -291,8 +291,7 @@ public override IEnumerator GetEnumerator() public override Terms 
GetTerms(string field) { - SimpleTVTerms result; - _fields.TryGetValue(field, out result); + _fields.TryGetValue(field, out SimpleTVTerms result); return result; } @@ -542,7 +541,7 @@ public virtual void Reset(IBits liveDocs, int[] positions, int[] startOffsets, i public override BytesRef GetPayload() { - return _payloads == null ? null : _payloads[_nextPos - 1]; + return _payloads?[_nextPos - 1]; } public override int NextPosition() diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs index e178c3e790..5db7626af8 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextTermVectorsWriter.cs @@ -61,7 +61,9 @@ public class SimpleTextTermVectorsWriter : TermVectorsWriter private readonly Directory _directory; private readonly string _segment; +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput _output; +#pragma warning restore CA2213 // Disposable fields should be disposed private int _numDocsWritten; private readonly BytesRef _scratch = new BytesRef(); private bool _offsets; diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextUtil.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextUtil.cs index e98a5a6334..71e8da00a1 100644 --- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextUtil.cs +++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextUtil.cs @@ -31,8 +31,8 @@ namespace Lucene.Net.Codecs.SimpleText internal class SimpleTextUtil { - public static readonly byte NEWLINE = 10; - public static readonly byte ESCAPE = 92; + public const byte NEWLINE = 10; + public const byte ESCAPE = 92; internal static readonly BytesRef CHECKSUM = new BytesRef("checksum "); public static void Write(DataOutput output, string s, BytesRef scratch) diff --git a/src/Lucene.Net.Demo/Facet/AssociationsFacetsExample.cs b/src/Lucene.Net.Demo/Facet/AssociationsFacetsExample.cs index 1641bf17f1..168baf1062 
100644 --- a/src/Lucene.Net.Demo/Facet/AssociationsFacetsExample.cs +++ b/src/Lucene.Net.Demo/Facet/AssociationsFacetsExample.cs @@ -67,87 +67,78 @@ private void Index() { IndexWriterConfig iwc = new IndexWriterConfig(EXAMPLE_VERSION, new WhitespaceAnalyzer(EXAMPLE_VERSION)); - using (IndexWriter indexWriter = new IndexWriter(indexDir, iwc)) + using IndexWriter indexWriter = new IndexWriter(indexDir, iwc); // Writes facet ords to a separate directory from the main index - using (DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir)) - { - - Document doc = new Document(); - // 3 occurrences for tag 'lucene' - - doc.AddInt32AssociationFacetField(3, "tags", "lucene"); - // 87% confidence level of genre 'computing' - doc.AddSingleAssociationFacetField(0.87f, "genre", "computing"); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - doc = new Document(); - // 1 occurrence for tag 'lucene' - doc.AddInt32AssociationFacetField(1, "tags", "lucene"); - // 2 occurrence for tag 'solr' - doc.AddInt32AssociationFacetField(2, "tags", "solr"); - // 75% confidence level of genre 'computing' - doc.AddSingleAssociationFacetField(0.75f, "genre", "computing"); - // 34% confidence level of genre 'software' - doc.AddSingleAssociationFacetField(0.34f, "genre", "software"); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - } // Disposes indexWriter and taxoWriter + using DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); + Document doc = new Document(); + // 3 occurrences for tag 'lucene' + + doc.AddInt32AssociationFacetField(3, "tags", "lucene"); + // 87% confidence level of genre 'computing' + doc.AddSingleAssociationFacetField(0.87f, "genre", "computing"); + indexWriter.AddDocument(config.Build(taxoWriter, doc)); + + doc = new Document(); + // 1 occurrence for tag 'lucene' + doc.AddInt32AssociationFacetField(1, "tags", "lucene"); + // 2 occurrence for tag 'solr' + doc.AddInt32AssociationFacetField(2, "tags", "solr"); + // 
75% confidence level of genre 'computing' + doc.AddSingleAssociationFacetField(0.75f, "genre", "computing"); + // 34% confidence level of genre 'software' + doc.AddSingleAssociationFacetField(0.34f, "genre", "software"); + indexWriter.AddDocument(config.Build(taxoWriter, doc)); } /// User runs a query and aggregates facets by summing their association values. private IList SumAssociations() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - using (TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); - - FacetsCollector fc = new FacetsCollector(); - - // MatchAllDocsQuery is for "browsing" (counts facets - // for all non-deleted docs in the index); normally - // you'd use a "normal" query: - FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + using TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); + IndexSearcher searcher = new IndexSearcher(indexReader); - Facets tags = new TaxonomyFacetSumInt32Associations("$tags", taxoReader, config, fc); - Facets genre = new TaxonomyFacetSumSingleAssociations("$genre", taxoReader, config, fc); + FacetsCollector fc = new FacetsCollector(); - // Retrieve results - IList results = new List(); + // MatchAllDocsQuery is for "browsing" (counts facets + // for all non-deleted docs in the index); normally + // you'd use a "normal" query: + FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); - results.Add(tags.GetTopChildren(10, "tags")); - results.Add(genre.GetTopChildren(10, "genre")); + Facets tags = new TaxonomyFacetSumInt32Associations("$tags", taxoReader, config, fc); + Facets genre = new TaxonomyFacetSumSingleAssociations("$genre", taxoReader, config, fc); - return results; + // Retrieve results + IList results = new List + { + tags.GetTopChildren(10, "tags"), + genre.GetTopChildren(10, "genre") + }; - } // Disposes 
indexReader and taxoReader + return results; } /// User drills down on 'tags/solr'. private FacetResult DrillDown() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - using (TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + using TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); + IndexSearcher searcher = new IndexSearcher(indexReader); - // Passing no baseQuery means we drill down on all - // documents ("browse only"): - DrillDownQuery q = new DrillDownQuery(config); + // Passing no baseQuery means we drill down on all + // documents ("browse only"): + DrillDownQuery q = new DrillDownQuery(config); - // Now user drills down on Publish Date/2010: - q.Add("tags", "solr"); - FacetsCollector fc = new FacetsCollector(); - FacetsCollector.Search(searcher, q, 10, fc); + // Now user drills down on Publish Date/2010: + q.Add("tags", "solr"); + FacetsCollector fc = new FacetsCollector(); + FacetsCollector.Search(searcher, q, 10, fc); - // Retrieve results - Facets facets = new TaxonomyFacetSumSingleAssociations("$genre", taxoReader, config, fc); - FacetResult result = facets.GetTopChildren(10, "genre"); + // Retrieve results + Facets facets = new TaxonomyFacetSumSingleAssociations("$genre", taxoReader, config, fc); + FacetResult result = facets.GetTopChildren(10, "genre"); - return result; - - } // Disposes indexReader and taxoReader + return result; } /// Runs summing association example. @@ -164,7 +155,9 @@ public FacetResult RunDrillDown() return DrillDown(); } + /// Runs the sum int/float associations examples and prints the results. 
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Demo shows use of optional args argument")] public static void Main(string[] args) { Console.WriteLine("Sum associations example:"); diff --git a/src/Lucene.Net.Demo/Facet/DistanceFacetsExample.cs b/src/Lucene.Net.Demo/Facet/DistanceFacetsExample.cs index d28c6cad61..d20f4ad18a 100644 --- a/src/Lucene.Net.Demo/Facet/DistanceFacetsExample.cs +++ b/src/Lucene.Net.Demo/Facet/DistanceFacetsExample.cs @@ -45,7 +45,7 @@ namespace Lucene.Net.Demo.Facet /// Shows simple usage of dynamic range faceting, using the /// expressions module to calculate distance. /// - public class DistanceFacetsExample : IDisposable + public sealed class DistanceFacetsExample : IDisposable { /// /// Using a constant for all functionality related to a specific index @@ -83,34 +83,34 @@ public class DistanceFacetsExample : IDisposable /// Build the example index. public void Index() { - using (IndexWriter writer = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, - new WhitespaceAnalyzer(EXAMPLE_VERSION)))) - { - // TODO: we could index in radians instead ... saves all the conversions in GetBoundingBoxFilter - - // Add documents with latitude/longitude location: - Document doc = new Document(); - doc.Add(new DoubleField("latitude", 40.759011, Field.Store.NO)); - doc.Add(new DoubleField("longitude", -73.9844722, Field.Store.NO)); - writer.AddDocument(doc); + using IndexWriter writer = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, + new WhitespaceAnalyzer(EXAMPLE_VERSION))); + // TODO: we could index in radians instead ... 
saves all the conversions in GetBoundingBoxFilter - doc = new Document(); - doc.Add(new DoubleField("latitude", 40.718266, Field.Store.NO)); - doc.Add(new DoubleField("longitude", -74.007819, Field.Store.NO)); - writer.AddDocument(doc); + // Add documents with latitude/longitude location: + writer.AddDocument(new Document + { + new DoubleField("latitude", 40.759011, Field.Store.NO), + new DoubleField("longitude", -73.9844722, Field.Store.NO) + }); - doc = new Document(); - doc.Add(new DoubleField("latitude", 40.7051157, Field.Store.NO)); - doc.Add(new DoubleField("longitude", -74.0088305, Field.Store.NO)); - writer.AddDocument(doc); + writer.AddDocument(new Document + { + new DoubleField("latitude", 40.718266, Field.Store.NO), + new DoubleField("longitude", -74.007819, Field.Store.NO) + }); - // Open near-real-time searcher - searcher = new IndexSearcher(DirectoryReader.Open(writer, true)); + writer.AddDocument(new Document + { + new DoubleField("latitude", 40.7051157, Field.Store.NO), + new DoubleField("longitude", -74.0088305, Field.Store.NO) + }); - } // Disposes writer + // Open near-real-time searcher + searcher = new IndexSearcher(DirectoryReader.Open(writer, true)); } - private ValueSource GetDistanceValueSource() + private static ValueSource GetDistanceValueSource() { Expression distance = JavascriptCompiler.Compile( string.Format(CultureInfo.InvariantCulture, "haversin({0:R},{1:R},latitude,longitude)", ORIGIN_LATITUDE, ORIGIN_LONGITUDE)); @@ -173,22 +173,31 @@ public static Filter GetBoundingBoxFilter(double originLat, double originLng, do maxLng = 180.ToRadians(); } - BooleanFilter f = new BooleanFilter(); - - // Add latitude range filter: - f.Add(NumericRangeFilter.NewDoubleRange("latitude", minLat.ToDegrees(), maxLat.ToDegrees(), true, true), - Occur.MUST); + BooleanFilter f = new BooleanFilter + { + // Add latitude range filter: + { + NumericRangeFilter.NewDoubleRange("latitude", minLat.ToDegrees(), maxLat.ToDegrees(), true, true), + Occur.MUST + } + 
}; // Add longitude range filter: if (minLng > maxLng) { // The bounding box crosses the international date // line: - BooleanFilter lonF = new BooleanFilter(); - lonF.Add(NumericRangeFilter.NewDoubleRange("longitude", minLng.ToDegrees(), null, true, true), - Occur.SHOULD); - lonF.Add(NumericRangeFilter.NewDoubleRange("longitude", null, maxLng.ToDegrees(), true, true), - Occur.SHOULD); + BooleanFilter lonF = new BooleanFilter + { + { + NumericRangeFilter.NewDoubleRange("longitude", minLng.ToDegrees(), null, true, true), + Occur.SHOULD + }, + { + NumericRangeFilter.NewDoubleRange("longitude", null, maxLng.ToDegrees(), true, true), + Occur.SHOULD + } + }; f.Add(lonF, Occur.MUST); } else @@ -255,21 +264,18 @@ public void Dispose() /// Runs the search and drill-down examples and prints the results. public static void Main(string[] args) { - using (DistanceFacetsExample example = new DistanceFacetsExample()) - { - example.Index(); - - Console.WriteLine("Distance facet counting example:"); - Console.WriteLine("-----------------------"); - Console.WriteLine(example.Search()); - - Console.WriteLine("\n"); - Console.WriteLine("Distance facet drill-down example (field/< 2 km):"); - Console.WriteLine("---------------------------------------------"); - TopDocs hits = example.DrillDown(TWO_KM); - Console.WriteLine(hits.TotalHits + " totalHits"); - - } // Disposes example + using DistanceFacetsExample example = new DistanceFacetsExample(); + example.Index(); + + Console.WriteLine("Distance facet counting example:"); + Console.WriteLine("-----------------------"); + Console.WriteLine(example.Search()); + + Console.WriteLine("\n"); + Console.WriteLine("Distance facet drill-down example (field/< 2 km):"); + Console.WriteLine("---------------------------------------------"); + TopDocs hits = example.DrillDown(TWO_KM); + Console.WriteLine(hits.TotalHits + " totalHits"); } } } diff --git a/src/Lucene.Net.Demo/Facet/ExpressionAggregationFacetsExample.cs 
b/src/Lucene.Net.Demo/Facet/ExpressionAggregationFacetsExample.cs index c080b5a7bb..4fde35c884 100644 --- a/src/Lucene.Net.Demo/Facet/ExpressionAggregationFacetsExample.cs +++ b/src/Lucene.Net.Demo/Facet/ExpressionAggregationFacetsExample.cs @@ -57,57 +57,53 @@ public class ExpressionAggregationFacetsExample /// Build the example index. private void Index() { - using (IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, - new WhitespaceAnalyzer(EXAMPLE_VERSION)))) + using IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, + new WhitespaceAnalyzer(EXAMPLE_VERSION))); // Writes facet ords to a separate directory from the main index - using (DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir)) - { - - Document doc = new Document(); - doc.Add(new TextField("c", "foo bar", Field.Store.NO)); - doc.Add(new NumericDocValuesField("popularity", 5L)); - doc.Add(new FacetField("A", "B")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - doc = new Document(); - doc.Add(new TextField("c", "foo foo bar", Field.Store.NO)); - doc.Add(new NumericDocValuesField("popularity", 3L)); - doc.Add(new FacetField("A", "C")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - } // Disposes indexWriter and taxoWriter + using DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); + + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new TextField("c", "foo bar", Field.Store.NO), + new NumericDocValuesField("popularity", 5L), + new FacetField("A", "B") + })); + + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new TextField("c", "foo foo bar", Field.Store.NO), + new NumericDocValuesField("popularity", 3L), + new FacetField("A", "C") + })); } /// User runs a query and aggregates facets. 
private FacetResult Search() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - using (TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); - - // Aggregate categories by an expression that combines the document's score - // and its popularity field - Expression expr = JavascriptCompiler.Compile("_score * sqrt(popularity)"); - SimpleBindings bindings = new SimpleBindings(); - bindings.Add(new SortField("_score", SortFieldType.SCORE)); // the score of the document - bindings.Add(new SortField("popularity", SortFieldType.INT64)); // the value of the 'popularity' field - - // Aggregates the facet values - FacetsCollector fc = new FacetsCollector(true); - - // MatchAllDocsQuery is for "browsing" (counts facets - // for all non-deleted docs in the index); normally - // you'd use a "normal" query: - FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); - - // Retrieve results - Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, expr.GetValueSource(bindings)); - FacetResult result = facets.GetTopChildren(10, "A"); - - return result; - - } // Disposes indexReader and taxoReader + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + using TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); + IndexSearcher searcher = new IndexSearcher(indexReader); + + // Aggregate categories by an expression that combines the document's score + // and its popularity field + Expression expr = JavascriptCompiler.Compile("_score * sqrt(popularity)"); + SimpleBindings bindings = new SimpleBindings(); + bindings.Add(new SortField("_score", SortFieldType.SCORE)); // the score of the document + bindings.Add(new SortField("popularity", SortFieldType.INT64)); // the value of the 'popularity' field + + // Aggregates the facet values + FacetsCollector fc = new FacetsCollector(true); + + // MatchAllDocsQuery is for "browsing" (counts facets 
+ // for all non-deleted docs in the index); normally + // you'd use a "normal" query: + FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); + + // Retrieve results + Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, expr.GetValueSource(bindings)); + FacetResult result = facets.GetTopChildren(10, "A"); + + return result; } /// Runs the search example. diff --git a/src/Lucene.Net.Demo/Facet/MultiCategoryListsFacetsExample.cs b/src/Lucene.Net.Demo/Facet/MultiCategoryListsFacetsExample.cs index a99fb4d5bf..656d5570d0 100644 --- a/src/Lucene.Net.Demo/Facet/MultiCategoryListsFacetsExample.cs +++ b/src/Lucene.Net.Demo/Facet/MultiCategoryListsFacetsExample.cs @@ -63,38 +63,40 @@ public MultiCategoryListsFacetsExample() /// Build the example index. private void Index() { - using (IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, - new WhitespaceAnalyzer(EXAMPLE_VERSION)))) + using IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, + new WhitespaceAnalyzer(EXAMPLE_VERSION))); // Writes facet ords to a separate directory from the main index - using (DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir)) + using DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); + + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Bob"), + new FacetField("Publish Date", "2010", "10", "15") + })); + + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Lisa"), + new FacetField("Publish Date", "2010", "10", "20") + })); + + indexWriter.AddDocument(config.Build(taxoWriter, new Document { - Document doc = new Document(); - - doc.Add(new FacetField("Author", "Bob")); - doc.Add(new FacetField("Publish Date", "2010", "10", "15")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - doc = new Document(); - doc.Add(new FacetField("Author", 
"Lisa")); - doc.Add(new FacetField("Publish Date", "2010", "10", "20")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - doc = new Document(); - doc.Add(new FacetField("Author", "Lisa")); - doc.Add(new FacetField("Publish Date", "2012", "1", "1")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - doc = new Document(); - doc.Add(new FacetField("Author", "Susan")); - doc.Add(new FacetField("Publish Date", "2012", "1", "7")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - doc = new Document(); - doc.Add(new FacetField("Author", "Frank")); - doc.Add(new FacetField("Publish Date", "1999", "5", "5")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); - - } // Disposes indexWriter and taxoWriter + new FacetField("Author", "Lisa"), + new FacetField("Publish Date", "2012", "1", "1") + })); + + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Susan"), + new FacetField("Publish Date", "2012", "1", "7") + })); + + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Frank"), + new FacetField("Publish Date", "1999", "5", "5") + })); } /// User runs a query and counts facets. 
@@ -102,27 +104,25 @@ private IList Search() { IList results = new List(); - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - using (TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); - FacetsCollector fc = new FacetsCollector(); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + using TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); - // MatchAllDocsQuery is for "browsing" (counts facets - // for all non-deleted docs in the index); normally - // you'd use a "normal" query: - FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); + IndexSearcher searcher = new IndexSearcher(indexReader); + FacetsCollector fc = new FacetsCollector(); - // Retrieve results + // MatchAllDocsQuery is for "browsing" (counts facets + // for all non-deleted docs in the index); normally + // you'd use a "normal" query: + FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); - // Count both "Publish Date" and "Author" dimensions - Facets author = new FastTaxonomyFacetCounts("author", taxoReader, config, fc); - results.Add(author.GetTopChildren(10, "Author")); + // Retrieve results - Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, fc); - results.Add(pubDate.GetTopChildren(10, "Publish Date")); + // Count both "Publish Date" and "Author" dimensions + Facets author = new FastTaxonomyFacetCounts("author", taxoReader, config, fc); + results.Add(author.GetTopChildren(10, "Author")); - } // Disposes indexReader and taxoReader + Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, fc); + results.Add(pubDate.GetTopChildren(10, "Publish Date")); return results; } diff --git a/src/Lucene.Net.Demo/Facet/RangeFacetsExample.cs b/src/Lucene.Net.Demo/Facet/RangeFacetsExample.cs index 180c39b95a..264925a17e 100644 --- a/src/Lucene.Net.Demo/Facet/RangeFacetsExample.cs +++ 
b/src/Lucene.Net.Demo/Facet/RangeFacetsExample.cs @@ -35,7 +35,7 @@ namespace Lucene.Net.Demo.Facet /// /// Shows simple usage of dynamic range faceting. /// - public class RangeFacetsExample : IDisposable + public sealed class RangeFacetsExample : IDisposable { /// /// Using a constant for all functionality related to a specific index @@ -65,29 +65,26 @@ public RangeFacetsExample() /// Build the example index. public void Index() { - using (IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, - new WhitespaceAnalyzer(EXAMPLE_VERSION)))) + using IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, + new WhitespaceAnalyzer(EXAMPLE_VERSION))); + // Add documents with a fake timestamp, 1000 sec before + // "now", 2000 sec before "now", ...: + for (int i = 0; i < 100; i++) { - // Add documents with a fake timestamp, 1000 sec before - // "now", 2000 sec before "now", ...: - for (int i = 0; i < 100; i++) - { - Document doc = new Document(); - long then = nowSec - i * 1000; - // Add as doc values field, so we can compute range facets: - doc.Add(new NumericDocValuesField("timestamp", then)); - // Add as numeric field so we can drill-down: - doc.Add(new Int64Field("timestamp", then, Field.Store.NO)); - indexWriter.AddDocument(doc); - } - - // Open near-real-time searcher - searcher = new IndexSearcher(DirectoryReader.Open(indexWriter, true)); - - } // Disposes indexWriter + Document doc = new Document(); + long then = nowSec - i * 1000; + // Add as doc values field, so we can compute range facets: + doc.Add(new NumericDocValuesField("timestamp", then)); + // Add as numeric field so we can drill-down: + doc.Add(new Int64Field("timestamp", then, Field.Store.NO)); + indexWriter.AddDocument(doc); + } + + // Open near-real-time searcher + searcher = new IndexSearcher(DirectoryReader.Open(indexWriter, true)); } - private FacetsConfig GetConfig() + private static FacetsConfig GetConfig() { return new 
FacetsConfig(); } @@ -131,21 +128,18 @@ public void Dispose() /// Runs the search and drill-down examples and prints the results. public static void Main(string[] args) { - using (RangeFacetsExample example = new RangeFacetsExample()) - { - example.Index(); - - Console.WriteLine("Facet counting example:"); - Console.WriteLine("-----------------------"); - Console.WriteLine(example.Search()); - - Console.WriteLine("\n"); - Console.WriteLine("Facet drill-down example (timestamp/Past six hours):"); - Console.WriteLine("---------------------------------------------"); - TopDocs hits = example.DrillDown(example.PAST_SIX_HOURS); - Console.WriteLine(hits.TotalHits + " TotalHits"); - - } // Disposes example + using RangeFacetsExample example = new RangeFacetsExample(); + example.Index(); + + Console.WriteLine("Facet counting example:"); + Console.WriteLine("-----------------------"); + Console.WriteLine(example.Search()); + + Console.WriteLine("\n"); + Console.WriteLine("Facet drill-down example (timestamp/Past six hours):"); + Console.WriteLine("---------------------------------------------"); + TopDocs hits = example.DrillDown(example.PAST_SIX_HOURS); + Console.WriteLine(hits.TotalHits + " TotalHits"); } } } diff --git a/src/Lucene.Net.Demo/Facet/SimpleFacetsExample.cs b/src/Lucene.Net.Demo/Facet/SimpleFacetsExample.cs index 90eb472438..9f591e3e36 100644 --- a/src/Lucene.Net.Demo/Facet/SimpleFacetsExample.cs +++ b/src/Lucene.Net.Demo/Facet/SimpleFacetsExample.cs @@ -61,96 +61,95 @@ public SimpleFacetsExample() /// Build the example index. 
private void Index() { - using (IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, - new WhitespaceAnalyzer(EXAMPLE_VERSION)))) + using IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, + new WhitespaceAnalyzer(EXAMPLE_VERSION))); // Writes facet ords to a separate directory from the main index - using (DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir)) - { - - Document doc = new Document(); - doc.Add(new FacetField("Author", "Bob")); - doc.Add(new FacetField("Publish Date", "2010", "10", "15")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); + using DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); - doc = new Document(); - doc.Add(new FacetField("Author", "Lisa")); - doc.Add(new FacetField("Publish Date", "2010", "10", "20")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Bob"), + new FacetField("Publish Date", "2010", "10", "15") + })); - doc = new Document(); - doc.Add(new FacetField("Author", "Lisa")); - doc.Add(new FacetField("Publish Date", "2012", "1", "1")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Lisa"), + new FacetField("Publish Date", "2010", "10", "20") + })); - doc = new Document(); - doc.Add(new FacetField("Author", "Susan")); - doc.Add(new FacetField("Publish Date", "2012", "1", "7")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Lisa"), + new FacetField("Publish Date", "2012", "1", "1") + })); - doc = new Document(); - doc.Add(new FacetField("Author", "Frank")); - doc.Add(new FacetField("Publish Date", "1999", "5", "5")); - indexWriter.AddDocument(config.Build(taxoWriter, doc)); + 
indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Susan"), + new FacetField("Publish Date", "2012", "1", "7") + })); - } // Disposes indexWriter and taxoWriter + indexWriter.AddDocument(config.Build(taxoWriter, new Document + { + new FacetField("Author", "Frank"), + new FacetField("Publish Date", "1999", "5", "5") + })); } /// User runs a query and counts facets. private IList FacetsWithSearch() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - using (TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + using TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); + IndexSearcher searcher = new IndexSearcher(indexReader); - FacetsCollector fc = new FacetsCollector(); + FacetsCollector fc = new FacetsCollector(); - // MatchAllDocsQuery is for "browsing" (counts facets - // for all non-deleted docs in the index); normally - // you'd use a "normal" query: - FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); + // MatchAllDocsQuery is for "browsing" (counts facets + // for all non-deleted docs in the index); normally + // you'd use a "normal" query: + FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); - // Retrieve results - IList results = new List(); + Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc); + // Retrieve results + IList results = new List + { // Count both "Publish Date" and "Author" dimensions - Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc); - results.Add(facets.GetTopChildren(10, "Author")); - results.Add(facets.GetTopChildren(10, "Publish Date")); + facets.GetTopChildren(10, "Author"), + facets.GetTopChildren(10, "Publish Date") + }; - return results; - - } // Disposes indexReader and taxoReader + return results; } /// User runs a query and counts facets only 
without collecting the matching documents. private IList FacetsOnly() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - using (TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + using TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); + IndexSearcher searcher = new IndexSearcher(indexReader); - FacetsCollector fc = new FacetsCollector(); + FacetsCollector fc = new FacetsCollector(); - // MatchAllDocsQuery is for "browsing" (counts facets - // for all non-deleted docs in the index); normally - // you'd use a "normal" query: - searcher.Search(new MatchAllDocsQuery(), null /*Filter */, fc); + // MatchAllDocsQuery is for "browsing" (counts facets + // for all non-deleted docs in the index); normally + // you'd use a "normal" query: + searcher.Search(new MatchAllDocsQuery(), null /*Filter */, fc); - // Retrieve results - IList results = new List(); + Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc); + // Retrieve results + IList results = new List + { // Count both "Publish Date" and "Author" dimensions - Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc); - - results.Add(facets.GetTopChildren(10, "Author")); - results.Add(facets.GetTopChildren(10, "Publish Date")); - - return results; + facets.GetTopChildren(10, "Author"), + facets.GetTopChildren(10, "Publish Date") + }; - } // Disposes indexReader and taxoReader + return results; } /// @@ -159,27 +158,24 @@ private IList FacetsOnly() /// private FacetResult DrillDown() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - using (TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); - - // Passing no baseQuery means we drill down on all - // documents ("browse only"): - DrillDownQuery q = new 
DrillDownQuery(config); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + using TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); + IndexSearcher searcher = new IndexSearcher(indexReader); - // Now user drills down on Publish Date/2010: - q.Add("Publish Date", "2010"); - FacetsCollector fc = new FacetsCollector(); - FacetsCollector.Search(searcher, q, 10, fc); + // Passing no baseQuery means we drill down on all + // documents ("browse only"): + DrillDownQuery q = new DrillDownQuery(config); - // Retrieve results - Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc); - FacetResult result = facets.GetTopChildren(10, "Author"); + // Now user drills down on Publish Date/2010: + q.Add("Publish Date", "2010"); + FacetsCollector fc = new FacetsCollector(); + FacetsCollector.Search(searcher, q, 10, fc); - return result; + // Retrieve results + Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc); + FacetResult result = facets.GetTopChildren(10, "Author"); - } // Disposes indexReader and taxoReader + return result; } /// @@ -189,27 +185,24 @@ private FacetResult DrillDown() /// private IList DrillSideways() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - using (TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); - - // Passing no baseQuery means we drill down on all - // documents ("browse only"): - DrillDownQuery q = new DrillDownQuery(config); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + using TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); + IndexSearcher searcher = new IndexSearcher(indexReader); - // Now user drills down on Publish Date/2010: - q.Add("Publish Date", "2010"); + // Passing no baseQuery means we drill down on all + // documents ("browse only"): + DrillDownQuery q = new DrillDownQuery(config); - DrillSideways ds = new DrillSideways(searcher, 
config, taxoReader); - DrillSidewaysResult result = ds.Search(q, 10); + // Now user drills down on Publish Date/2010: + q.Add("Publish Date", "2010"); - // Retrieve results - IList facets = result.Facets.GetAllDims(10); + DrillSideways ds = new DrillSideways(searcher, config, taxoReader); + DrillSidewaysResult result = ds.Search(q, 10); - return facets; + // Retrieve results + IList facets = result.Facets.GetAllDims(10); - } // Disposes indexReader and taxoReader + return facets; } /// Runs the search example. diff --git a/src/Lucene.Net.Demo/Facet/SimpleSortedSetFacetsExample.cs b/src/Lucene.Net.Demo/Facet/SimpleSortedSetFacetsExample.cs index b2d6da10f5..b787015378 100644 --- a/src/Lucene.Net.Demo/Facet/SimpleSortedSetFacetsExample.cs +++ b/src/Lucene.Net.Demo/Facet/SimpleSortedSetFacetsExample.cs @@ -55,87 +55,86 @@ public class SimpleSortedSetFacetsExample /// Build the example index. private void Index() { - using (IndexWriter indexWriter = new IndexWriter(indexDir, + using IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(EXAMPLE_VERSION, - new WhitespaceAnalyzer(EXAMPLE_VERSION)))) + new WhitespaceAnalyzer(EXAMPLE_VERSION))); + + indexWriter.AddDocument(config.Build(new Document + { + new SortedSetDocValuesFacetField("Author", "Bob"), + new SortedSetDocValuesFacetField("Publish Year", "2010") + })); + + indexWriter.AddDocument(config.Build(new Document + { + new SortedSetDocValuesFacetField("Author", "Lisa"), + new SortedSetDocValuesFacetField("Publish Year", "2010") + })); + + indexWriter.AddDocument(config.Build(new Document + { + new SortedSetDocValuesFacetField("Author", "Lisa"), + new SortedSetDocValuesFacetField("Publish Year", "2012") + })); + + indexWriter.AddDocument(config.Build(new Document { - Document doc = new Document(); - doc.Add(new SortedSetDocValuesFacetField("Author", "Bob")); - doc.Add(new SortedSetDocValuesFacetField("Publish Year", "2010")); - indexWriter.AddDocument(config.Build(doc)); - - doc = new Document(); 
- doc.Add(new SortedSetDocValuesFacetField("Author", "Lisa")); - doc.Add(new SortedSetDocValuesFacetField("Publish Year", "2010")); - indexWriter.AddDocument(config.Build(doc)); - - doc = new Document(); - doc.Add(new SortedSetDocValuesFacetField("Author", "Lisa")); - doc.Add(new SortedSetDocValuesFacetField("Publish Year", "2012")); - indexWriter.AddDocument(config.Build(doc)); - - doc = new Document(); - doc.Add(new SortedSetDocValuesFacetField("Author", "Susan")); - doc.Add(new SortedSetDocValuesFacetField("Publish Year", "2012")); - indexWriter.AddDocument(config.Build(doc)); - - doc = new Document(); - doc.Add(new SortedSetDocValuesFacetField("Author", "Frank")); - doc.Add(new SortedSetDocValuesFacetField("Publish Year", "1999")); - indexWriter.AddDocument(config.Build(doc)); - - } // Disposes indexWriter + new SortedSetDocValuesFacetField("Author", "Susan"), + new SortedSetDocValuesFacetField("Publish Year", "2012") + })); + + indexWriter.AddDocument(config.Build(new Document + { + new SortedSetDocValuesFacetField("Author", "Frank"), + new SortedSetDocValuesFacetField("Publish Year", "1999") + })); } /// User runs a query and counts facets. 
private IList Search() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); - SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(indexReader); - - // Aggregatses the facet counts - FacetsCollector fc = new FacetsCollector(); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + IndexSearcher searcher = new IndexSearcher(indexReader); + SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(indexReader); - // MatchAllDocsQuery is for "browsing" (counts facets - // for all non-deleted docs in the index); normally - // you'd use a "normal" query: - FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); + // Aggregatses the facet counts + FacetsCollector fc = new FacetsCollector(); - // Retrieve results - Facets facets = new SortedSetDocValuesFacetCounts(state, fc); + // MatchAllDocsQuery is for "browsing" (counts facets + // for all non-deleted docs in the index); normally + // you'd use a "normal" query: + FacetsCollector.Search(searcher, new MatchAllDocsQuery(), 10, fc); - IList results = new List(); - results.Add(facets.GetTopChildren(10, "Author")); - results.Add(facets.GetTopChildren(10, "Publish Year")); + // Retrieve results + Facets facets = new SortedSetDocValuesFacetCounts(state, fc); - return results; + IList results = new List + { + facets.GetTopChildren(10, "Author"), + facets.GetTopChildren(10, "Publish Year") + }; - } // Disposes indexWriter + return results; } /// User drills down on 'Publish Year/2010'. 
private FacetResult DrillDown() { - using (DirectoryReader indexReader = DirectoryReader.Open(indexDir)) - { - IndexSearcher searcher = new IndexSearcher(indexReader); - SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(indexReader); - - // Now user drills down on Publish Year/2010: - DrillDownQuery q = new DrillDownQuery(config); - q.Add("Publish Year", "2010"); - FacetsCollector fc = new FacetsCollector(); - FacetsCollector.Search(searcher, q, 10, fc); + using DirectoryReader indexReader = DirectoryReader.Open(indexDir); + IndexSearcher searcher = new IndexSearcher(indexReader); + SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(indexReader); - // Retrieve results - Facets facets = new SortedSetDocValuesFacetCounts(state, fc); - FacetResult result = facets.GetTopChildren(10, "Author"); + // Now user drills down on Publish Year/2010: + DrillDownQuery q = new DrillDownQuery(config); + q.Add("Publish Year", "2010"); + FacetsCollector fc = new FacetsCollector(); + FacetsCollector.Search(searcher, q, 10, fc); - return result; + // Retrieve results + Facets facets = new SortedSetDocValuesFacetCounts(state, fc); + FacetResult result = facets.GetTopChildren(10, "Author"); - } // Disposes indexReader + return result; } /// Runs the search example. diff --git a/src/Lucene.Net.Demo/IndexFiles.cs b/src/Lucene.Net.Demo/IndexFiles.cs index 1abf35c670..af988e971b 100644 --- a/src/Lucene.Net.Demo/IndexFiles.cs +++ b/src/Lucene.Net.Demo/IndexFiles.cs @@ -37,10 +37,8 @@ namespace Lucene.Net.Demo /// This is a command-line application demonstrating simple Lucene indexing. /// Run it with no command-line arguments for usage information. /// - public class IndexFiles + public static class IndexFiles // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - private IndexFiles() { } - /// Index all text files under a directory. 
public static void Main(string[] args) { @@ -176,47 +174,45 @@ internal static void IndexDocs(IndexWriter writer, DirectoryInfo directoryInfo) /// internal static void IndexDocs(IndexWriter writer, FileInfo file) { - using (FileStream fs = new FileStream(file.FullName, FileMode.Open, FileAccess.Read)) + using FileStream fs = new FileStream(file.FullName, FileMode.Open, FileAccess.Read); + // make a new, empty document + Document doc = new Document(); + + // Add the path of the file as a field named "path". Use a + // field that is indexed (i.e. searchable), but don't tokenize + // the field into separate words and don't index term frequency + // or positional information: + Field pathField = new StringField("path", file.FullName, Field.Store.YES); + doc.Add(pathField); + + // Add the last modified date of the file a field named "modified". + // Use a LongField that is indexed (i.e. efficiently filterable with + // NumericRangeFilter). This indexes to milli-second resolution, which + // is often too fine. You could instead create a number based on + // year/month/day/hour/minutes/seconds, down the resolution you require. + // For example the long value 2011021714 would mean + // February 17, 2011, 2-3 PM. + doc.Add(new Int64Field("modified", file.LastWriteTimeUtc.Ticks, Field.Store.NO)); + + // Add the contents of the file to a field named "contents". Specify a Reader, + // so that the text of the file is tokenized and indexed, but not stored. + // Note that FileReader expects the file to be in UTF-8 encoding. + // If that's not the case searching for special characters will fail. + doc.Add(new TextField("contents", new StreamReader(fs, Encoding.UTF8))); + + if (writer.Config.OpenMode == OpenMode.CREATE) { - // make a new, empty document - Document doc = new Document(); - - // Add the path of the file as a field named "path". Use a - // field that is indexed (i.e. 
searchable), but don't tokenize - // the field into separate words and don't index term frequency - // or positional information: - Field pathField = new StringField("path", file.FullName, Field.Store.YES); - doc.Add(pathField); - - // Add the last modified date of the file a field named "modified". - // Use a LongField that is indexed (i.e. efficiently filterable with - // NumericRangeFilter). This indexes to milli-second resolution, which - // is often too fine. You could instead create a number based on - // year/month/day/hour/minutes/seconds, down the resolution you require. - // For example the long value 2011021714 would mean - // February 17, 2011, 2-3 PM. - doc.Add(new Int64Field("modified", file.LastWriteTimeUtc.Ticks, Field.Store.NO)); - - // Add the contents of the file to a field named "contents". Specify a Reader, - // so that the text of the file is tokenized and indexed, but not stored. - // Note that FileReader expects the file to be in UTF-8 encoding. - // If that's not the case searching for special characters will fail. 
- doc.Add(new TextField("contents", new StreamReader(fs, Encoding.UTF8))); - - if (writer.Config.OpenMode == OpenMode.CREATE) - { - // New index, so we just add the document (no old document can be there): - Console.WriteLine("adding " + file); - writer.AddDocument(doc); - } - else - { - // Existing index (an old copy of this document may have been indexed) so - // we use updateDocument instead to replace the old one matching the exact - // path, if present: - Console.WriteLine("updating " + file); - writer.UpdateDocument(new Term("path", file.FullName), doc); - } + // New index, so we just add the document (no old document can be there): + Console.WriteLine("adding " + file); + writer.AddDocument(doc); + } + else + { + // Existing index (an old copy of this document may have been indexed) so + // we use updateDocument instead to replace the old one matching the exact + // path, if present: + Console.WriteLine("updating " + file); + writer.UpdateDocument(new Term("path", file.FullName), doc); } } } diff --git a/src/Lucene.Net.Demo/Lucene.Net.Demo.csproj b/src/Lucene.Net.Demo/Lucene.Net.Demo.csproj index bf9e0785bd..7d159410f6 100644 --- a/src/Lucene.Net.Demo/Lucene.Net.Demo.csproj +++ b/src/Lucene.Net.Demo/Lucene.Net.Demo.csproj @@ -31,7 +31,9 @@ Simple example code for the Lucene.Net full-text search engine library from The Apache Software Foundation. $(PackageTags);demo bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml + $(NoWarn);1591;1573 + $(NoWarn);IDE0060 diff --git a/src/Lucene.Net.Demo/SearchFiles.cs b/src/Lucene.Net.Demo/SearchFiles.cs index 29106ba4c7..fb933fbdd5 100644 --- a/src/Lucene.Net.Demo/SearchFiles.cs +++ b/src/Lucene.Net.Demo/SearchFiles.cs @@ -38,10 +38,8 @@ namespace Lucene.Net.Demo /// /// Simple command-line based search demo. 
/// - public class SearchFiles + public static class SearchFiles // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - private SearchFiles() { } - /// Simple command-line based search demo. public static void Main(string[] args) { @@ -108,66 +106,64 @@ public static void Main(string[] args) } } - using (IndexReader reader = DirectoryReader.Open(FSDirectory.Open(index))) - { - IndexSearcher searcher = new IndexSearcher(reader); - // :Post-Release-Update-Version.LUCENE_XY: - Analyzer analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48); + using IndexReader reader = DirectoryReader.Open(FSDirectory.Open(index)); + IndexSearcher searcher = new IndexSearcher(reader); + // :Post-Release-Update-Version.LUCENE_XY: + Analyzer analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48); - TextReader input = null; - if (queries != null) + TextReader input = null; + if (queries != null) + { + input = new StreamReader(new FileStream(queries, FileMode.Open, FileAccess.Read), Encoding.UTF8); + } + else + { + input = Console.In; + } + // :Post-Release-Update-Version.LUCENE_XY: + QueryParser parser = new QueryParser(LuceneVersion.LUCENE_48, field, analyzer); + while (true) + { + if (queries == null && queryString == null) { - input = new StreamReader(new FileStream(queries, FileMode.Open, FileAccess.Read), Encoding.UTF8); + // prompt the user + Console.WriteLine("Enter query (or press Enter to exit): "); } - else + + string line = queryString ?? input.ReadLine(); + + if (line == null || line.Length == 0) { - input = Console.In; + break; } - // :Post-Release-Update-Version.LUCENE_XY: - QueryParser parser = new QueryParser(LuceneVersion.LUCENE_48, field, analyzer); - while (true) + + line = line.Trim(); + if (line.Length == 0) { - if (queries == null && queryString == null) - { - // prompt the user - Console.WriteLine("Enter query (or press Enter to exit): "); - } + break; + } - string line = queryString != null ? 
queryString : input.ReadLine(); + Query query = parser.Parse(line); + Console.WriteLine("Searching for: " + query.ToString(field)); - if (line == null || line.Length == 0) - { - break; - } - - line = line.Trim(); - if (line.Length == 0) + if (repeat > 0) // repeat & time as benchmark + { + DateTime start = DateTime.UtcNow; + for (int i = 0; i < repeat; i++) { - break; - } - - Query query = parser.Parse(line); - Console.WriteLine("Searching for: " + query.ToString(field)); - - if (repeat > 0) // repeat & time as benchmark - { - DateTime start = DateTime.UtcNow; - for (int i = 0; i < repeat; i++) - { - searcher.Search(query, null, 100); - } - DateTime end = DateTime.UtcNow; - Console.WriteLine("Time: " + (end - start).TotalMilliseconds + "ms"); + searcher.Search(query, null, 100); } + DateTime end = DateTime.UtcNow; + Console.WriteLine("Time: " + (end - start).TotalMilliseconds + "ms"); + } - DoPagingSearch(searcher, query, hitsPerPage, raw, queries == null && queryString == null); + DoPagingSearch(searcher, query, hitsPerPage, raw, queries == null && queryString == null); - if (queryString != null) - { - break; - } + if (queryString != null) + { + break; } - } // Disposes reader + } } /// diff --git a/src/Lucene.Net.Expressions/Bindings.cs b/src/Lucene.Net.Expressions/Bindings.cs index 732bffb9c2..303a2c3703 100644 --- a/src/Lucene.Net.Expressions/Bindings.cs +++ b/src/Lucene.Net.Expressions/Bindings.cs @@ -27,6 +27,7 @@ namespace Lucene.Net.Expressions /// relevance score, or other s. /// @lucene.experimental /// + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1012:Abstract types should not have constructors", Justification = "Required for Reflection")] public abstract class Bindings { /// Sole constructor. 
@@ -46,7 +47,7 @@ public abstract class Bindings /// /// Returns a over relevance scores /// - protected ValueSource GetScoreValueSource() + protected static ValueSource GetScoreValueSource() // LUCENENET: CA1822: Mark members as static { return new ScoreValueSource(); } diff --git a/src/Lucene.Net.Expressions/Expression.cs b/src/Lucene.Net.Expressions/Expression.cs index c31b09b1e1..dfa292b6bb 100644 --- a/src/Lucene.Net.Expressions/Expression.cs +++ b/src/Lucene.Net.Expressions/Expression.cs @@ -42,6 +42,7 @@ namespace Lucene.Net.Expressions /// @lucene.experimental /// /// + [SuppressMessage("Design", "CA1012:Abstract types should not have constructors", Justification = "Required for Reflection")] public abstract class Expression { /// The original source text diff --git a/src/Lucene.Net.Expressions/ExpressionComparator.cs b/src/Lucene.Net.Expressions/ExpressionComparator.cs index 034020176e..15db756646 100644 --- a/src/Lucene.Net.Expressions/ExpressionComparator.cs +++ b/src/Lucene.Net.Expressions/ExpressionComparator.cs @@ -33,7 +33,7 @@ internal class ExpressionComparer : FieldComparer private double bottom; private double topValue; - private ValueSource source; + private readonly ValueSource source; // LUCENENET: marked readonly private FunctionValues scores; private AtomicReaderContext readerContext; diff --git a/src/Lucene.Net.Expressions/ExpressionFunctionValues.cs b/src/Lucene.Net.Expressions/ExpressionFunctionValues.cs index 8b61dd3d55..3f8dbdfb01 100644 --- a/src/Lucene.Net.Expressions/ExpressionFunctionValues.cs +++ b/src/Lucene.Net.Expressions/ExpressionFunctionValues.cs @@ -35,16 +35,8 @@ internal class ExpressionFunctionValues : DoubleDocValues internal ExpressionFunctionValues(ValueSource parent, Expression expression, FunctionValues[] functionValues) : base(parent) { - if (expression == null) - { - throw new ArgumentNullException(); - } - if (functionValues == null) - { - throw new ArgumentNullException(); - } - this.expression = expression; - 
this.functionValues = functionValues; + this.expression = expression ?? throw new ArgumentNullException(nameof(expression)); + this.functionValues = functionValues ?? throw new ArgumentNullException(nameof(functionValues)); } public override double DoubleVal(int document) diff --git a/src/Lucene.Net.Expressions/ExpressionValueSource.cs b/src/Lucene.Net.Expressions/ExpressionValueSource.cs index bb4729804d..e99e231028 100644 --- a/src/Lucene.Net.Expressions/ExpressionValueSource.cs +++ b/src/Lucene.Net.Expressions/ExpressionValueSource.cs @@ -39,13 +39,10 @@ internal ExpressionValueSource(Bindings bindings, Expression expression) { if (bindings == null) { - throw new ArgumentNullException(); + throw new ArgumentNullException(nameof(bindings)); } - if (expression == null) - { - throw new ArgumentNullException(); - } - this.expression = expression; + + this.expression = expression ?? throw new ArgumentNullException(nameof(expression)); variables = new ValueSource[expression.Variables.Length]; bool needsScores = false; for (int i = 0; i < variables.Length; i++) @@ -57,8 +54,7 @@ internal ExpressionValueSource(Bindings bindings, Expression expression) } else { - var valueSource = source as ExpressionValueSource; - if (valueSource != null) + if (source is ExpressionValueSource valueSource) { if (valueSource.NeedsScores) { @@ -85,20 +81,23 @@ public override FunctionValues GetValues(IDictionary context, AtomicReaderContex if (valuesCache == null) { valuesCache = new Dictionary(); - context = new Hashtable(context); - context["valuesCache"] = valuesCache; + context = new Hashtable(context) + { + ["valuesCache"] = valuesCache + }; } FunctionValues[] externalValues = new FunctionValues[expression.Variables.Length]; for (int i = 0; i < variables.Length; ++i) { string externalName = expression.Variables[i]; - FunctionValues values; - if (!valuesCache.TryGetValue(externalName,out values)) + if (!valuesCache.TryGetValue(externalName, out FunctionValues values)) { values = 
variables[i].GetValues(context, readerContext); if (values == null) { - throw new InvalidOperationException("Internal error. External (" + externalName + ") does not exist."); +#pragma warning disable IDE0016 // Use 'throw' expression + throw new InvalidOperationException($"Internal error. External ({externalName}) does not exist."); +#pragma warning restore IDE0016 // Use 'throw' expression } valuesCache[externalName] = values; } diff --git a/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs b/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs index 02e2b4135c..0a122de3ac 100644 --- a/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs +++ b/src/Lucene.Net.Expressions/JS/JavascriptCompiler.cs @@ -144,10 +144,12 @@ public static Expression Compile(string sourceText, IDictionary class. /// +#pragma warning disable IDE0051 // Remove unused private members private static void UnusedTestCompile() +#pragma warning restore IDE0051 // Remove unused private members { FunctionValues f = null; - double ret = f.DoubleVal(2); + /*double ret = */f.DoubleVal(2); // LUCENENET: IDE0059: Remove unnecessary value assignment } /// Constructs a compiler for expressions. 
@@ -237,8 +239,7 @@ private void RecursiveCompile(ITree current, Type expected) ITree identifier = current.GetChild(0); string call = identifier.Text; int arguments = current.ChildCount - 1; - MethodInfo method; - if (!functions.TryGetValue(call, out method) || method == null) + if (!functions.TryGetValue(call, out MethodInfo method) || method == null) { throw new ArgumentException("Unrecognized method call (" + call + ")."); } @@ -290,14 +291,13 @@ private void RecursiveCompile(ITree current, Type expected) if (bitwiseOps.Any(s => sourceText.Contains(s))) { - int val; - if (int.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out val)) + if (int.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out int val)) { gen.Emit(OpCodes.Ldc_I4, val); } else { - gen.Emit(OpCodes.Ldc_I8,long.Parse(text, CultureInfo.InvariantCulture)); + gen.Emit(OpCodes.Ldc_I8, long.Parse(text, CultureInfo.InvariantCulture)); gen.Emit(OpCodes.Conv_Ovf_U4_Un); } } @@ -511,7 +511,9 @@ private void PushCondEq(OpCode opCode, ITree current, Type expected) gen.Emit(OpCodes.Conv_R8); } +#pragma warning disable IDE0060 // Remove unused parameter private void PushArith(OpCode op, ITree current, Type expected) +#pragma warning restore IDE0060 // Remove unused parameter { PushBinaryOp(op, current, typeof(double), typeof(double)); } diff --git a/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs b/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs index 397b44ae61..0e9a70f3a5 100644 --- a/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs +++ b/src/Lucene.Net.Expressions/JS/JavascriptLexer.cs @@ -26,6 +26,7 @@ using System.Runtime.Serialization; #endif using Antlr.Runtime; +using Lucene.Net.Support; namespace Lucene.Net.Expressions.JS { @@ -125,7 +126,7 @@ public override void DisplayRecognitionError(string[] tokenNames, RecognitionExc // delegators public virtual Lexer[] GetDelegates() { - return new Lexer[] { }; + return Arrays.Empty(); } public JavascriptLexer() @@ -953,8 
+954,7 @@ public void MDECIMAL() int type = DECIMAL; int channel = TokenChannels.Default; // src/java/org/apache/lucene/expressions/js/Javascript.g:347:5: ( DECIMALINTEGER AT_DOT ( DECIMALDIGIT )* ( EXPONENT )? | AT_DOT ( DECIMALDIGIT )+ ( EXPONENT )? | DECIMALINTEGER ( EXPONENT )? ) - int alt9 = 3; - alt9 = m_dfa9.Predict(input); + int alt9 = m_dfa9.Predict(input); // LUCENENET: IDE0059: Remove unnecessary value assignment switch (alt9) { case 1: @@ -1191,7 +1191,7 @@ public void MHEX() // src/java/org/apache/lucene/expressions/js/Javascript.g:357:5: ( ( '0x' | '0X' ) ( HEXDIGIT )+ ) // src/java/org/apache/lucene/expressions/js/Javascript.g:357:7: ( '0x' | '0X' ) ( HEXDIGIT )+ // src/java/org/apache/lucene/expressions/js/Javascript.g:357:7: ( '0x' | '0X' ) - int alt11 = 2; + int alt11/* = 2*/; // LUCENENET: IDE0059: Remove unnecessary value assignment int LA11_0 = input.LA(1); if ((LA11_0 == '0')) { @@ -1305,7 +1305,7 @@ public void MDECIMALINTEGER() try { // src/java/org/apache/lucene/expressions/js/Javascript.g:363:5: ( '0' | '1' .. 
'9' ( DECIMALDIGIT )* ) - int alt14 = 2; + int alt14/* = 2*/; // LUCENENET: IDE0059: Remove unnecessary value assignment int LA14_0 = input.LA(1); if ((LA14_0 == '0')) { @@ -1566,7 +1566,7 @@ public void MOCTALDIGIT() public override void mTokens() { // src/java/org/apache/lucene/expressions/js/Javascript.g:1:8: ( AT_ADD | AT_BIT_AND | AT_BIT_NOT | AT_BIT_OR | AT_BIT_SHL | AT_BIT_SHR | AT_BIT_SHU | AT_BIT_XOR | AT_BOOL_AND | AT_BOOL_NOT | AT_BOOL_OR | AT_COLON | AT_COMMA | AT_COMP_EQ | AT_COMP_GT | AT_COMP_GTE | AT_COMP_LT | AT_COMP_LTE | AT_COMP_NEQ | AT_COND_QUE | AT_DIVIDE | AT_DOT | AT_LPAREN | AT_MODULO | AT_MULTIPLY | AT_RPAREN | AT_SUBTRACT | NAMESPACE_ID | WS | DECIMAL | OCTAL | HEX ) - int alt17 = 32; + int alt17/* = 32*/; // LUCENENET: IDE0059: Remove unnecessary value assignment switch (input.LA(1)) { case '+': @@ -2111,17 +2111,17 @@ public override void mTokens() protected JavascriptLexer.DFA9 m_dfa9; - internal static readonly string DFA9_eotS = "\x1\uffff\x2\x4\x3\uffff\x1\x4"; + internal const string DFA9_eotS = "\x1\uffff\x2\x4\x3\uffff\x1\x4"; - internal static readonly string DFA9_eofS = "\x7\uffff"; + internal const string DFA9_eofS = "\x7\uffff"; - internal static readonly string DFA9_minS = "\x3\x30\x3\uffff\x1\x30"; + internal const string DFA9_minS = "\x3\x30\x3\uffff\x1\x30"; - internal static readonly string DFA9_maxS = "\x1\x49\x1\x30\x1\x49\x3\uffff\x1\x49"; + internal const string DFA9_maxS = "\x1\x49\x1\x30\x1\x49\x3\uffff\x1\x49"; - internal static readonly string DFA9_acceptS = "\x3\uffff\x1\x2\x1\x3\x1\x1\x1\uffff"; + internal const string DFA9_acceptS = "\x3\uffff\x1\x2\x1\x3\x1\x1\x1\uffff"; - internal static readonly string DFA9_specialS = "\x7\uffff}>"; + internal const string DFA9_specialS = "\x7\uffff}>"; internal static readonly string[] DFA9_transitionS = new string[] { "\x1\x3\x1\uffff\x1\x1\xb\x2" , "\x1\x5", "\x1\x5\x1\uffff\xc\x6", string.Empty, string.Empty, string.Empty, "\x1\x5\x1\uffff\xc\x6" @@ -2173,7 +2173,9 @@ 
public DFA9(JavascriptLexer _enclosing, BaseRecognizer recognizer) public override string Description => "346:1: DECIMAL : ( DECIMALINTEGER AT_DOT ( DECIMALDIGIT )* ( EXPONENT )? | AT_DOT ( DECIMALDIGIT )+ ( EXPONENT )? | DECIMALINTEGER ( EXPONENT )? );"; +#pragma warning disable IDE0052 // Remove unread private members private readonly JavascriptLexer _enclosing; +#pragma warning restore IDE0052 // Remove unread private members } } diff --git a/src/Lucene.Net.Expressions/JS/JavascriptParser.cs b/src/Lucene.Net.Expressions/JS/JavascriptParser.cs index 9f7a866bed..8da6329b99 100644 --- a/src/Lucene.Net.Expressions/JS/JavascriptParser.cs +++ b/src/Lucene.Net.Expressions/JS/JavascriptParser.cs @@ -128,7 +128,7 @@ internal class JavascriptParser : Parser // delegates public virtual Parser[] GetDelegates() { - return new Parser[] { }; + return Arrays.Empty(); } public JavascriptParser(CommonTokenStream input) @@ -381,8 +381,8 @@ public class ExpressionReturn : ParserRuleReturnScope public AstParserRuleReturnScope Expression() { var retval = new AstParserRuleReturnScope { Start = input.LT(1) }; - CommonTree root = null; - IToken EOF2 = null; + CommonTree root; // LUCENENET: IDE0059: Remove unnecessary value assignment + IToken EOF2; // LUCENENET: IDE0059: Remove unnecessary value assignment AstParserRuleReturnScope conditional1; //CommonTree EOF2_tree = null; // LUCENENET NOTE: Not used try @@ -420,7 +420,7 @@ public AstParserRuleReturnScope Conditional() var retval = new AstParserRuleReturnScope { Start = input.LT(1) }; CommonTree root_0; IToken AT_COND_QUE4; - IToken AT_COLON6 = null; + //IToken AT_COLON6 = null; // LUCENENET: IDE0059: Remove unnecessary value assignment AstParserRuleReturnScope logical_or3; AstParserRuleReturnScope conditional5; AstParserRuleReturnScope conditional7; @@ -455,7 +455,7 @@ public AstParserRuleReturnScope Conditional() conditional5 = Conditional(); state._fsp--; m_adaptor.AddChild(root_0, conditional5.Tree); - AT_COLON6 = 
(IToken)Match(input, AT_COLON, FOLLOW_AT_COLON_in_conditional765); + /*AT_COLON6 = (IToken)*/Match(input, AT_COLON, FOLLOW_AT_COLON_in_conditional765); // LUCENENET: IDE0059: Remove unnecessary value assignment PushFollow(FOLLOW_conditional_in_conditional768); conditional7 = Conditional(); state._fsp--; @@ -558,7 +558,7 @@ public AstParserRuleReturnScope Logical_And() IToken AT_BOOL_AND12; AstParserRuleReturnScope bitwise_or11; AstParserRuleReturnScope bitwise_or13; - CommonTree AT_BOOL_AND12_tree = null; + CommonTree AT_BOOL_AND12_tree; // LUCENENET: IDE0059: Remove unnecessary value assignment try { { @@ -1545,9 +1545,9 @@ public AstParserRuleReturnScope Primary() { var retval = new AstParserRuleReturnScope { Start = input.LT(1) }; CommonTree root = null; - IToken AT_LPAREN51 = null; - IToken AT_RPAREN53 = null; - CommonTree NAMESPACE_ID49_tree = null; + IToken AT_LPAREN51; // LUCENENET: IDE0059: Remove unnecessary value assignment + IToken AT_RPAREN53; // LUCENENET: IDE0059: Remove unnecessary value assignment + CommonTree NAMESPACE_ID49_tree; // LUCENENET: IDE0059: Remove unnecessary value assignment //CommonTree AT_LPAREN51_tree = null; // LUCENENET NOTE: Not used //CommonTree AT_RPAREN53_tree = null; // LUCENENET NOTE: Not used try @@ -1640,10 +1640,10 @@ public AstParserRuleReturnScope Primary() public AstParserRuleReturnScope Arguments() { var retval = new AstParserRuleReturnScope { Start = input.LT(1) }; - CommonTree root = null; - IToken AT_LPAREN54 = null; - IToken AT_COMMA56 = null; - IToken AT_RPAREN58 = null; + CommonTree root; // LUCENENET: IDE0059: Remove unnecessary value assignment + IToken AT_LPAREN54; // LUCENENET: IDE0059: Remove unnecessary value assignment + IToken AT_COMMA56; // LUCENENET: IDE0059: Remove unnecessary value assignment + IToken AT_RPAREN58; // LUCENENET: IDE0059: Remove unnecessary value assignment //CommonTree AT_LPAREN54_tree = null; // LUCENENET NOTE: Not used //CommonTree AT_COMMA56_tree = null; // LUCENENET NOTE: Not used 
//CommonTree AT_RPAREN58_tree = null; // LUCENENET NOTE: Not used diff --git a/src/Lucene.Net.Expressions/SimpleBindings.cs b/src/Lucene.Net.Expressions/SimpleBindings.cs index c1d5bc9076..14e676e474 100644 --- a/src/Lucene.Net.Expressions/SimpleBindings.cs +++ b/src/Lucene.Net.Expressions/SimpleBindings.cs @@ -42,7 +42,7 @@ namespace Lucene.Net.Expressions /// /// @lucene.experimental /// - public sealed class SimpleBindings : Bindings + public sealed class SimpleBindings : Bindings // LUCENENET TODO: Implement collection initializer to make populating easier { internal readonly IDictionary map = new Dictionary(); @@ -76,14 +76,12 @@ public void Add(string name, Expression expression) public override ValueSource GetValueSource(string name) { - object o; // LUCENENET NOTE: Directly looking up a missing key will throw a KeyNotFoundException - if (!map.TryGetValue(name, out o)) + if (!map.TryGetValue(name, out object o)) { throw new ArgumentException("Invalid reference '" + name + "'"); } - var expression = o as Expression; - if (expression != null) + if (o is Expression expression) { return expression.GetValueSource(this); } @@ -128,10 +126,8 @@ public void Validate() { foreach (object o in map.Values) { - if (o is Expression) + if (o is Expression expr) { - Expression expr = (Expression)o; - expr.GetValueSource(this); } } diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs index c9ac679ed1..f7fca270b7 100644 --- a/src/Lucene.Net.Facet/DrillDownQuery.cs +++ b/src/Lucene.Net.Facet/DrillDownQuery.cs @@ -2,7 +2,6 @@ using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; @@ -50,7 +49,7 @@ namespace Lucene.Net.Facet /// @lucene.experimental /// /// - public sealed class DrillDownQuery : Query + public sealed class DrillDownQuery : Query // LUCENENET TODO: Add collection initializer to make populating easier { 
/// /// Creates a drill-down term. @@ -186,8 +185,10 @@ public void Add(string dim, params string[] path) } string indexedField = config.GetDimConfig(dim).IndexFieldName; - BooleanQuery bq = new BooleanQuery(true); // disable coord - bq.Add(new TermQuery(Term(indexedField, dim, path)), Occur.SHOULD); + BooleanQuery bq = new BooleanQuery(true) + { + { new TermQuery(Term(indexedField, dim, path)), Occur.SHOULD } + }; // disable coord Add(dim, bq); } @@ -242,8 +243,7 @@ public void Add(string dim, Filter subFilter) internal static Filter GetFilter(Query query) { - var scoreQuery = query as ConstantScoreQuery; - if (scoreQuery != null) + if (query is ConstantScoreQuery scoreQuery) { ConstantScoreQuery csq = scoreQuery; Filter filter = csq.Filter; diff --git a/src/Lucene.Net.Facet/DrillSidewaysQuery.cs b/src/Lucene.Net.Facet/DrillSidewaysQuery.cs index 2758c5fbaf..bab7ed3044 100644 --- a/src/Lucene.Net.Facet/DrillSidewaysQuery.cs +++ b/src/Lucene.Net.Facet/DrillSidewaysQuery.cs @@ -160,12 +160,12 @@ public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool score { dims[dim] = new DrillSidewaysScorer.DocsAndCost(); dims[dim].sidewaysCollector = outerInstance.drillSidewaysCollectors[dim]; - if (drillDowns[dim] is Filter) + if (drillDowns[dim] is Filter filter) { // Pass null for acceptDocs because we already // passed it to baseScorer and baseScorer is // MUST'd here - DocIdSet dis = ((Filter)drillDowns[dim]).GetDocIdSet(context, null); + DocIdSet dis = filter.GetDocIdSet(context, null); if (dis == null) { diff --git a/src/Lucene.Net.Facet/Facets.cs b/src/Lucene.Net.Facet/Facets.cs index 9a0ea3e8fe..50b52635f6 100644 --- a/src/Lucene.Net.Facet/Facets.cs +++ b/src/Lucene.Net.Facet/Facets.cs @@ -29,7 +29,7 @@ public abstract class Facets /// /// Default constructor. 
/// - public Facets() + protected Facets() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs index b56ff02a0e..ab07ebe23c 100644 --- a/src/Lucene.Net.Facet/FacetsCollector.cs +++ b/src/Lucene.Net.Facet/FacetsCollector.cs @@ -53,7 +53,7 @@ protected abstract class Docs /// /// Sole constructor. /// - public Docs() + protected Docs() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs index 51830651b4..8a430d07c5 100644 --- a/src/Lucene.Net.Facet/FacetsConfig.cs +++ b/src/Lucene.Net.Facet/FacetsConfig.cs @@ -3,7 +3,7 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics; +using System.Runtime.CompilerServices; using System.Text; using System.Threading; using JCG = J2N.Collections.Generic; @@ -137,6 +137,7 @@ public FacetsConfig() /// /// Get the current configuration for a dimension. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual DimConfig GetDimConfig(string dimName) { lock (syncLock) @@ -153,6 +154,7 @@ public virtual DimConfig GetDimConfig(string dimName) /// Pass true if this dimension is hierarchical /// (has depth > 1 paths). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void SetHierarchical(string dimName, bool v) { lock (syncLock) @@ -173,6 +175,7 @@ public virtual void SetHierarchical(string dimName, bool v) /// Pass true if this dimension may have more than /// one value per document. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void SetMultiValued(string dimName, bool v) { lock (syncLock) @@ -194,6 +197,7 @@ public virtual void SetMultiValued(string dimName, bool v) /// accurate counts of the dimension, i.e. how many /// hits have this dimension. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void SetRequireDimCount(string dimName, bool v) { lock (syncLock) @@ -236,6 +240,7 @@ public virtual void SetIndexFieldName(string dimName, string indexFieldName) /// public virtual IDictionary DimConfigs => fieldTypes; + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void CheckSeen(ISet seenDims, string dim) { if (seenDims.Contains(dim)) @@ -255,6 +260,7 @@ private static void CheckSeen(ISet seenDims, string dim) /// input one! /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Document Build(Document doc) { return Build(null, doc); @@ -586,7 +592,8 @@ protected virtual BytesRef DedupAndEncode(Int32sRef ordinals) return new BytesRef(bytes, 0, upto); } - private void CheckTaxoWriter(ITaxonomyWriter taxoWriter) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static void CheckTaxoWriter(ITaxonomyWriter taxoWriter) // LUCENENET: CA1822: Mark members as static { if (taxoWriter == null) { diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs index c8b8998ef0..7fa886cf86 100644 --- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs +++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs @@ -47,15 +47,15 @@ public Int64RangeCounter(Int64Range[] ranges) // track the start vs end case separately because if a // given point is both, then it must be its own // elementary interval: - IDictionary endsMap = new Dictionary(); - - endsMap[long.MinValue] = 1; - endsMap[long.MaxValue] = 2; + IDictionary endsMap = new Dictionary + { + [long.MinValue] = 1, + [long.MaxValue] = 2 + }; foreach (Int64Range range in ranges) { - int? cur; - if (!endsMap.TryGetValue(range.minIncl, out cur)) + if (!endsMap.TryGetValue(range.minIncl, out int? 
cur)) { endsMap[range.minIncl] = 1; } @@ -80,7 +80,7 @@ public Int64RangeCounter(Int64Range[] ranges) // Build elementaryIntervals (a 1D Venn diagram): IList elementaryIntervals = new List(); int upto0 = 1; - long v = endsList[0].HasValue ? endsList[0].Value : 0; + long v = endsList[0] ?? 0; long prev; if (endsMap[v] == 3) { @@ -94,8 +94,8 @@ public Int64RangeCounter(Int64Range[] ranges) while (upto0 < endsList.Count) { - v = endsList[upto0].HasValue ? endsList[upto0].Value : 0; - int flags = endsMap[v].HasValue ? endsMap[v].Value : 0; + v = endsList[upto0] ?? 0; + int flags = endsMap[v] ?? 0; //System.out.println(" v=" + v + " flags=" + flags); if (flags == 3) { diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs index f68bdca9eb..0ee82f2f7f 100644 --- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs +++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs @@ -155,9 +155,9 @@ private void Count(IList matchingDocs) // TODO: is this right? really, we need a way to // verify that this ordinalMap "matches" the leaves in // matchingDocs... 
- if (dv is MultiDocValues.MultiSortedSetDocValues && matchingDocs.Count > 1) + if (dv is MultiDocValues.MultiSortedSetDocValues values && matchingDocs.Count > 1) { - ordinalMap = ((MultiDocValues.MultiSortedSetDocValues)dv).Mapping; + ordinalMap = values.Mapping; } else { diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs index 72334430b2..64d2f552ac 100644 --- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs +++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs @@ -2,8 +2,8 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Facet.Taxonomy @@ -161,12 +161,14 @@ public virtual int CompareTo(CategoryPath other) return Length - other.Length; } - private void HasDelimiter(string offender, char delimiter) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static void HasDelimiter(string offender, char delimiter) // LUCENENET: CA1822: Mark members as static { - throw new ArgumentException("delimiter character '" + delimiter + + throw new ArgumentException("delimiter character '" + delimiter + "' (U+" + delimiter.ToString() + ") appears in path component \"" + offender + "\""); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void NoDelimiter(char[] buf, int offset, int len, char delimiter) { for (int idx = 0; idx < len; idx++) @@ -296,6 +298,7 @@ public virtual CategoryPath Subpath(int length) /// '/'. 
/// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return ToString('/'); diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs index 5b0f83b464..25fd4b89aa 100644 --- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs +++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs @@ -400,10 +400,8 @@ private void CloseResources() // LUCENENET: Made private, since this has the sam readerManager = null; initializedReaderManager = false; } - if (cache != null) - { - cache.Dispose(); - } + cache?.Dispose(); + parentStream.Dispose(); // LUCENENET specific } /// @@ -576,8 +574,10 @@ private int AddCategoryDocument(FacetLabel categoryPath, int parent) // in the reader (which knows that anyway only category 0 has a parent // -1). parentStream.Set(Math.Max(parent + 1, 1)); - Document d = new Document(); - d.Add(parentStreamField); + Document d = new Document + { + parentStreamField + }; fullPathField.SetStringValue(FacetsConfig.PathToString(categoryPath.Components, categoryPath.Length)); d.Add(fullPathField); @@ -1080,7 +1080,7 @@ public void AddDone() } } - int[] map = null; + private int[] map = null; public int[] GetMap() { diff --git a/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs index 2dd18d1fbb..b390813077 100644 --- a/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs +++ b/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs @@ -39,7 +39,7 @@ public abstract class OrdinalsSegmentReader /// /// Default constructor. /// - public OrdinalsSegmentReader() + protected OrdinalsSegmentReader() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } } @@ -47,7 +47,7 @@ public OrdinalsSegmentReader() /// /// Default constructor. 
/// - public OrdinalsReader() + protected OrdinalsReader() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs index 01e3bd3b65..64b8dc9eb3 100644 --- a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs +++ b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs @@ -48,7 +48,7 @@ public abstract class ParallelTaxonomyArrays /// /// Sole constructor. /// - public ParallelTaxonomyArrays() + protected ParallelTaxonomyArrays() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs index 58d56771ea..44394a84b0 100644 --- a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs +++ b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs @@ -57,13 +57,9 @@ public static int Main(string[] args) //Console.WriteLine("\nUsage: java -classpath ... 
org.apache.lucene.facet.util.PrintTaxonomyStats [-printTree] /path/to/taxononmy/index\n"); //return 1; } - using (Store.Directory dir = FSDirectory.Open(new DirectoryInfo(path))) - { - using (var r = new DirectoryTaxonomyReader(dir)) - { - PrintStats(r, System.Console.Out, printTree); - } - } + using Store.Directory dir = FSDirectory.Open(new DirectoryInfo(path)); + using var r = new DirectoryTaxonomyReader(dir); + PrintStats(r, System.Console.Out, printTree); return 0; } diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs index 81a338a428..3d8c6b0373 100644 --- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs +++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs @@ -194,7 +194,7 @@ public override double DoubleVal(int document) public override bool Equals(object o) { - if (ReferenceEquals(null, o)) return false; + if (o is null) return false; if (ReferenceEquals(this, o)) return true; if (o.GetType() != this.GetType()) return false; return Equals((ScoreValueSource)o); diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs index 50fbd38e9d..c8412b645d 100644 --- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs +++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs @@ -92,7 +92,7 @@ public Block(Stream reader) // case 1L: // LUCENENET TODO: When object fields change, increment serialVersionUID and move the above block here for legacy support... 
default: - throw new InvalidDataException($"Version {serialVersion} of {this.GetType().ToString()} deserialization is not supported."); + throw new InvalidDataException($"Version {serialVersion} of {this.GetType()} deserialization is not supported."); } } } @@ -309,7 +309,7 @@ internal CharBlockArray(Stream reader) // case 1L: // LUCENENET TODO: When object fields change, increment serialVersionUID and move the above block here for legacy support... default: - throw new InvalidDataException($"Version {serialVersion} of {this.GetType().ToString()} deserialization is not supported."); + throw new InvalidDataException($"Version {serialVersion} of {this.GetType()} deserialization is not supported."); } } diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs index f2cc59626f..824b42d82f 100644 --- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs +++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs @@ -206,13 +206,11 @@ private void Grow() this.collisionMap = new CollisionMap(oldCollisionMap.Capacity, this.labelRepository); this.threshold = (int)(this.capacity * this.loadFactor); - using (var it = oldCollisionMap.GetEnumerator()) + using var it = oldCollisionMap.GetEnumerator(); + while (it.MoveNext()) { - while (it.MoveNext()) - { - var e = it.Current; - AddLabelOffset(StringHashCode(this.labelRepository, e.offset), e.cid, e.offset); - } + var e = it.Current; + AddLabelOffset(StringHashCode(this.labelRepository, e.offset), e.cid, e.offset); } } @@ -250,7 +248,7 @@ private void AddLabelOffset(int hash, int cid, int knownOffset) } } - private bool AddLabelOffsetToHashArray(HashArray a, int hash, int ordinal, int knownOffset) + private static bool AddLabelOffsetToHashArray(HashArray a, int hash, int ordinal, int knownOffset) // LUCENENET: CA1822: Mark members as static { int index = CompactLabelToOrdinal.IndexFor(hash, a.offsets.Length); 
int offset = a.offsets[index]; @@ -479,13 +477,11 @@ internal static CompactLabelToOrdinal Open(FileInfo file, float loadFactor, int internal virtual void Flush(Stream stream) { - using (BinaryWriter dos = new BinaryWriter(stream)) - { - dos.Write(this.m_counter); + using BinaryWriter dos = new BinaryWriter(stream); + dos.Write(this.m_counter); - // write the labelRepository - this.labelRepository.Flush(dos.BaseStream); - } + // write the labelRepository + this.labelRepository.Flush(dos.BaseStream); } private sealed class HashArray diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs index 88248a769c..ca522a7913 100644 --- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs +++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs @@ -39,7 +39,7 @@ public abstract class LabelToOrdinal /// /// Default constructor. /// - public LabelToOrdinal() + protected LabelToOrdinal() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs index 389e6b931c..25beaf6301 100644 --- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs +++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs @@ -233,14 +233,12 @@ bool IInternalNameInt32CacheLru.MakeRoomLRU() } //System.Diagnostics.Debug.WriteLine("Removing cache entries in MakeRoomLRU"); - using (var it = cache.GetEnumerator()) + using var it = cache.GetEnumerator(); + int i = 0; + while (i < n && it.MoveNext()) { - int i = 0; - while (i < n && it.MoveNext()) - { - cache.Remove(it.Current.Key); - i++; - } + cache.Remove(it.Current.Key); + i++; } } return true; diff --git a/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs b/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs index 753fdaeac6..e950ca8fcb 100644 --- 
a/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs +++ b/src/Lucene.Net.Grouping/AbstractDistinctValuesCollector.cs @@ -77,11 +77,8 @@ public virtual void SetScorer(Scorer scorer) /// (AbstractDistinctValuesCollector.GroupCount{TGroupValue} rather than /// AbstractDistinctValuesCollector{GC}.GroupCount{TGroupValue}). /// - public class AbstractDistinctValuesCollector + public static class AbstractDistinctValuesCollector // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - // Disallow direct creation - private AbstractDistinctValuesCollector() { } - /// /// Returned by , /// representing the value and set of distinct values for the group. @@ -97,7 +94,7 @@ public abstract class GroupCount : IGroupCount public TGroupValue GroupValue { get; protected set; } public IEnumerable UniqueValues { get; protected set; } - public GroupCount(TGroupValue groupValue) + protected GroupCount(TGroupValue groupValue) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.GroupValue = groupValue; this.UniqueValues = new JCG.HashSet(); diff --git a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs index a7bd3957bd..e6bacdea59 100644 --- a/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/AbstractFirstPassGroupingCollector.cs @@ -67,7 +67,7 @@ public abstract class AbstractFirstPassGroupingCollector : IAbstrac /// /// How many top groups to keep. /// If I/O related errors occur - public AbstractFirstPassGroupingCollector(Sort groupSort, int topNGroups) + protected AbstractFirstPassGroupingCollector(Sort groupSort, int topNGroups) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { if (topNGroups < 1) { @@ -202,8 +202,7 @@ public virtual void Collect(int doc) // under null group)? 
TGroupValue groupValue = GetDocGroupValue(doc); - CollectedSearchGroup group; - if (!groupMap.TryGetValue(groupValue, out group)) + if (!groupMap.TryGetValue(groupValue, out CollectedSearchGroup group)) { // First time we are seeing this group, or, we've seen @@ -219,7 +218,7 @@ public virtual void Collect(int doc) // Add a new CollectedSearchGroup: CollectedSearchGroup sg = new CollectedSearchGroup(); - sg.GroupValue = CopyDocGroupValue(groupValue, default(TGroupValue)); + sg.GroupValue = CopyDocGroupValue(groupValue, default); sg.ComparerSlot = groupMap.Count; sg.TopDoc = docBase + doc; foreach (FieldComparer fc in comparers) diff --git a/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs b/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs index ee9c211f1a..b29da15a45 100644 --- a/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/AbstractSecondPassGroupingCollector.cs @@ -49,8 +49,8 @@ public abstract class AbstractSecondPassGroupingCollector : IAbstra private int totalHitCount; private int totalGroupedHitCount; - public AbstractSecondPassGroupingCollector(IEnumerable> groups, Sort groupSort, Sort withinGroupSort, - int maxDocsPerGroup, bool getScores, bool getMaxScores, bool fillSortFields) + protected AbstractSecondPassGroupingCollector(IEnumerable> groups, Sort groupSort, Sort withinGroupSort, + int maxDocsPerGroup, bool getScores, bool getMaxScores, bool fillSortFields) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { //System.out.println("SP init"); @@ -141,7 +141,7 @@ public virtual ITopGroups GetTopGroups(int withinGroupOffset) } return new TopGroups(groupSort.GetSort(), - withinGroupSort == null ? 
null : withinGroupSort.GetSort(), + withinGroupSort?.GetSort(), totalHitCount, totalGroupedHitCount, groupDocsResult, maxScore); } @@ -155,13 +155,8 @@ public virtual ITopGroups GetTopGroups(int withinGroupOffset) /// to access nested classes of /// without referencing the generic closing type. /// - public class AbstractSecondPassGroupingCollector + public static class AbstractSecondPassGroupingCollector // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - /// - /// Don't allow creation - /// - private AbstractSecondPassGroupingCollector() { } - // TODO: merge with SearchGroup or not? // ad: don't need to build a new hashmap // disad: blows up the size of SearchGroup if we need many of them, and couples implementations diff --git a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs index 3d73f2d9c2..b9dde6a796 100644 --- a/src/Lucene.Net.Grouping/BlockGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/BlockGroupingCollector.cs @@ -456,7 +456,7 @@ public virtual ITopGroups GetTopGroups(Sort withinGrou topDocs.MaxScore, og.count, topDocs.ScoreDocs, - default(TGroupValue), + default, groupSortValues); maxScore = Math.Max(maxScore, topDocs.MaxScore); } @@ -470,7 +470,7 @@ public virtual ITopGroups GetTopGroups(Sort withinGrou */ return new TopGroups(new TopGroups(groupSort.GetSort(), - withinGroupSort == null ? 
null : withinGroupSort.GetSort(), + withinGroupSort?.GetSort(), totalHitCount, totalGroupedHitCount, groups, maxScore), totalGroupCount); } diff --git a/src/Lucene.Net.Grouping/Function/FunctionAllGroupHeadsCollector.cs b/src/Lucene.Net.Grouping/Function/FunctionAllGroupHeadsCollector.cs index 3c313fe03c..e9f953a22b 100644 --- a/src/Lucene.Net.Grouping/Function/FunctionAllGroupHeadsCollector.cs +++ b/src/Lucene.Net.Grouping/Function/FunctionAllGroupHeadsCollector.cs @@ -65,8 +65,7 @@ public FunctionAllGroupHeadsCollector(ValueSource groupBy, IDictionary /* Map*/ vsContext, Valu public override void Collect(int doc) { groupFiller.FillValue(doc); - GroupCount groupCount; - if (groupMap.TryGetValue(groupMval, out groupCount)) + if (groupMap.TryGetValue(groupMval, out GroupCount groupCount)) { countFiller.FillValue(doc); ((ISet)groupCount.UniqueValues).Add(countMval.Duplicate()); diff --git a/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs b/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs index 1c1f76438e..883583cb89 100644 --- a/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/Function/FunctionSecondPassGroupingCollector.cs @@ -63,8 +63,7 @@ public FunctionSecondPassGroupingCollector(IEnumerable RetrieveGroup(int doc) { filler.FillValue(doc); - AbstractSecondPassGroupingCollector.SearchGroupDocs result; - m_groupMap.TryGetValue(mval, out result); + m_groupMap.TryGetValue(mval, out var result); return result; } diff --git a/src/Lucene.Net.Grouping/GroupDocs.cs b/src/Lucene.Net.Grouping/GroupDocs.cs index 5e4892deb4..860d97514f 100644 --- a/src/Lucene.Net.Grouping/GroupDocs.cs +++ b/src/Lucene.Net.Grouping/GroupDocs.cs @@ -112,6 +112,7 @@ public interface IGroupDocs /// /// Matches the groupSort passed to . 
/// + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")] object[] GroupSortValues { get; } } } \ No newline at end of file diff --git a/src/Lucene.Net.Grouping/GroupingSearch.cs b/src/Lucene.Net.Grouping/GroupingSearch.cs index f056796fef..6a018b8db9 100644 --- a/src/Lucene.Net.Grouping/GroupingSearch.cs +++ b/src/Lucene.Net.Grouping/GroupingSearch.cs @@ -235,8 +235,10 @@ protected virtual ITopGroups GroupByFieldOrFunction(In ICollector firstRound; if (allGroupHeads || allGroups) { - List collectors = new List(); - collectors.Add(firstPassCollector); + List collectors = new List + { + firstPassCollector + }; if (allGroups) { collectors.Add(allGroupsCollector); diff --git a/src/Lucene.Net.Grouping/SearchGroup.cs b/src/Lucene.Net.Grouping/SearchGroup.cs index 680b2570b1..27ae9214d7 100644 --- a/src/Lucene.Net.Grouping/SearchGroup.cs +++ b/src/Lucene.Net.Grouping/SearchGroup.cs @@ -87,13 +87,8 @@ public override int GetHashCode() /// LUCENENET specific class used to nest types to mimic the syntax used /// by Lucene (that is, without specifying the generic closing type of ) /// - public class SearchGroup + public static class SearchGroup // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - /// - /// Prevent direct creation - /// - private SearchGroup() { } - private class ShardIter { public IEnumerator> Iter => iter; diff --git a/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs b/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs index 72111a6235..13137afef8 100644 --- a/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs +++ b/src/Lucene.Net.Grouping/Term/TermAllGroupHeadsCollector.cs @@ -52,14 +52,9 @@ protected TermAllGroupHeadsCollector(string groupField, int numberOfSorts) /// specifying its generic closing type. /// (TermAllGroupHeadsCollector.Create() rather than TermAllGroupHeadsCollector{GH}.Create()). 
/// - public class TermAllGroupHeadsCollector + public static class TermAllGroupHeadsCollector // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - private static readonly int DEFAULT_INITIAL_SIZE = 128; - - /// - /// Disallow creation - /// - private TermAllGroupHeadsCollector() { } + private const int DEFAULT_INITIAL_SIZE = 128; /// /// Creates an instance based on the supplied arguments. @@ -171,8 +166,7 @@ protected override void RetrieveGroupHeadAndAddIfNotExist(int doc) groupIndex.LookupOrd(ord, scratchBytesRef); groupValue = scratchBytesRef; } - GroupHead groupHead; - if (!groups.TryGetValue(groupValue, out groupHead)) + if (!groups.TryGetValue(groupValue, out GroupHead groupHead)) { groupHead = new GroupHead(this, groupValue, sortWithinGroup, doc); groups[groupValue == null ? null : BytesRef.DeepCopyOf(groupValue)] = groupHead; @@ -267,7 +261,7 @@ internal class OrdScoreAllGroupHeadsCollector : TermAllGroupHeadsCollector collectedGroups; private readonly SortField[] fields; - private SortedDocValues[] sortsIndex; + private readonly SortedDocValues[] sortsIndex; // LUCENENET: marked readonly private Scorer scorer; private GroupHead[] segmentGroupHeads; @@ -492,7 +486,7 @@ internal class OrdAllGroupHeadsCollector : TermAllGroupHeadsCollector collectedGroups; private readonly SortField[] fields; - private SortedDocValues[] sortsIndex; + private readonly SortedDocValues[] sortsIndex; // LUCENENET: marked readonly private GroupHead[] segmentGroupHeads; internal OrdAllGroupHeadsCollector(string groupField, Sort sortWithinGroup, int initialSize) diff --git a/src/Lucene.Net.Grouping/Term/TermAllGroupsCollector.cs b/src/Lucene.Net.Grouping/Term/TermAllGroupsCollector.cs index c5a8690e69..3cccd6f54b 100644 --- a/src/Lucene.Net.Grouping/Term/TermAllGroupsCollector.cs +++ b/src/Lucene.Net.Grouping/Term/TermAllGroupsCollector.cs @@ -39,9 +39,9 @@ namespace Lucene.Net.Search.Grouping.Terms /// public class TermAllGroupsCollector : 
AbstractAllGroupsCollector { - private static readonly int DEFAULT_INITIAL_SIZE = 128; + private const int DEFAULT_INITIAL_SIZE = 128; - private readonly String groupField; + private readonly string groupField; private readonly SentinelInt32Set ordSet; private readonly IList groups; diff --git a/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs b/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs index 9cb95be617..6e533b9671 100644 --- a/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs +++ b/src/Lucene.Net.Grouping/Term/TermFirstPassGroupingCollector.cs @@ -33,7 +33,7 @@ public class TermFirstPassGroupingCollector : AbstractFirstPassGroupingCollector private readonly BytesRef scratchBytesRef = new BytesRef(); private SortedDocValues index; - private string groupField; + private readonly string groupField; // LUCENENET: marked readonly /// /// Create the first pass collector. diff --git a/src/Lucene.Net.Grouping/TopGroups.cs b/src/Lucene.Net.Grouping/TopGroups.cs index 494332e8f0..ff2c592716 100644 --- a/src/Lucene.Net.Grouping/TopGroups.cs +++ b/src/Lucene.Net.Grouping/TopGroups.cs @@ -93,13 +93,8 @@ public TopGroups(ITopGroups oldTopGroups, int? totalGroupCount) /// LUCENENET specific class used to nest types to mimic the syntax used /// by Lucene (that is, without specifying the generic closing type of ) /// - public class TopGroups + public static class TopGroups // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - /// - /// Prevent direct creation - /// - private TopGroups() { } - /// /// How the GroupDocs score (if any) should be merged. 
public enum ScoreMergeMode @@ -228,7 +223,7 @@ public static TopGroups Merge(ITopGroups[] shardGroups, Sort groupSort, } else if (docOffset >= mergedTopDocs.ScoreDocs.Length) { - mergedScoreDocs = new ScoreDoc[0]; + mergedScoreDocs = Arrays.Empty(); } else { @@ -266,11 +261,11 @@ public static TopGroups Merge(ITopGroups[] shardGroups, Sort groupSort, if (totalGroupCount != null) { - var result = new TopGroups(groupSort.GetSort(), docSort == null ? null : docSort.GetSort(), totalHitCount, totalGroupedHitCount, mergedGroupDocs, totalMaxScore); + var result = new TopGroups(groupSort.GetSort(), docSort?.GetSort(), totalHitCount, totalGroupedHitCount, mergedGroupDocs, totalMaxScore); return new TopGroups(result, totalGroupCount); } - return new TopGroups(groupSort.GetSort(), docSort == null ? null : docSort.GetSort(), totalHitCount, totalGroupedHitCount, mergedGroupDocs, totalMaxScore); + return new TopGroups(groupSort.GetSort(), docSort?.GetSort(), totalHitCount, totalGroupedHitCount, mergedGroupDocs, totalMaxScore); } } @@ -295,14 +290,17 @@ public interface ITopGroups /// /// Group results in groupSort order + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")] IGroupDocs[] Groups { get; } /// /// How groups are sorted against each other + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")] SortField[] GroupSort { get; } /// /// How docs are sorted within each group + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")] SortField[] WithinGroupSort { get; } /// diff --git a/src/Lucene.Net.Highlighter/Highlight/GradientFormatter.cs b/src/Lucene.Net.Highlighter/Highlight/GradientFormatter.cs index 180546ca24..09eabd712e 100644 --- a/src/Lucene.Net.Highlighter/Highlight/GradientFormatter.cs +++ 
b/src/Lucene.Net.Highlighter/Highlight/GradientFormatter.cs @@ -25,7 +25,7 @@ namespace Lucene.Net.Search.Highlight /// public class GradientFormatter : IFormatter { - private float maxScore; + private readonly float maxScore; // LUCENENET: marked readonly protected int m_fgRMin, m_fgGMin, m_fgBMin; protected int m_fgRMax, m_fgGMax, m_fgBMax; @@ -177,7 +177,8 @@ private int GetColorVal(int colorMin, int colorMax, float score) return Math.Min(colorMin, colorMax) + (int)colScore; } - private static char[] hexDigits = new char[] { '0', '1', '2', '3', '4', '5', '6', '7', + private static readonly char[] hexDigits = new char[] { // LUCENENET: marked readonly + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' }; /// diff --git a/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs b/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs index 4e414eb8fb..d6cb282f13 100644 --- a/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs +++ b/src/Lucene.Net.Highlighter/Highlight/Highlighter.cs @@ -35,7 +35,7 @@ public class Highlighter public static readonly int DEFAULT_MAX_CHARS_TO_ANALYZE = 50 * 1024; private int _maxDocCharsToAnalyze = DEFAULT_MAX_CHARS_TO_ANALYZE; - private IFormatter _formatter; + private readonly IFormatter _formatter; // LUCENENET: marked readonly private IEncoder _encoder; private IFragmenter _textFragmenter = new SimpleFragmenter(); private IScorer _fragmentScorer = null; @@ -169,8 +169,8 @@ public TextFragment[] GetBestTextFragments( tokenStream.Reset(); var currentFrag = new TextFragment(newText, newText.Length, docFrags.Count); - if (_fragmentScorer is QueryScorer) { - ((QueryScorer)_fragmentScorer).SetMaxDocCharsToAnalyze(_maxDocCharsToAnalyze); + if (_fragmentScorer is QueryScorer queryScorer) { + queryScorer.SetMaxDocCharsToAnalyze(_maxDocCharsToAnalyze); } var newStream = _fragmentScorer.Init(tokenStream); @@ -342,7 +342,7 @@ public TextFragment[] GetBestTextFragments( /// This will leave a "null" in the array entry for 
the lesser scored fragment. /// /// An array of document fragments in descending score - private void MergeContiguousFragments(TextFragment[] frag) + private static void MergeContiguousFragments(TextFragment[] frag) // LUCENENET: CA1822: Mark members as static { bool mergingStillBeingDone; if (frag.Length > 1) diff --git a/src/Lucene.Net.Highlighter/Highlight/QueryScorer.cs b/src/Lucene.Net.Highlighter/Highlight/QueryScorer.cs index fc11822735..78eefdc41a 100644 --- a/src/Lucene.Net.Highlighter/Highlight/QueryScorer.cs +++ b/src/Lucene.Net.Highlighter/Highlight/QueryScorer.cs @@ -141,8 +141,7 @@ public virtual float GetTokenScore() position += posIncAtt.PositionIncrement; string termText = termAtt.ToString(); - WeightedSpanTerm weightedSpanTerm; - if (!fieldWeightedSpanTerms.TryGetValue(termText, out weightedSpanTerm) || weightedSpanTerm == null) + if (!fieldWeightedSpanTerms.TryGetValue(termText, out WeightedSpanTerm weightedSpanTerm) || weightedSpanTerm == null) { return 0; } @@ -187,8 +186,7 @@ public virtual TokenStream Init(TokenStream tokenStream) /// for token public virtual WeightedSpanTerm GetWeightedSpanTerm(string token) { - WeightedSpanTerm result; - fieldWeightedSpanTerms.TryGetValue(token, out result); + fieldWeightedSpanTerms.TryGetValue(token, out WeightedSpanTerm result); return result; } diff --git a/src/Lucene.Net.Highlighter/Highlight/QueryTermExtractor.cs b/src/Lucene.Net.Highlighter/Highlight/QueryTermExtractor.cs index 2f5c41d8e7..946852351b 100644 --- a/src/Lucene.Net.Highlighter/Highlight/QueryTermExtractor.cs +++ b/src/Lucene.Net.Highlighter/Highlight/QueryTermExtractor.cs @@ -105,10 +105,10 @@ private static void GetTerms(Query query, ISet terms, bool prohibi { try { - if (query is BooleanQuery) - GetTermsFromBooleanQuery((BooleanQuery)query, terms, prohibited, fieldName); - else if (query is FilteredQuery) - GetTermsFromFilteredQuery((FilteredQuery)query, terms, prohibited, fieldName); + if (query is BooleanQuery booleanQuery) + 
GetTermsFromBooleanQuery(booleanQuery, terms, prohibited, fieldName); + else if (query is FilteredQuery filteredQuery) + GetTermsFromFilteredQuery(filteredQuery, terms, prohibited, fieldName); else { var nonWeightedTerms = new JCG.HashSet(); @@ -122,9 +122,7 @@ private static void GetTerms(Query query, ISet terms, bool prohibi } } } -#pragma warning disable 168 - catch (NotSupportedException ignore) -#pragma warning restore 168 + catch (NotSupportedException) // LUCENENET: IDE0059: Remove unnecessary value assignment { //this is non-fatal for our purposes } diff --git a/src/Lucene.Net.Highlighter/Highlight/SimpleFragmenter.cs b/src/Lucene.Net.Highlighter/Highlight/SimpleFragmenter.cs index db115e96d9..d347dd407a 100644 --- a/src/Lucene.Net.Highlighter/Highlight/SimpleFragmenter.cs +++ b/src/Lucene.Net.Highlighter/Highlight/SimpleFragmenter.cs @@ -26,7 +26,7 @@ namespace Lucene.Net.Search.Highlight /// public class SimpleFragmenter : IFragmenter { - private static readonly int DEFAULT_FRAGMENT_SIZE = 100; + private const int DEFAULT_FRAGMENT_SIZE = 100; private int currentNumFrags; private int fragmentSize; private IOffsetAttribute offsetAtt; diff --git a/src/Lucene.Net.Highlighter/Highlight/SimpleHTMLFormatter.cs b/src/Lucene.Net.Highlighter/Highlight/SimpleHTMLFormatter.cs index 0392bb20b0..a3bba74f05 100644 --- a/src/Lucene.Net.Highlighter/Highlight/SimpleHTMLFormatter.cs +++ b/src/Lucene.Net.Highlighter/Highlight/SimpleHTMLFormatter.cs @@ -23,8 +23,8 @@ namespace Lucene.Net.Search.Highlight /// MAHarwood public class SimpleHTMLFormatter : IFormatter { - private static readonly string DEFAULT_PRE_TAG = ""; - private static readonly string DEFAULT_POST_TAG = ""; + private const string DEFAULT_PRE_TAG = ""; + private const string DEFAULT_POST_TAG = ""; internal string preTag; internal string postTag; diff --git a/src/Lucene.Net.Highlighter/Highlight/SimpleSpanFragmenter.cs b/src/Lucene.Net.Highlighter/Highlight/SimpleSpanFragmenter.cs index d57358dfe0..c8b460b63a 
100644 --- a/src/Lucene.Net.Highlighter/Highlight/SimpleSpanFragmenter.cs +++ b/src/Lucene.Net.Highlighter/Highlight/SimpleSpanFragmenter.cs @@ -27,11 +27,11 @@ namespace Lucene.Net.Search.Highlight /// public class SimpleSpanFragmenter : IFragmenter { - private static int DEFAULT_FRAGMENT_SIZE = 100; - private int fragmentSize; + private const int DEFAULT_FRAGMENT_SIZE = 100; + private readonly int fragmentSize; // LUCENENET: marked readonly private int currentNumFrags; private int position = -1; - private QueryScorer queryScorer; + private readonly QueryScorer queryScorer; // LUCENENET: marked readonly private int waitForPos = -1; private int textSize; private ICharTermAttribute termAtt; diff --git a/src/Lucene.Net.Highlighter/Highlight/SpanGradientFormatter.cs b/src/Lucene.Net.Highlighter/Highlight/SpanGradientFormatter.cs index 8d3fe957ce..ec03755145 100644 --- a/src/Lucene.Net.Highlighter/Highlight/SpanGradientFormatter.cs +++ b/src/Lucene.Net.Highlighter/Highlight/SpanGradientFormatter.cs @@ -28,7 +28,7 @@ namespace Lucene.Net.Search.Highlight public class SpanGradientFormatter : GradientFormatter { // guess how much extra text we'll add to the text we're highlighting to try to avoid a StringBuilder resize - private static readonly string TEMPLATE = "..."; + private const string TEMPLATE = "..."; private static readonly int EXTRA = TEMPLATE.Length; public SpanGradientFormatter(float maxScore, string minForegroundColor, diff --git a/src/Lucene.Net.Highlighter/Highlight/TokenGroup.cs b/src/Lucene.Net.Highlighter/Highlight/TokenGroup.cs index 440976fd05..21d276088a 100644 --- a/src/Lucene.Net.Highlighter/Highlight/TokenGroup.cs +++ b/src/Lucene.Net.Highlighter/Highlight/TokenGroup.cs @@ -27,7 +27,7 @@ namespace Lucene.Net.Search.Highlight /// public class TokenGroup { - private static readonly int MAX_NUM_TOKENS_PER_GROUP = 50; + private const int MAX_NUM_TOKENS_PER_GROUP = 50; internal Token[] tokens = new Token[MAX_NUM_TOKENS_PER_GROUP]; internal float[] 
scores = new float[MAX_NUM_TOKENS_PER_GROUP]; @@ -55,8 +55,8 @@ public class TokenGroup /// public virtual float TotalScore { get; private set; } - private IOffsetAttribute offsetAtt; - private ICharTermAttribute termAtt; + private readonly IOffsetAttribute offsetAtt; // LUCENENET: marked readonly + private readonly ICharTermAttribute termAtt; // LUCENENET: marked readonly public TokenGroup(TokenStream tokenStream) { diff --git a/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs b/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs index 080277746c..bc9cd26aff 100644 --- a/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs +++ b/src/Lucene.Net.Highlighter/Highlight/TokenSources.cs @@ -32,7 +32,7 @@ namespace Lucene.Net.Search.Highlight /// term vectors with offsets and positions or from an Analyzer re-parsing the stored content. /// see TokenStreamFromTermVector /// - public class TokenSources + public static class TokenSources // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { private class TokenComparer : IComparer { diff --git a/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs b/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs index 73ad263152..5baf119ad6 100644 --- a/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs +++ b/src/Lucene.Net.Highlighter/Highlight/TokenStreamFromTermPositionVector.cs @@ -32,13 +32,13 @@ public sealed class TokenStreamFromTermPositionVector : TokenStream private IEnumerator tokensAtCurrentPosition; - private ICharTermAttribute termAttribute; + private readonly ICharTermAttribute termAttribute; // LUCENENET: marked readonly - private IPositionIncrementAttribute positionIncrementAttribute; + private readonly IPositionIncrementAttribute positionIncrementAttribute; // LUCENENET: marked readonly - private IOffsetAttribute offsetAttribute; + private readonly IOffsetAttribute offsetAttribute; // LUCENENET: marked readonly - 
private IPayloadAttribute payloadAttribute; + private readonly IPayloadAttribute payloadAttribute; // LUCENENET: marked readonly ///Constructor /// diff --git a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs index 7461da8a9f..cec91d8bee 100644 --- a/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs +++ b/src/Lucene.Net.Highlighter/Highlight/WeightedSpanTermExtractor.cs @@ -65,9 +65,9 @@ public WeightedSpanTermExtractor(string defaultField) /// If there is a low-level I/O error protected virtual void Extract(Query query, IDictionary terms) { - if (query is BooleanQuery) + if (query is BooleanQuery booleanQuery) { - IList queryClauses = ((BooleanQuery)query).Clauses; + IList queryClauses = booleanQuery.Clauses; for (int i = 0; i < queryClauses.Count; i++) { @@ -77,9 +77,8 @@ protected virtual void Extract(Query query, IDictionary termArrays = mpq.GetTermArrays(); int[] positions = mpq.GetPositions(); if (positions.Length > 0) @@ -319,8 +317,7 @@ protected virtual void ExtractWeightedSpanTerms(IDictionary GetWeightedSpanTerms(Query public virtual IDictionary GetWeightedSpanTermsWithScores( Query query, TokenStream tokenStream, string fieldName, IndexReader reader) { - this.fieldName = fieldName == null ? 
null : fieldName.Intern(); + this.fieldName = fieldName?.Intern(); this.tokenStream = tokenStream; @@ -524,8 +521,7 @@ public virtual IDictionary GetWeightedSpanTermsWithSco { foreach (var wt in weightedTerms) { - WeightedSpanTerm weightedSpanTerm; - terms.TryGetValue(wt, out weightedSpanTerm); + terms.TryGetValue(wt, out WeightedSpanTerm weightedSpanTerm); int docFreq = reader.DocFreq(new Term(fieldName, weightedSpanTerm.Term)); // IDF algorithm taken from DefaultSimilarity class float idf = (float)(Math.Log((float)totalNumDocs / (double)(docFreq + 1)) + 1.0); @@ -542,28 +538,28 @@ public virtual IDictionary GetWeightedSpanTermsWithSco protected virtual void CollectSpanQueryFields(SpanQuery spanQuery, ISet fieldNames) { - if (spanQuery is FieldMaskingSpanQuery) + if (spanQuery is FieldMaskingSpanQuery fieldMaskingSpanQuery) { - CollectSpanQueryFields(((FieldMaskingSpanQuery)spanQuery).MaskedQuery, fieldNames); + CollectSpanQueryFields(fieldMaskingSpanQuery.MaskedQuery, fieldNames); } - else if (spanQuery is SpanFirstQuery) + else if (spanQuery is SpanFirstQuery spanFirstQuery) { - CollectSpanQueryFields(((SpanFirstQuery)spanQuery).Match, fieldNames); + CollectSpanQueryFields(spanFirstQuery.Match, fieldNames); } - else if (spanQuery is SpanNearQuery) + else if (spanQuery is SpanNearQuery spanNearQuery) { - foreach (SpanQuery clause in ((SpanNearQuery)spanQuery).GetClauses()) + foreach (SpanQuery clause in spanNearQuery.GetClauses()) { CollectSpanQueryFields(clause, fieldNames); } } - else if (spanQuery is SpanNotQuery) + else if (spanQuery is SpanNotQuery spanNotQuery) { - CollectSpanQueryFields(((SpanNotQuery)spanQuery).Include, fieldNames); + CollectSpanQueryFields(spanNotQuery.Include, fieldNames); } - else if (spanQuery is SpanOrQuery) + else if (spanQuery is SpanOrQuery spanOrQuery) { - foreach (SpanQuery clause in ((SpanOrQuery)spanQuery).GetClauses()) + foreach (SpanQuery clause in spanOrQuery.GetClauses()) { CollectSpanQueryFields(clause, fieldNames); } @@ 
-580,17 +576,17 @@ protected virtual bool MustRewriteQuery(SpanQuery spanQuery) { return false; // Will throw NotImplementedException in case of a SpanRegexQuery. } - else if (spanQuery is FieldMaskingSpanQuery) + else if (spanQuery is FieldMaskingSpanQuery fieldMaskingSpanQuery) { - return MustRewriteQuery(((FieldMaskingSpanQuery)spanQuery).MaskedQuery); + return MustRewriteQuery(fieldMaskingSpanQuery.MaskedQuery); } - else if (spanQuery is SpanFirstQuery) + else if (spanQuery is SpanFirstQuery spanFirstQuery) { - return MustRewriteQuery(((SpanFirstQuery)spanQuery).Match); + return MustRewriteQuery(spanFirstQuery.Match); } - else if (spanQuery is SpanNearQuery) + else if (spanQuery is SpanNearQuery spanNearQuery) { - foreach (SpanQuery clause in ((SpanNearQuery)spanQuery).GetClauses()) + foreach (SpanQuery clause in spanNearQuery.GetClauses()) { if (MustRewriteQuery(clause)) { @@ -599,14 +595,13 @@ protected virtual bool MustRewriteQuery(SpanQuery spanQuery) } return false; } - else if (spanQuery is SpanNotQuery) + else if (spanQuery is SpanNotQuery spanNotQuery) { - SpanNotQuery spanNotQuery = (SpanNotQuery)spanQuery; return MustRewriteQuery(spanNotQuery.Include) || MustRewriteQuery(spanNotQuery.Exclude); } - else if (spanQuery is SpanOrQuery) + else if (spanQuery is SpanOrQuery spanOrQuery) { - foreach (SpanQuery clause in ((SpanOrQuery)spanQuery).GetClauses()) + foreach (SpanQuery clause in spanOrQuery.GetClauses()) { if (MustRewriteQuery(clause)) { @@ -643,8 +638,7 @@ public WeightedSpanTerm this[K key] set { - WeightedSpanTerm prev = null; - wrapped.TryGetValue(key, out prev); + wrapped.TryGetValue(key, out WeightedSpanTerm prev); wrapped[key] = value; if (prev == null) return; diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs index a26ba1e846..9af5ed8cee 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs +++ 
b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs @@ -157,10 +157,10 @@ internal class SimpleCharacterRunAutomatonAnonymousHelper : CharacterRunAutomato private readonly CharsRef lowerBound; private readonly CharsRef upperBound; - private bool includeLower; - private bool includeUpper; + private readonly bool includeLower; + private readonly bool includeUpper; #pragma warning disable 612, 618 - private IComparer comparer = CharsRef.UTF16SortedAsUTF8Comparer; + private static readonly IComparer comparer = CharsRef.UTF16SortedAsUTF8Comparer; // LUCENENET specific - made static #pragma warning restore 612, 618 public SimpleCharacterRunAutomatonAnonymousHelper(Automaton a, TermRangeQuery tq) @@ -257,7 +257,7 @@ public DocsAndPositionsEnumAnonymousHelper( internal int currentEndOffset = -1; internal TokenStream stream; - readonly BytesRef[] matchDescriptions; + private readonly BytesRef[] matchDescriptions; public override int NextPosition() diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs index ca21146c66..1ec4c6559f 100644 --- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs +++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs @@ -219,8 +219,7 @@ public virtual string[] Highlight(string field, Query query, IndexSearcher searc public virtual string[] Highlight(string field, Query query, IndexSearcher searcher, TopDocs topDocs, int maxPassages) { IDictionary res = HighlightFields(new string[] { field }, query, searcher, topDocs, new int[] { maxPassages }); - string[] result; - res.TryGetValue(field, out result); + res.TryGetValue(field, out string[] result); return result; } @@ -509,7 +508,7 @@ private IDictionary HighlightField(string field, string[] contents, // check if we should do any multiterm processing Analyzer analyzer = GetIndexAnalyzer(field); - CharacterRunAutomaton[] automata = new 
CharacterRunAutomaton[0]; + CharacterRunAutomaton[] automata = Arrays.Empty(); if (analyzer != null) { automata = MultiTermHighlighting.ExtractAutomata(query, field); diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs index 1c328c2156..944bf43fab 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragListBuilder.cs @@ -34,7 +34,7 @@ public abstract class BaseFragListBuilder : IFragListBuilder internal readonly int margin; internal readonly int minFragCharSize; - public BaseFragListBuilder(int margin) + protected BaseFragListBuilder(int margin) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { if (margin < 0) throw new ArgumentException("margin(" + margin + ") is too small. It must be 0 or higher."); @@ -43,7 +43,7 @@ public BaseFragListBuilder(int margin) this.minFragCharSize = Math.Max(1, margin * MIN_FRAG_CHAR_SIZE_FACTOR); } - public BaseFragListBuilder() + protected BaseFragListBuilder() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(MARGIN_DEFAULT) { } @@ -159,7 +159,7 @@ public T RemoveTop() } else { - top = default(T); + top = default; } return currentTop; } diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs index 8f8ee251b7..3c44972c19 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldFragList.cs @@ -29,13 +29,15 @@ namespace Lucene.Net.Search.VectorHighlight /// public abstract class FieldFragList { - private List fragInfos = new List(); + private readonly List fragInfos = new List(); /// /// a constructor. 
/// /// the length (number of chars) of a fragment - public FieldFragList(int fragCharSize) +#pragma warning disable IDE0060 // Remove unused parameter + protected FieldFragList(int fragCharSize) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) +#pragma warning restore IDE0060 // Remove unused parameter { } @@ -57,10 +59,10 @@ public FieldFragList(int fragCharSize) /// public class WeightedFragInfo { - private IList subInfos; - private float totalBoost; - private int startOffset; - private int endOffset; + private readonly IList subInfos; // LUCENENET: marked readonly + private readonly float totalBoost; // LUCENENET: marked readonly + private readonly int startOffset; // LUCENENET: marked readonly + private readonly int endOffset; // LUCENENET: marked readonly public WeightedFragInfo(int startOffset, int endOffset, IList subInfos, float totalBoost) { diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs index 7973e11be1..692cd6de54 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs @@ -62,14 +62,14 @@ public FieldPhraseList(FieldTermStack fieldTermStack, FieldQuery fieldQuery, int string field = fieldTermStack.FieldName; List phraseCandidate = new List(); - QueryPhraseMap currMap = null; - QueryPhraseMap nextMap = null; + QueryPhraseMap currMap; // LUCENENET: IDE0059: Remove unnecessary value assignment + QueryPhraseMap nextMap; // LUCENENET: IDE0059: Remove unnecessary value assignment while (!fieldTermStack.IsEmpty && (phraseList.Count < phraseLimit)) { phraseCandidate.Clear(); - TermInfo ti = null; - TermInfo first = null; + TermInfo ti; // LUCENENET: IDE0059: Remove unnecessary value assignment + TermInfo first; // LUCENENET: IDE0059: Remove unnecessary value assignment first = ti = fieldTermStack.Pop(); currMap = fieldQuery.GetFieldTermMap(field, 
ti.Text); @@ -153,51 +153,49 @@ public FieldPhraseList(FieldPhraseList[] toMerge) { allInfos[index++] = fplToMerge.phraseList.GetEnumerator(); } - using (MergedEnumerator itr = new MergedEnumerator(false, allInfos)) + using MergedEnumerator itr = new MergedEnumerator(false, allInfos); + // Step 2. Walk the sorted list merging infos that overlap + phraseList = new List(); + if (!itr.MoveNext()) { - // Step 2. Walk the sorted list merging infos that overlap - phraseList = new List(); - if (!itr.MoveNext()) + return; + } + List work = new List(); + WeightedPhraseInfo first = itr.Current; + work.Add(first); + int workEndOffset = first.EndOffset; + while (itr.MoveNext()) + { + WeightedPhraseInfo current = itr.Current; + if (current.StartOffset <= workEndOffset) { - return; + workEndOffset = Math.Max(workEndOffset, current.EndOffset); + work.Add(current); } - List work = new List(); - WeightedPhraseInfo first = itr.Current; - work.Add(first); - int workEndOffset = first.EndOffset; - while (itr.MoveNext()) + else { - WeightedPhraseInfo current = itr.Current; - if (current.StartOffset <= workEndOffset) + if (work.Count == 1) { - workEndOffset = Math.Max(workEndOffset, current.EndOffset); - work.Add(current); + phraseList.Add(work[0]); + work[0] = current; } else { - if (work.Count == 1) - { - phraseList.Add(work[0]); - work[0] = current; - } - else - { - phraseList.Add(new WeightedPhraseInfo(work)); - work.Clear(); - work.Add(current); - } - workEndOffset = current.EndOffset; + phraseList.Add(new WeightedPhraseInfo(work)); + work.Clear(); + work.Add(current); } + workEndOffset = current.EndOffset; } - if (work.Count == 1) - { - phraseList.Add(work[0]); - } - else - { - phraseList.Add(new WeightedPhraseInfo(work)); - work.Clear(); - } + } + if (work.Count == 1) + { + phraseList.Add(work[0]); + } + else + { + phraseList.Add(new WeightedPhraseInfo(work)); + work.Clear(); } } finally @@ -226,12 +224,12 @@ public virtual void AddIfNoOverlap(WeightedPhraseInfo wpi) /// public 
class WeightedPhraseInfo : IComparable { - private List termsOffsets; // usually termsOffsets.size() == 1, - // but if position-gap > 1 and slop > 0 then size() could be greater than 1 - private float boost; // query boost - private int seqnum; + private readonly List termsOffsets; // usually termsOffsets.size() == 1, // LUCENENET: marked readonly + // but if position-gap > 1 and slop > 0 then size() could be greater than 1 + private readonly float boost; // query boost // LUCENENET: marked readonly + private readonly int seqnum; // LUCENENET: marked readonly - private List termsInfos; + private readonly List termsInfos; // LUCENENET: marked readonly /// /// Text of the match, calculated on the fly. Use for debugging only. @@ -311,52 +309,48 @@ public WeightedPhraseInfo(ICollection toMerge) // Pretty much the same idea as merging FieldPhraseLists: // Step 1. Sort by startOffset, endOffset // While we are here merge the boosts and termInfos - using (IEnumerator toMergeItr = toMerge.GetEnumerator()) + using IEnumerator toMergeItr = toMerge.GetEnumerator(); + if (!toMergeItr.MoveNext()) { - if (!toMergeItr.MoveNext()) - { - throw new ArgumentException("toMerge must contain at least one WeightedPhraseInfo."); - } - WeightedPhraseInfo first = toMergeItr.Current; - - termsInfos = new List(); - seqnum = first.seqnum; - boost = first.boost; - allToffs[0] = first.termsOffsets.GetEnumerator(); - int index = 1; - while (toMergeItr.MoveNext()) + throw new ArgumentException("toMerge must contain at least one WeightedPhraseInfo."); + } + WeightedPhraseInfo first = toMergeItr.Current; + + termsInfos = new List(); + seqnum = first.seqnum; + boost = first.boost; + allToffs[0] = first.termsOffsets.GetEnumerator(); + int index = 1; + while (toMergeItr.MoveNext()) + { + WeightedPhraseInfo info = toMergeItr.Current; + boost += info.boost; + termsInfos.AddRange(info.termsInfos); + allToffs[index++] = info.termsOffsets.GetEnumerator(); + } + + // Step 2. 
Walk the sorted list merging overlaps + using MergedEnumerator itr = new MergedEnumerator(false, allToffs); + termsOffsets = new List(); + if (!itr.MoveNext()) + { + return; + } + Toffs work = itr.Current; + while (itr.MoveNext()) + { + Toffs current = itr.Current; + if (current.StartOffset <= work.EndOffset) { - WeightedPhraseInfo info = toMergeItr.Current; - boost += info.boost; - termsInfos.AddRange(info.termsInfos); - allToffs[index++] = info.termsOffsets.GetEnumerator(); + work.EndOffset = Math.Max(work.EndOffset, current.EndOffset); } - - // Step 2. Walk the sorted list merging overlaps - using (MergedEnumerator itr = new MergedEnumerator(false, allToffs)) + else { - termsOffsets = new List(); - if (!itr.MoveNext()) - { - return; - } - Toffs work = itr.Current; - while (itr.MoveNext()) - { - Toffs current = itr.Current; - if (current.StartOffset <= work.EndOffset) - { - work.EndOffset = Math.Max(work.EndOffset, current.EndOffset); - } - else - { - termsOffsets.Add(work); - work = current; - } - } termsOffsets.Add(work); + work = current; } } + termsOffsets.Add(work); } finally { @@ -470,7 +464,7 @@ public override bool Equals(object obj) /// public class Toffs : IComparable { - private int startOffset; + private readonly int startOffset; // LUCENENET: marked readonly private int endOffset; public Toffs(int startOffset, int endOffset) { diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldQuery.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldQuery.cs index 7080b9520f..dc52f298c0 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldQuery.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldQuery.cs @@ -43,7 +43,7 @@ public class FieldQuery internal int termOrPhraseNumber; // used for colored tag support // The maximum number of different matching terms accumulated from any one MultiTermQuery - private static readonly int MAX_MTQ_TERMS = 1024; + private const int MAX_MTQ_TERMS = 1024; internal FieldQuery(Query query, IndexReader reader, 
bool phraseHighlight, bool fieldMatch) { @@ -57,10 +57,9 @@ internal FieldQuery(Query query, IndexReader reader, bool phraseHighlight, bool foreach (Query flatQuery in expandQueries) { QueryPhraseMap rootMap = GetRootMap(flatQuery); - rootMap.Add(flatQuery, reader); - if (!phraseHighlight && flatQuery is PhraseQuery) + rootMap.Add(flatQuery /*, reader // LUCENENET: Never read */); + if (!phraseHighlight && flatQuery is PhraseQuery pq) { - PhraseQuery pq = (PhraseQuery)flatQuery; if (pq.GetTerms().Length > 1) { foreach (Term term in pq.GetTerms()) @@ -84,9 +83,8 @@ internal FieldQuery(Query query, bool phraseHighlight, bool fieldMatch) internal void Flatten(Query sourceQuery, IndexReader reader, ICollection flatQueries) { - if (sourceQuery is BooleanQuery) + if (sourceQuery is BooleanQuery bq) { - BooleanQuery bq = (BooleanQuery)sourceQuery; foreach (BooleanClause clause in bq) { if (!clause.IsProhibited) @@ -95,9 +93,8 @@ internal void Flatten(Query sourceQuery, IndexReader reader, ICollection } } } - else if (sourceQuery is DisjunctionMaxQuery) + else if (sourceQuery is DisjunctionMaxQuery dmq) { - DisjunctionMaxQuery dmq = (DisjunctionMaxQuery)sourceQuery; foreach (Query query in dmq) { Flatten(ApplyParentBoost(query, dmq), reader, flatQueries); @@ -108,32 +105,33 @@ internal void Flatten(Query sourceQuery, IndexReader reader, ICollection if (!flatQueries.Contains(sourceQuery)) flatQueries.Add(sourceQuery); } - else if (sourceQuery is PhraseQuery) + else if (sourceQuery is PhraseQuery pq) { if (!flatQueries.Contains(sourceQuery)) // LUCENENET - set semantics, but this is a list. The original logic was already correct. 
{ - PhraseQuery pq = (PhraseQuery)sourceQuery; if (pq.GetTerms().Length > 1) flatQueries.Add(pq); else if (pq.GetTerms().Length == 1) { - Query flat = new TermQuery(pq.GetTerms()[0]); - flat.Boost = pq.Boost; + Query flat = new TermQuery(pq.GetTerms()[0]) + { + Boost = pq.Boost + }; flatQueries.Add(flat); } } } - else if (sourceQuery is ConstantScoreQuery) + else if (sourceQuery is ConstantScoreQuery constantScoreQuery) { - Query q = ((ConstantScoreQuery)sourceQuery).Query; + Query q = constantScoreQuery.Query; if (q != null) { Flatten(ApplyParentBoost(q, sourceQuery), reader, flatQueries); } } - else if (sourceQuery is FilteredQuery) + else if (sourceQuery is FilteredQuery filteredQuery) { - Query q = ((FilteredQuery)sourceQuery).Query; + Query q = filteredQuery.Query; if (q != null) { Flatten(ApplyParentBoost(q, sourceQuery), reader, flatQueries); @@ -203,14 +201,12 @@ internal ICollection Expand(ICollection flatQueries) } expandQueries.Add(query); if (!(query is PhraseQuery)) continue; - using (IEnumerator j = flatQueries.GetEnumerator()) + using IEnumerator j = flatQueries.GetEnumerator(); + while (j.MoveNext()) { - while (j.MoveNext()) - { - Query qj = j.Current; - if (!(qj is PhraseQuery)) continue; - CheckOverlap(expandQueries, (PhraseQuery)query, (PhraseQuery)qj); - } + Query qj = j.Current; + if (!(qj is PhraseQuery)) continue; + CheckOverlap(expandQueries, (PhraseQuery)query, (PhraseQuery)qj); } } @@ -264,7 +260,7 @@ private void CheckOverlap(ICollection expandQueries, PhraseQuery a, Phras /// ex8) src="a b c d", dest="b c" => no overlap /// /// - private void CheckOverlap(ICollection expandQueries, Term[] src, Term[] dest, int slop, float boost) + private static void CheckOverlap(ICollection expandQueries, Term[] src, Term[] dest, int slop, float boost) // LUCENENET: CA1822: Mark members as static { // beginning from 1 (not 0) is safe because that the PhraseQuery has multiple terms // is guaranteed in flatten() method (if PhraseQuery has only one term, 
flatten() @@ -300,8 +296,7 @@ private void CheckOverlap(ICollection expandQueries, Term[] src, Term[] d internal QueryPhraseMap GetRootMap(Query query) { string key = GetKey(query); - QueryPhraseMap map; - if (!rootMaps.TryGetValue(key, out map) || map == null) + if (!rootMaps.TryGetValue(key, out QueryPhraseMap map) || map == null) { map = new QueryPhraseMap(this); rootMaps[key] = map; @@ -316,17 +311,16 @@ internal QueryPhraseMap GetRootMap(Query query) private string GetKey(Query query) { if (!fieldMatch) return null; - if (query is TermQuery) - return ((TermQuery)query).Term.Field; - else if (query is PhraseQuery) + if (query is TermQuery termQuery) + return termQuery.Term.Field; + else if (query is PhraseQuery pq) { - PhraseQuery pq = (PhraseQuery)query; Term[] terms = pq.GetTerms(); return terms[0].Field; } - else if (query is MultiTermQuery) + else if (query is MultiTermQuery multiTermQuery) { - return ((MultiTermQuery)query).Field; + return multiTermQuery.Field; } else throw new Exception("query \"" + query.ToString() + "\" must be flatten first."); @@ -359,11 +353,11 @@ internal void SaveTerms(ICollection flatQueries, IndexReader reader) foreach (Query query in flatQueries) { ISet termSet = GetTermSet(query); - if (query is TermQuery) - termSet.Add(((TermQuery)query).Term.Text()); - else if (query is PhraseQuery) + if (query is TermQuery termQuery) + termSet.Add(termQuery.Term.Text()); + else if (query is PhraseQuery phraseQuery) { - foreach (Term term in ((PhraseQuery)query).GetTerms()) + foreach (Term term in phraseQuery.GetTerms()) termSet.Add(term.Text()); } else if (query is MultiTermQuery && reader != null) @@ -458,15 +452,14 @@ private QueryPhraseMap GetOrNewMap(IDictionary subMap, s return map; } - internal void Add(Query query, IndexReader reader) + internal void Add(Query query /*, IndexReader reader // LUCENENET: Never read */) { - if (query is TermQuery) + if (query is TermQuery termQuery) { - AddTerm(((TermQuery)query).Term, query.Boost); + 
AddTerm(termQuery.Term, query.Boost); } - else if (query is PhraseQuery) + else if (query is PhraseQuery pq) { - PhraseQuery pq = (PhraseQuery)query; Term[] terms = pq.GetTerms(); IDictionary map = subMap; QueryPhraseMap qpm = null; @@ -483,8 +476,7 @@ internal void Add(Query query, IndexReader reader) public virtual QueryPhraseMap GetTermMap(string term) { - QueryPhraseMap result; - subMap.TryGetValue(term, out result); + subMap.TryGetValue(term, out QueryPhraseMap result); return result; } diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/SingleFragListBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/SingleFragListBuilder.cs index 981868dd4a..ce20f30c58 100644 --- a/src/Lucene.Net.Highlighter/VectorHighlight/SingleFragListBuilder.cs +++ b/src/Lucene.Net.Highlighter/VectorHighlight/SingleFragListBuilder.cs @@ -38,21 +38,19 @@ public virtual FieldFragList CreateFieldFragList(FieldPhraseList fieldPhraseList FieldFragList ffl = new SimpleFieldFragList(fragCharSize); List wpil = new List(); - using (IEnumerator ite = fieldPhraseList.PhraseList.GetEnumerator()) + using IEnumerator ite = fieldPhraseList.PhraseList.GetEnumerator(); + WeightedPhraseInfo phraseInfo = null; + while (true) { - WeightedPhraseInfo phraseInfo = null; - while (true) - { - if (!ite.MoveNext()) break; - phraseInfo = ite.Current; - if (phraseInfo == null) break; + if (!ite.MoveNext()) break; + phraseInfo = ite.Current; + if (phraseInfo == null) break; - wpil.Add(phraseInfo); - } - if (wpil.Count > 0) - ffl.Add(0, int.MaxValue, wpil); - return ffl; + wpil.Add(phraseInfo); } + if (wpil.Count > 0) + ffl.Add(0, int.MaxValue, wpil); + return ffl; } } } diff --git a/src/Lucene.Net.Join/TermsIncludingScoreQuery.cs b/src/Lucene.Net.Join/TermsIncludingScoreQuery.cs index 8bca375bc8..c28ca38314 100644 --- a/src/Lucene.Net.Join/TermsIncludingScoreQuery.cs +++ b/src/Lucene.Net.Join/TermsIncludingScoreQuery.cs @@ -84,7 +84,7 @@ public override Query Rewrite(IndexReader reader) public override bool 
Equals(object obj) { - if (ReferenceEquals(null, obj)) return false; + if (obj is null) return false; if (ReferenceEquals(this, obj)) return true; if (!base.Equals(obj)) return false; if (obj.GetType() != GetType()) return false; @@ -123,7 +123,7 @@ private class WeightAnonymousInnerClassHelper : Weight { private readonly TermsIncludingScoreQuery outerInstance; - private Weight originalWeight; + private readonly Weight originalWeight; public WeightAnonymousInnerClassHelper(TermsIncludingScoreQuery outerInstance, Weight originalWeight) { @@ -194,17 +194,19 @@ public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool score return null; } // what is the runtime...seems ok? - long cost = context.AtomicReader.MaxDoc * terms.Count; + //long cost = context.AtomicReader.MaxDoc * terms.Count; // LUCENENET: IDE0059: Remove unnecessary value assignment segmentTermsEnum = terms.GetEnumerator(segmentTermsEnum); // Optimized impls that take advantage of docs // being allowed to be out of order: if (outerInstance._multipleValuesPerDocument) { - return new MVInnerScorer(outerInstance, this, acceptDocs, segmentTermsEnum, context.AtomicReader.MaxDoc, cost); + return new MVInnerScorer(outerInstance, /*this, // LUCENENET: Never read */ + acceptDocs, segmentTermsEnum, context.AtomicReader.MaxDoc /*, cost // LUCENENET: Never read */); } - return new SVInnerScorer(outerInstance, this, acceptDocs, segmentTermsEnum, cost); + return new SVInnerScorer(outerInstance, /*this, // LUCENENET: Never read */ + acceptDocs, segmentTermsEnum /*, cost // LUCENENET: Never read */); } } @@ -216,7 +218,7 @@ internal class SVInnerScorer : BulkScorer private readonly BytesRef _spare = new BytesRef(); private readonly IBits _acceptDocs; private readonly TermsEnum _termsEnum; - private readonly long _cost; + //private readonly long _cost; // LUCENENET: Never read private int _upto; internal DocsEnum docsEnum; @@ -224,12 +226,13 @@ internal class SVInnerScorer : BulkScorer private int 
_scoreUpto; private int _doc; - internal SVInnerScorer(TermsIncludingScoreQuery outerInstance, Weight weight, IBits acceptDocs, TermsEnum termsEnum, long cost) + internal SVInnerScorer(TermsIncludingScoreQuery outerInstance, /* Weight weight, // LUCENENET: Never read */ + IBits acceptDocs, TermsEnum termsEnum /*, long cost // LUCENENET: Never read */) { this.outerInstance = outerInstance; _acceptDocs = acceptDocs; _termsEnum = termsEnum; - _cost = cost; + //_cost = cost; // LUCENENET: Never read _doc = -1; } @@ -298,7 +301,7 @@ internal Explanation Explain(int target) // LUCENENET NOTE: changed accessibilit int tempDocId = docsEnum.Advance(target); if (tempDocId == target) { - docId = tempDocId; + //docId = tempDocId; // LUCENENET: IDE0059: Remove unnecessary value assignment break; } } @@ -319,16 +322,13 @@ internal Explanation Explain(int target) // LUCENENET NOTE: changed accessibilit // even if other join values yield a higher score. internal class MVInnerScorer : SVInnerScorer { - private readonly TermsIncludingScoreQuery outerInstance; - - internal readonly FixedBitSet alreadyEmittedDocs; - internal MVInnerScorer(TermsIncludingScoreQuery outerInstance, Weight weight, IBits acceptDocs, - TermsEnum termsEnum, int maxDoc, long cost) - : base(outerInstance, weight, acceptDocs, termsEnum, cost) + internal MVInnerScorer(TermsIncludingScoreQuery outerInstance, /* Weight weight, // LUCENENET: Never read */ + IBits acceptDocs, TermsEnum termsEnum, int maxDoc /*, long cost // LUCENENET: Never read */) + : base(outerInstance, /*weight, // LUCENENET: Never read */ + acceptDocs, termsEnum /*, cost // LUCENENET: Never read */) { - this.outerInstance = outerInstance; alreadyEmittedDocs = new FixedBitSet(maxDoc); } diff --git a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs index bda6f6b627..ad0a6d360f 100644 --- a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs @@ -355,9 +355,9 @@ 
public override string ToString(string field) public override bool Equals(object obj) { - if (obj is ToChildBlockJoinQuery) + if (obj is null) return false; + if (obj is ToChildBlockJoinQuery other) { - ToChildBlockJoinQuery other = (ToChildBlockJoinQuery)obj; return _origParentQuery.Equals(other._origParentQuery) && _parentsFilter.Equals(other._parentsFilter) && _doScores == other._doScores && diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs index 1b9df09fb4..79a4e7ebdc 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs @@ -92,7 +92,7 @@ public class ToParentBlockJoinCollector : ICollector private readonly bool trackScores; private int docBase; - private ToParentBlockJoinQuery.BlockJoinScorer[] joinScorers = new ToParentBlockJoinQuery.BlockJoinScorer[0]; + private ToParentBlockJoinQuery.BlockJoinScorer[] joinScorers = Arrays.Empty(); private AtomicReaderContext currentReaderContext; private Scorer scorer; private bool queueFull; @@ -320,10 +320,9 @@ public virtual void SetNextReader(AtomicReaderContext context) private void Enroll(ToParentBlockJoinQuery query, ToParentBlockJoinQuery.BlockJoinScorer scorer) { scorer.TrackPendingChildHits(); - int? slot; - if (joinQueryID.TryGetValue(query, out slot)) + if (joinQueryID.TryGetValue(query, out int? 
slot)) { - joinScorers[(int) slot] = scorer; + joinScorers[(int)slot] = scorer; } else { @@ -356,9 +355,9 @@ public virtual void SetScorer(Scorer scorer) while (queue2.TryDequeue(out scorer)) { //System.out.println(" poll: " + value + "; " + value.getWeight().getQuery()); - if (scorer is ToParentBlockJoinQuery.BlockJoinScorer) + if (scorer is ToParentBlockJoinQuery.BlockJoinScorer blockJoinScorer) { - Enroll((ToParentBlockJoinQuery)scorer.Weight.Query, (ToParentBlockJoinQuery.BlockJoinScorer)scorer); + Enroll((ToParentBlockJoinQuery)scorer.Weight.Query, blockJoinScorer); } foreach (Scorer.ChildScorer sub in scorer.GetChildren()) @@ -398,8 +397,7 @@ private void SortQueue() /// if there is a low-level I/O error public virtual ITopGroups GetTopGroups(ToParentBlockJoinQuery query, Sort withinGroupSort, int offset, int maxDocsPerGroup, int withinGroupOffset, bool fillSortFields) { - int? slot; - if (!joinQueryID.TryGetValue(query, out slot)) + if (!joinQueryID.TryGetValue(query, out int? slot)) { if (totalHitCount == 0) { @@ -522,7 +520,7 @@ private ITopGroups AccumulateGroups(int slot, int offset, int maxDocsPerGro groups[groupIdx - offset] = new GroupDocs(og.Score, topDocs.MaxScore, numChildDocs, topDocs.ScoreDocs, og.Doc, groupSortValues); } - return new TopGroups(new TopGroups(sort.GetSort(), withinGroupSort == null ? 
null : withinGroupSort.GetSort(), 0, totalGroupedHitCount, groups, maxScore), totalHitCount); + return new TopGroups(new TopGroups(sort.GetSort(), withinGroupSort?.GetSort(), 0, totalGroupedHitCount, groups, maxScore), totalHitCount); } /// diff --git a/src/Lucene.Net.Join/ToParentBlockJoinFieldComparator.cs b/src/Lucene.Net.Join/ToParentBlockJoinFieldComparator.cs index 176e6c7c57..07843d4b27 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinFieldComparator.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinFieldComparator.cs @@ -68,9 +68,9 @@ public override FieldComparer SetNextReader(AtomicReaderContext context) { _childDocuments = null; } - else if (innerDocuments is FixedBitSet) + else if (innerDocuments is FixedBitSet fixedBitSet) { - _childDocuments = (FixedBitSet)innerDocuments; + _childDocuments = fixedBitSet; } else { @@ -82,9 +82,9 @@ public override FieldComparer SetNextReader(AtomicReaderContext context) { _parentDocuments = null; } - else if (rootDocuments is FixedBitSet) + else if (rootDocuments is FixedBitSet fixedBitSet) { - _parentDocuments = (FixedBitSet)rootDocuments; + _parentDocuments = fixedBitSet; } else { diff --git a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs index ae816ec7cd..611e26b886 100644 --- a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs +++ b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs @@ -471,9 +471,9 @@ public override string ToString(string field) public override bool Equals(object obj) { - if (obj is ToParentBlockJoinQuery) + if (obj is null) return false; + if (obj is ToParentBlockJoinQuery other) { - ToParentBlockJoinQuery other = (ToParentBlockJoinQuery)obj; return _origChildQuery.Equals(other._origChildQuery) && _parentsFilter.Equals(other._parentsFilter) && _scoreMode == other._scoreMode && diff --git a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs index fb43a9b43a..c418ea2509 100644 --- 
a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs @@ -7,6 +7,7 @@ using System.Collections; using System.Collections.Generic; using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Index.Memory { @@ -168,7 +169,7 @@ private class TermsAnonymousInnerClassHelper : Terms { private readonly MemoryFields outerInstance; - private MemoryIndex.Info info; + private readonly MemoryIndex.Info info; public TermsAnonymousInnerClassHelper(MemoryFields outerInstance, MemoryIndex.Info info) { @@ -231,7 +232,7 @@ public MemoryTermsEnum(MemoryIndex.MemoryIndexReader outerInstance, Info info) internal int BinarySearch(BytesRef b, BytesRef bytesRef, int low, int high, BytesRefHash hash, int[] ords, IComparer comparer) { - int mid = 0; + int mid; // LUCENENET: IDE0059: Remove unnecessary value assignment while (low <= high) { mid = (int)((uint)(low + high) >> 1); @@ -322,7 +323,7 @@ public override BytesRef Next() public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { if (reuse is null || !(reuse is MemoryDocsEnum toReuse)) - toReuse = new MemoryDocsEnum(outerInstance); + toReuse = new MemoryDocsEnum(); return toReuse.Reset(liveDocs, info.sliceArray.freq[info.sortedTerms[termUpto]]); } @@ -354,11 +355,8 @@ public override TermState GetTermState() private class MemoryDocsEnum : DocsEnum { - private readonly MemoryIndex.MemoryIndexReader outerInstance; - - public MemoryDocsEnum(MemoryIndex.MemoryIndexReader outerInstance) + public MemoryDocsEnum() { - this.outerInstance = outerInstance; } internal bool hasNext; @@ -521,6 +519,7 @@ internal IndexSearcher Searcher set => this.searcher = value; } + [SuppressMessage("Style", "IDE0025:Use expression body for properties", Justification = "Multiple lines")] public override int NumDocs { get @@ -532,6 +531,7 @@ public override int NumDocs } } + [SuppressMessage("Style", "IDE0025:Use expression body for properties", 
Justification = "Multiple lines")] public override int MaxDoc { get @@ -565,8 +565,7 @@ protected internal override void DoClose() public override NumericDocValues GetNormValues(string field) { - FieldInfo fieldInfo; - if (!outerInstance.fieldInfos.TryGetValue(field, out fieldInfo) || fieldInfo.OmitsNorms) + if (!outerInstance.fieldInfos.TryGetValue(field, out FieldInfo fieldInfo) || fieldInfo.OmitsNorms) { return null; } diff --git a/src/Lucene.Net.Memory/MemoryIndex.cs b/src/Lucene.Net.Memory/MemoryIndex.cs index 19ea43eba4..eaba4548f4 100644 --- a/src/Lucene.Net.Memory/MemoryIndex.cs +++ b/src/Lucene.Net.Memory/MemoryIndex.cs @@ -179,9 +179,9 @@ public partial class MemoryIndex // private final IntBlockPool.SliceReader postingsReader; private readonly Int32BlockPool.SliceWriter postingsWriter; - private Dictionary fieldInfos = new Dictionary(); + private readonly Dictionary fieldInfos = new Dictionary(); // LUCENENET: marked readonly - private Counter bytesUsed; + private readonly Counter bytesUsed; // LUCENENET: marked readonly /// /// Constructs an empty instance. 
@@ -277,19 +277,13 @@ public virtual TokenStream KeywordTokenStream(ICollection keywords) throw new ArgumentException("keywords must not be null"); } - return new TokenStreamAnonymousInnerClassHelper(this, keywords); + return new TokenStreamAnonymousInnerClassHelper(keywords); } private sealed class TokenStreamAnonymousInnerClassHelper : TokenStream { - private readonly MemoryIndex outerInstance; - - private ICollection keywords; - - public TokenStreamAnonymousInnerClassHelper(MemoryIndex outerInstance, ICollection keywords) + public TokenStreamAnonymousInnerClassHelper(ICollection keywords) { - this.outerInstance = outerInstance; - this.keywords = keywords; iter = keywords.GetEnumerator(); start = 0; termAtt = AddAttribute(); @@ -321,6 +315,30 @@ public override bool IncrementToken() start += term.Length + 1; // separate words by 1 (blank) character return true; } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + iter?.Dispose(); // LUCENENET specific - dispose iter and set to null + iter = null; + } + } + finally + { + base.Dispose(disposing); + } + } } /// @@ -404,10 +422,9 @@ public virtual void AddField(string fieldName, TokenStream stream, float boost, int pos = -1; BytesRefHash terms; SliceByteStartArray sliceArray; - Info info = null; long sumTotalTermFreq = 0; int offset = 0; - if (fields.TryGetValue(fieldName, out info)) + if (fields.TryGetValue(fieldName, out Info info)) { numTokens = info.numTokens; numOverlapTokens = info.numOverlapTokens; @@ -540,7 +557,7 @@ public virtual float Search(Query query) try { float[] scores = new float[1]; // inits to 0.0f (no match) - searcher.Search(query, new CollectorAnonymousInnerClassHelper(this, scores)); + searcher.Search(query, new CollectorAnonymousInnerClassHelper(scores)); float score = scores[0]; return score; } // can never happen (RAMDirectory) @@ -569,13 +586,10 @@ public virtual float Search(Query query) private class CollectorAnonymousInnerClassHelper : ICollector { - private readonly MemoryIndex outerInstance; - - private float[] scores; + private readonly float[] scores; - public CollectorAnonymousInnerClassHelper(MemoryIndex outerInstance, float[] scores) + public CollectorAnonymousInnerClassHelper(float[] scores) { - this.outerInstance = outerInstance; this.scores = scores; } diff --git a/src/Lucene.Net.Misc/Document/LazyDocument.cs b/src/Lucene.Net.Misc/Document/LazyDocument.cs index 2c076bcc1c..1436cae82f 100644 --- a/src/Lucene.Net.Misc/Document/LazyDocument.cs +++ b/src/Lucene.Net.Misc/Document/LazyDocument.cs @@ -40,8 +40,8 @@ public class LazyDocument // null until first field is loaded private Document doc; - private IDictionary> fields = new Dictionary>(); - private ISet fieldNames = new JCG.HashSet(); + private readonly IDictionary> fields = new Dictionary>(); // LUCENENET: marked readonly + 
private readonly ISet fieldNames = new JCG.HashSet(); // LUCENENET: marked readonly public LazyDocument(IndexReader reader, int docID) { @@ -67,8 +67,7 @@ public LazyDocument(IndexReader reader, int docID) public virtual IIndexableField GetField(FieldInfo fieldInfo) { fieldNames.Add(fieldInfo.Name); - IList values; - if (!fields.TryGetValue(fieldInfo.Number, out values) || null == values) + if (!fields.TryGetValue(fieldInfo.Number, out IList values) || null == values) { values = new List(); fields[fieldInfo.Number] = values; @@ -116,8 +115,7 @@ private void FetchRealValues(string name, int fieldNum) { Document d = GetDocument(); - IList lazyValues; - fields.TryGetValue(fieldNum, out lazyValues); + fields.TryGetValue(fieldNum, out IList lazyValues); IIndexableField[] realValues = d.GetFields(name); if (Debugging.AssertsEnabled) Debugging.Assert(realValues.Length <= lazyValues.Count, diff --git a/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs b/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs index d83f339517..b361326102 100644 --- a/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs +++ b/src/Lucene.Net.Misc/Index/CompoundFileExtractor.cs @@ -26,7 +26,7 @@ namespace Lucene.Net.Index /// /// Command-line tool for extracting sub-files out of a compound file. /// - public class CompoundFileExtractor + public static class CompoundFileExtractor // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// /// Prints the filename and size of each file within a given compound file. 
@@ -105,24 +105,18 @@ public static void Main(string[] args) if (extract) { Console.WriteLine("extract " + files[i] + " with " + len + " bytes to local directory..."); - using (IndexInput ii = cfr.OpenInput(files[i], context)) - { - - using (FileStream f = new FileStream(files[i], FileMode.Open, FileAccess.ReadWrite)) - { + using IndexInput ii = cfr.OpenInput(files[i], context); + using FileStream f = new FileStream(files[i], FileMode.Open, FileAccess.ReadWrite); - // read and write with a small buffer, which is more effective than reading byte by byte - byte[] buffer = new byte[1024]; - int chunk = buffer.Length; - while (len > 0) - { - int bufLen = (int)Math.Min(chunk, len); - ii.ReadBytes(buffer, 0, bufLen); - f.Write(buffer, 0, bufLen); - len -= bufLen; - } - - } + // read and write with a small buffer, which is more effective than reading byte by byte + byte[] buffer = new byte[1024]; + int chunk = buffer.Length; + while (len > 0) + { + int bufLen = (int)Math.Min(chunk, len); + ii.ReadBytes(buffer, 0, bufLen); + f.Write(buffer, 0, bufLen); + len -= bufLen; } } else diff --git a/src/Lucene.Net.Misc/Index/IndexSplitter.cs b/src/Lucene.Net.Misc/Index/IndexSplitter.cs index 33ce5d7664..304539e6a0 100644 --- a/src/Lucene.Net.Misc/Index/IndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/IndexSplitter.cs @@ -173,11 +173,9 @@ public virtual void Split(DirectoryInfo destDir, ICollection segs) // LU private static void CopyFile(FileInfo src, FileInfo dst) { - using (Stream @in = new FileStream(src.FullName, FileMode.Open, FileAccess.Read)) - using (Stream @out = new FileStream(dst.FullName, FileMode.OpenOrCreate, FileAccess.Write)) - { - @in.CopyTo(@out); - } + using Stream @in = new FileStream(src.FullName, FileMode.Open, FileAccess.Read); + using Stream @out = new FileStream(dst.FullName, FileMode.OpenOrCreate, FileAccess.Write); + @in.CopyTo(@out); } } } \ No newline at end of file diff --git a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs 
b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs index 29c6f005dd..e1b597f93d 100644 --- a/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs +++ b/src/Lucene.Net.Misc/Index/MultiPassIndexSplitter.cs @@ -110,14 +110,12 @@ public virtual void Split(LuceneVersion version, IndexReader @in, Store.Director } } } - using (IndexWriter w = new IndexWriter(outputs[i], - new IndexWriterConfig(version, null) { OpenMode = OpenMode.CREATE })) - { - Console.Error.WriteLine("Writing part " + (i + 1) + " ..."); - // pass the subreaders directly, as our wrapper's numDocs/hasDeletetions are not up-to-date - IList sr = input.GetSequentialSubReaders(); - w.AddIndexes(sr.ToArray()); // TODO: maybe take List here? - } + using IndexWriter w = new IndexWriter(outputs[i], + new IndexWriterConfig(version, null) { OpenMode = OpenMode.CREATE }); + Console.Error.WriteLine("Writing part " + (i + 1) + " ..."); + // pass the subreaders directly, as our wrapper's numDocs/hasDeletions are not up-to-date + IList sr = input.GetSequentialSubReaders(); + w.AddIndexes(sr.ToArray()); // TODO: maybe take List here?
} Console.Error.WriteLine("Done."); } @@ -163,23 +161,21 @@ public static void Main(string[] args) Console.Error.WriteLine("Invalid input path - skipping: " + file); continue; } - using (Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[i]))) + using Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[i])); + try { - try - { - if (!DirectoryReader.IndexExists(dir)) - { - Console.Error.WriteLine("Invalid input index - skipping: " + file); - continue; - } - } - catch (Exception) + if (!DirectoryReader.IndexExists(dir)) { Console.Error.WriteLine("Invalid input index - skipping: " + file); continue; } - indexes.Add(DirectoryReader.Open(dir)); } + catch (Exception) + { + Console.Error.WriteLine("Invalid input index - skipping: " + file); + continue; + } + indexes.Add(DirectoryReader.Open(dir)); } } if (outDir == null) diff --git a/src/Lucene.Net.Misc/Index/Sorter/BlockJoinComparatorSource.cs b/src/Lucene.Net.Misc/Index/Sorter/BlockJoinComparatorSource.cs index 2a10dacfc2..cf41032ee4 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/BlockJoinComparatorSource.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/BlockJoinComparatorSource.cs @@ -103,12 +103,12 @@ private class FieldComparerAnonymousInnerClassHelper : FieldComparer { private readonly BlockJoinComparerSource outerInstance; - private int[] parentSlots; - private int[] childSlots; - private int[] parentReverseMul; - private FieldComparer[] parentComparers; - private int[] childReverseMul; - private FieldComparer[] childComparers; + private readonly int[] parentSlots; + private readonly int[] childSlots; + private readonly int[] parentReverseMul; + private readonly FieldComparer[] parentComparers; + private readonly int[] childReverseMul; + private readonly FieldComparer[] childComparers; public FieldComparerAnonymousInnerClassHelper(BlockJoinComparerSource outerInstance, int[] parentSlots, int[] childSlots, int[] parentReverseMul, FieldComparer[] parentComparers, diff --git 
a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs index 47a6339b95..a6abc79699 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs @@ -214,9 +214,9 @@ private static Sorter.DocMap Sort(int maxDoc, DocComparer comparer) private class DocMapAnonymousInnerClassHelper : Sorter.DocMap { - private int maxDoc; - private MonotonicAppendingInt64Buffer newToOld; - private MonotonicAppendingInt64Buffer oldToNew; + private readonly int maxDoc; + private readonly MonotonicAppendingInt64Buffer newToOld; + private readonly MonotonicAppendingInt64Buffer oldToNew; public DocMapAnonymousInnerClassHelper(int maxDoc, MonotonicAppendingInt64Buffer newToOld, MonotonicAppendingInt64Buffer oldToNew) { @@ -266,20 +266,17 @@ internal DocMap Sort(AtomicReader reader) comparers[i].SetNextReader(reader.AtomicContext); comparers[i].SetScorer(FAKESCORER); } - DocComparer comparer = new DocComparerAnonymousInnerClassHelper(this, reverseMul, comparers); + DocComparer comparer = new DocComparerAnonymousInnerClassHelper(reverseMul, comparers); return Sort(reader.MaxDoc, comparer); } private class DocComparerAnonymousInnerClassHelper : DocComparer { - private readonly Sorter outerInstance; + private readonly int[] reverseMul; + private readonly FieldComparer[] comparers; - private int[] reverseMul; - private FieldComparer[] comparers; - - public DocComparerAnonymousInnerClassHelper(Sorter outerInstance, int[] reverseMul, FieldComparer[] comparers) + public DocComparerAnonymousInnerClassHelper(int[] reverseMul, FieldComparer[] comparers) { - this.outerInstance = outerInstance; this.reverseMul = reverseMul; this.comparers = comparers; } diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs index 8147cc3c6a..4bfefdacac 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs +++ 
b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs @@ -143,12 +143,10 @@ public bool Get(int index) public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { DocsEnum inReuse; - SortingDocsEnum wrapReuse; - if (reuse != null && reuse is SortingDocsEnum) + if (reuse != null && reuse is SortingDocsEnum wrapReuse) { // if we're asked to reuse the given DocsEnum and it is Sorting, return // the wrapped one, since some Codecs expect it. - wrapReuse = (SortingDocsEnum)reuse; inReuse = wrapReuse.Wrapped; } else @@ -166,12 +164,10 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { DocsAndPositionsEnum inReuse; - SortingDocsAndPositionsEnum wrapReuse; - if (reuse != null && reuse is SortingDocsAndPositionsEnum) + if (reuse != null && reuse is SortingDocsAndPositionsEnum wrapReuse) { // if we're asked to reuse the given DocsEnum and it is Sorting, return // the wrapped one, since some Codecs expect it. 
- wrapReuse = (SortingDocsAndPositionsEnum)reuse; inReuse = wrapReuse.Wrapped; } else @@ -396,8 +392,8 @@ protected override int CompareSaved(int i, int j) private readonly int maxDoc; private readonly DocFreqSorter sorter; - private int[] docs; - private int[] freqs; + private readonly int[] docs; // LUCENENET: marked readonly + private readonly int[] freqs; // LUCENENET: marked readonly private int docIt = -1; private readonly int upto; private readonly bool withFreqs; @@ -569,8 +565,8 @@ protected override int CompareSaved(int i, int j) private readonly int maxDoc; private readonly DocOffsetSorter sorter; - private int[] docs; - private long[] offsets; + private readonly int[] docs; // LUCENENET: marked readonly + private readonly long[] offsets; // LUCENENET: marked readonly private readonly int upto; private readonly IndexInput postingInput; diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs index c692723a06..ceae5b67f0 100644 --- a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs +++ b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs @@ -143,8 +143,8 @@ private class DocMapAnonymousInnerClassHelper : MergePolicy.DocMap { private readonly SortingOneMerge outerInstance; - private MergeState mergeState; - private MonotonicAppendingInt64Buffer deletes; + private readonly MergeState mergeState; + private readonly MonotonicAppendingInt64Buffer deletes; public DocMapAnonymousInnerClassHelper(SortingOneMerge outerInstance, MergeState mergeState, MonotonicAppendingInt64Buffer deletes) { @@ -189,13 +189,11 @@ public override string SegString(Directory dir) /// public static bool IsSorted(AtomicReader reader, Sort sort) { - if (reader is SegmentReader) + if (reader is SegmentReader segReader) { - SegmentReader segReader = (SegmentReader)reader; IDictionary diagnostics = segReader.SegmentInfo.Info.Diagnostics; - string diagnosticsSort; - if (diagnostics != null - && 
diagnostics.TryGetValue(SORTER_ID_PROP, out diagnosticsSort) + if (diagnostics != null + && diagnostics.TryGetValue(SORTER_ID_PROP, out string diagnosticsSort) && sort.ToString().Equals(diagnosticsSort, StringComparison.Ordinal)) { return true; diff --git a/src/Lucene.Net.Misc/Misc/GetTermInfo.cs b/src/Lucene.Net.Misc/Misc/GetTermInfo.cs index db44cfa60b..4416591019 100644 --- a/src/Lucene.Net.Misc/Misc/GetTermInfo.cs +++ b/src/Lucene.Net.Misc/Misc/GetTermInfo.cs @@ -26,14 +26,14 @@ namespace Lucene.Net.Misc /// /// Utility to get document frequency and total number of occurrences (sum of the tf for each doc) of a term. /// - public class GetTermInfo + public static class GetTermInfo // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public static void Main(string[] args) { - FSDirectory dir = null; - string inputStr = null; - string field = null; + FSDirectory dir; // LUCENENET: IDE0059: Remove unnecessary value assignment + string inputStr; // LUCENENET: IDE0059: Remove unnecessary value assignment + string field; // LUCENENET: IDE0059: Remove unnecessary value assignment if (args.Length == 3) { diff --git a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs index 19876285af..c91be30741 100644 --- a/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs +++ b/src/Lucene.Net.Misc/Misc/HighFreqTerms.cs @@ -37,9 +37,8 @@ namespace Lucene.Net.Misc /// /// /// - public class HighFreqTerms + public static class HighFreqTerms // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - // The top numTerms will be displayed public const int DEFAULT_NUMTERMS = 100; @@ -73,14 +72,12 @@ public static void Main(string[] args) } } - using (IndexReader reader = DirectoryReader.Open(dir)) - { - TermStats[] terms = GetHighFreqTerms(reader, numTerms, field, comparer); + using IndexReader reader = DirectoryReader.Open(dir); + TermStats[] terms = GetHighFreqTerms(reader, numTerms, field, 
comparer); - for (int i = 0; i < terms.Length; i++) - { - Console.WriteLine("{0}:{1} \t totalTF = {2:#,##0} \t doc freq = {3:#,##0} \n", terms[i].Field, terms[i].GetTermText(), terms[i].TotalTermFreq, terms[i].DocFreq); - } + for (int i = 0; i < terms.Length; i++) + { + Console.WriteLine("{0}:{1} \t totalTF = {2:#,##0} \t doc freq = {3:#,##0} \n", terms[i].Field, terms[i].GetTermText(), terms[i].TotalTermFreq, terms[i].DocFreq); } } diff --git a/src/Lucene.Net.Misc/Misc/IndexMergeTool.cs b/src/Lucene.Net.Misc/Misc/IndexMergeTool.cs index 0669e2ee9a..defd30841e 100644 --- a/src/Lucene.Net.Misc/Misc/IndexMergeTool.cs +++ b/src/Lucene.Net.Misc/Misc/IndexMergeTool.cs @@ -27,7 +27,7 @@ namespace Lucene.Net.Misc /// Merges indices specified on the command line into the index /// specified as the first command line argument. /// - public class IndexMergeTool + public static class IndexMergeTool // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public static void Main(string[] args) { @@ -38,36 +38,34 @@ public static void Main(string[] args) //Console.Error.WriteLine("Usage: IndexMergeTool [index3] ..."); //Environment.Exit(1); } - using (FSDirectory mergedIndex = FSDirectory.Open(new System.IO.DirectoryInfo(args[0]))) - { - using (IndexWriter writer = new IndexWriter(mergedIndex, + using FSDirectory mergedIndex = FSDirectory.Open(new System.IO.DirectoryInfo(args[0])); + using (IndexWriter writer = new IndexWriter(mergedIndex, #pragma warning disable 612, 618 new IndexWriterConfig(LuceneVersion.LUCENE_CURRENT, null) #pragma warning restore 612, 618 { OpenMode = OpenMode.CREATE })) + { + Directory[] indexes = new Directory[args.Length - 1]; + try { - Directory[] indexes = new Directory[args.Length - 1]; - try + for (int i = 1; i < args.Length; i++) { - for (int i = 1; i < args.Length; i++) - { - indexes[i - 1] = FSDirectory.Open(new System.IO.DirectoryInfo(args[i])); - } + indexes[i - 1] = FSDirectory.Open(new 
System.IO.DirectoryInfo(args[i])); + } - Console.WriteLine("Merging..."); - writer.AddIndexes(indexes); + Console.WriteLine("Merging..."); + writer.AddIndexes(indexes); - Console.WriteLine("Full merge..."); - writer.ForceMerge(1); - } - finally - { - // LUCENENET specific - dispose directories - IOUtils.Dispose(indexes); - } + Console.WriteLine("Full merge..."); + writer.ForceMerge(1); + } + finally + { + // LUCENENET specific - dispose directories + IOUtils.Dispose(indexes); } - Console.WriteLine("Done."); } + Console.WriteLine("Done."); } } } \ No newline at end of file diff --git a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs index bf03c34e3a..b52954bde6 100644 --- a/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs +++ b/src/Lucene.Net.Misc/Util/Fst/ListOfOutputs.cs @@ -89,13 +89,12 @@ public override object Subtract(object @object, object inc) public override object Add(object prefix, object output) { if (Debugging.AssertsEnabled) Debugging.Assert(!(prefix is IList)); - if (!(output is IList)) + if (!(output is IList outputList)) { return outputs.Add((T)prefix, (T)output); } else { - IList outputList = (IList)output; IList addedList = new JCG.List(outputList.Count); foreach (object _output in outputList) { @@ -113,14 +112,13 @@ public override void Write(object output, DataOutput @out) public override void WriteFinalOutput(object output, DataOutput @out) { - if (!(output is IList)) + if (!(output is IList outputList)) { @out.WriteVInt32(1); outputs.Write((T)output, @out); } else { - IList outputList = (IList)output; @out.WriteVInt32(outputList.Count); foreach (var eachOutput in outputList) { @@ -156,14 +154,12 @@ public override object ReadFinalOutput(DataInput @in) public override string OutputToString(object output) { - if (!(output is IList)) + if (!(output is IList outputList)) { return outputs.OutputToString((T)output); } else { - IList outputList = (IList)output; - StringBuilder b = new StringBuilder(); 
b.Append('['); diff --git a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs index b5a3e93c24..23fb938d19 100644 --- a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs +++ b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs @@ -1,7 +1,6 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Store; using System; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst @@ -27,8 +26,8 @@ namespace Lucene.Net.Util.Fst /// An FST implementation where each output /// is one or two non-negative long values. If it's a /// output, is - /// returned; else, TwoLongs. Order - /// is preserved in the TwoLongs case, ie .first is the first + /// returned; else, . Order + /// is preserved in the case, ie .first is the first /// input/output added to , and .second is the /// second. You cannot store 0 output with this (that's /// reserved to mean "no output")! @@ -84,9 +83,8 @@ public override string ToString() public override bool Equals(object other) { - if (other is TwoInt64s) + if (other is TwoInt64s other2) { - TwoInt64s other2 = (TwoInt64s)other; return first == other2.first && second == other2.second; } else @@ -113,23 +111,21 @@ private UpToTwoPositiveInt64Outputs(bool doShare) this.doShare = doShare; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static UpToTwoPositiveInt64Outputs GetSingleton(bool doShare) { return doShare ? singletonShare : singletonNoShare; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This is a shipped public API")] public long? Get(long v) { - if (v == 0) - { - return NO_OUTPUT; - } - else - { - return v; - } + return v == 0 ? 
NO_OUTPUT : v; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This is a shipped public API")] public TwoInt64s Get(long first, long second) { return new TwoInt64s(first, second); @@ -262,7 +258,8 @@ public override object Read(DataInput @in) } } - private bool Valid(long? o) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool Valid(long? o) // LUCENENET: CA1822: Mark members as static { Debugging.Assert(o != null); Debugging.Assert(o is long?); @@ -271,7 +268,8 @@ private bool Valid(long? o) } // Used only by assert - private bool Valid(object o, bool allowDouble) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool Valid(object o, bool allowDouble) // LUCENENET: CA1822: Mark members as static { if (!allowDouble) { diff --git a/src/Lucene.Net.Queries/BooleanFilter.cs b/src/Lucene.Net.Queries/BooleanFilter.cs index 164985c926..24caf4ac26 100644 --- a/src/Lucene.Net.Queries/BooleanFilter.cs +++ b/src/Lucene.Net.Queries/BooleanFilter.cs @@ -119,7 +119,7 @@ private static DocIdSetIterator GetDISI(Filter filter, AtomicReaderContext conte { // we dont pass acceptDocs, we will filter at the end using an additional filter DocIdSet set = filter.GetDocIdSet(context, null); - return set == null ? 
null : set.GetIterator(); + return set?.GetIterator(); } /// diff --git a/src/Lucene.Net.Queries/BoostingQuery.cs b/src/Lucene.Net.Queries/BoostingQuery.cs index 1291a2faed..c19115096a 100644 --- a/src/Lucene.Net.Queries/BoostingQuery.cs +++ b/src/Lucene.Net.Queries/BoostingQuery.cs @@ -53,10 +53,11 @@ public BoostingQuery(Query match, Query context, float boost) public override Query Rewrite(IndexReader reader) { - BooleanQuery result = new BooleanQueryAnonymousInnerClassHelper(this); - result.Add(match, Occur.MUST); - result.Add(context, Occur.SHOULD); - return result; + return new BooleanQueryAnonymousInnerClassHelper(this) + { + { match, Occur.MUST }, + { context, Occur.SHOULD } + }; } private class BooleanQueryAnonymousInnerClassHelper : BooleanQuery diff --git a/src/Lucene.Net.Queries/ChainedFilter.cs b/src/Lucene.Net.Queries/ChainedFilter.cs index 3d433e2ceb..a9c43f99a4 100644 --- a/src/Lucene.Net.Queries/ChainedFilter.cs +++ b/src/Lucene.Net.Queries/ChainedFilter.cs @@ -109,7 +109,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo return BitsFilteredDocIdSet.Wrap(GetDocIdSet(context, DEFAULT, index), acceptDocs); } - private DocIdSetIterator GetDISI(Filter filter, AtomicReaderContext context) + private static DocIdSetIterator GetDISI(Filter filter, AtomicReaderContext context) // LUCENENET: CA1822: Mark members as static { // we dont pass acceptDocs, we will filter at the end using an additional filter DocIdSet docIdSet = filter.GetDocIdSet(context, null); @@ -206,22 +206,22 @@ public override string ToString() /// private void DoChain(FixedBitSet result, int logic, DocIdSet dis) { - if (dis is FixedBitSet) + if (dis is FixedBitSet fixedBitSet) { // optimized case for FixedBitSets switch (logic) { case OR: - result.Or((FixedBitSet)dis); + result.Or(fixedBitSet); break; case AND: - result.And((FixedBitSet)dis); + result.And(fixedBitSet); break; case ANDNOT: - result.AndNot((FixedBitSet)dis); + result.AndNot(fixedBitSet); 
break; case XOR: - result.Xor((FixedBitSet)dis); + result.Xor(fixedBitSet); break; default: DoChain(result, DEFAULT, dis); diff --git a/src/Lucene.Net.Queries/CustomScoreQuery.cs b/src/Lucene.Net.Queries/CustomScoreQuery.cs index 33beedf6ca..18e3d84f15 100644 --- a/src/Lucene.Net.Queries/CustomScoreQuery.cs +++ b/src/Lucene.Net.Queries/CustomScoreQuery.cs @@ -277,7 +277,7 @@ public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { valSrcScorers[i] = valSrcWeights[i].GetScorer(context, acceptDocs); } - return new CustomScorer(outerInstance, outerInstance.GetCustomScoreProvider(context), this, queryWeight, + return new CustomScorer(outerInstance.GetCustomScoreProvider(context), this, queryWeight, subQueryScorer, valSrcScorers); } @@ -320,8 +320,6 @@ private Explanation DoExplain(AtomicReaderContext info, int doc) /// private class CustomScorer : Scorer { - private readonly CustomScoreQuery outerInstance; - private readonly float qWeight; private readonly Scorer subQueryScorer; private readonly Scorer[] valSrcScorers; @@ -329,11 +327,10 @@ private class CustomScorer : Scorer private readonly float[] vScores; // reused in score() to avoid allocating this array for each doc // constructor - internal CustomScorer(CustomScoreQuery outerInstance, CustomScoreProvider provider, CustomWeight w, + internal CustomScorer(CustomScoreProvider provider, CustomWeight w, float qWeight, Scorer subQueryScorer, Scorer[] valSrcScorers) : base(w) { - this.outerInstance = outerInstance; this.qWeight = qWeight; this.subQueryScorer = subQueryScorer; this.valSrcScorers = valSrcScorers; diff --git a/src/Lucene.Net.Queries/Function/BoostedQuery.cs b/src/Lucene.Net.Queries/Function/BoostedQuery.cs index 884d09ba1a..db79ea9825 100644 --- a/src/Lucene.Net.Queries/Function/BoostedQuery.cs +++ b/src/Lucene.Net.Queries/Function/BoostedQuery.cs @@ -70,14 +70,14 @@ private class BoostedWeight : Weight { private readonly BoostedQuery outerInstance; - private readonly 
IndexSearcher searcher; + //private readonly IndexSearcher searcher; // LUCENENET: Never read internal readonly Weight qWeight; internal readonly IDictionary fcontext; public BoostedWeight(BoostedQuery outerInstance, IndexSearcher searcher) { this.outerInstance = outerInstance; - this.searcher = searcher; + //this.searcher = searcher; // LUCENENET: Never read this.qWeight = outerInstance.q.CreateWeight(searcher); this.fcontext = ValueSource.NewContext(searcher); outerInstance.boostVal.CreateWeight(fcontext, searcher); diff --git a/src/Lucene.Net.Queries/Function/DocValues/BoolDocValues.cs b/src/Lucene.Net.Queries/Function/DocValues/BoolDocValues.cs index 182d46041c..dc9d43b683 100644 --- a/src/Lucene.Net.Queries/Function/DocValues/BoolDocValues.cs +++ b/src/Lucene.Net.Queries/Function/DocValues/BoolDocValues.cs @@ -28,7 +28,7 @@ public abstract class BoolDocValues : FunctionValues { protected readonly ValueSource m_vs; - public BoolDocValues(ValueSource vs) + protected BoolDocValues(ValueSource vs) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_vs = vs; } diff --git a/src/Lucene.Net.Queries/Function/DocValues/DocTermsIndexDocValues.cs b/src/Lucene.Net.Queries/Function/DocValues/DocTermsIndexDocValues.cs index 6472fe3241..eb7c280d1f 100644 --- a/src/Lucene.Net.Queries/Function/DocValues/DocTermsIndexDocValues.cs +++ b/src/Lucene.Net.Queries/Function/DocValues/DocTermsIndexDocValues.cs @@ -38,7 +38,7 @@ public abstract class DocTermsIndexDocValues : FunctionValues protected readonly BytesRef m_spare = new BytesRef(); protected readonly CharsRef m_spareChars = new CharsRef(); - public DocTermsIndexDocValues(ValueSource vs, AtomicReaderContext context, string field) + protected DocTermsIndexDocValues(ValueSource vs, AtomicReaderContext context, string field) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { try { diff --git a/src/Lucene.Net.Queries/Function/DocValues/DoubleDocValues.cs 
b/src/Lucene.Net.Queries/Function/DocValues/DoubleDocValues.cs index 2142742c9c..8ed8e0d6cf 100644 --- a/src/Lucene.Net.Queries/Function/DocValues/DoubleDocValues.cs +++ b/src/Lucene.Net.Queries/Function/DocValues/DoubleDocValues.cs @@ -30,7 +30,7 @@ public abstract class DoubleDocValues : FunctionValues { protected readonly ValueSource m_vs; - public DoubleDocValues(ValueSource vs) + protected DoubleDocValues(ValueSource vs) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_vs = vs; } diff --git a/src/Lucene.Net.Queries/Function/DocValues/FloatDocValues.cs b/src/Lucene.Net.Queries/Function/DocValues/FloatDocValues.cs index dd1a82d2a7..0d8fe10b08 100644 --- a/src/Lucene.Net.Queries/Function/DocValues/FloatDocValues.cs +++ b/src/Lucene.Net.Queries/Function/DocValues/FloatDocValues.cs @@ -31,7 +31,7 @@ public abstract class SingleDocValues : FunctionValues { protected readonly ValueSource m_vs; - public SingleDocValues(ValueSource vs) + protected SingleDocValues(ValueSource vs) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_vs = vs; } diff --git a/src/Lucene.Net.Queries/Function/DocValues/IntDocValues.cs b/src/Lucene.Net.Queries/Function/DocValues/IntDocValues.cs index 177c249eb3..58c8a058ca 100644 --- a/src/Lucene.Net.Queries/Function/DocValues/IntDocValues.cs +++ b/src/Lucene.Net.Queries/Function/DocValues/IntDocValues.cs @@ -32,7 +32,7 @@ public abstract class Int32DocValues : FunctionValues { protected readonly ValueSource m_vs; - public Int32DocValues(ValueSource vs) + protected Int32DocValues(ValueSource vs) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_vs = vs; } diff --git a/src/Lucene.Net.Queries/Function/DocValues/LongDocValues.cs b/src/Lucene.Net.Queries/Function/DocValues/LongDocValues.cs index f70371650f..6f204e22c6 100644 --- a/src/Lucene.Net.Queries/Function/DocValues/LongDocValues.cs +++ 
b/src/Lucene.Net.Queries/Function/DocValues/LongDocValues.cs @@ -32,7 +32,7 @@ public abstract class Int64DocValues : FunctionValues { protected readonly ValueSource m_vs; - public Int64DocValues(ValueSource vs) + protected Int64DocValues(ValueSource vs) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_vs = vs; } diff --git a/src/Lucene.Net.Queries/Function/DocValues/StrDocValues.cs b/src/Lucene.Net.Queries/Function/DocValues/StrDocValues.cs index 84b4e64b70..36d8089429 100644 --- a/src/Lucene.Net.Queries/Function/DocValues/StrDocValues.cs +++ b/src/Lucene.Net.Queries/Function/DocValues/StrDocValues.cs @@ -27,7 +27,7 @@ public abstract class StrDocValues : FunctionValues { protected readonly ValueSource m_vs; - public StrDocValues(ValueSource vs) + protected StrDocValues(ValueSource vs) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_vs = vs; } diff --git a/src/Lucene.Net.Queries/Function/FunctionQuery.cs b/src/Lucene.Net.Queries/Function/FunctionQuery.cs index 9bd0b4b844..02b7fc75de 100644 --- a/src/Lucene.Net.Queries/Function/FunctionQuery.cs +++ b/src/Lucene.Net.Queries/Function/FunctionQuery.cs @@ -199,11 +199,8 @@ public override string ToString(string field) /// public override bool Equals(object o) { - var other = o as FunctionQuery; - if (other == null) - { - return false; - } + if (o is null) return false; + if (!(o is FunctionQuery other)) return false; return Boost == other.Boost && func.Equals(other.func); } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ByteFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/ByteFieldSource.cs index 2450fdeca5..9628298eb6 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/ByteFieldSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/ByteFieldSource.cs @@ -128,9 +128,8 @@ public override object ObjectVal(int doc) public override bool Equals(object o) { - var other = o as ByteFieldSource; - if 
(other == null) - return false; + if (o is null) return false; + if (!(o is ByteFieldSource other)) return false; return base.Equals(other) && (this.parser == null ? other.parser == null : this.parser.GetType() == other.parser.GetType()); } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/DefFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/DefFunction.cs index 81bf9318b2..40cf6b994c 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/DefFunction.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/DefFunction.cs @@ -44,12 +44,9 @@ public override FunctionValues GetValues(IDictionary fcontext, AtomicReaderConte private class ValuesAnonymousInnerClassHelper : Values { - private readonly DefFunction outerInstance; - public ValuesAnonymousInnerClassHelper(DefFunction outerInstance, FunctionValues[] valsArr) : base(outerInstance, valsArr) { - this.outerInstance = outerInstance; upto = valsArr.Length - 1; } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/DualFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/DualFloatFunction.cs index 7023c42b00..d46128e91b 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/DualFloatFunction.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/DualFloatFunction.cs @@ -35,7 +35,7 @@ public abstract class DualSingleFunction : ValueSource /// the base. /// the exponent. 
- public DualSingleFunction(ValueSource a, ValueSource b) + protected DualSingleFunction(ValueSource a, ValueSource b) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_a = a; this.m_b = b; diff --git a/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs index 3ae9fe363c..b95cf8d827 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs @@ -126,10 +126,10 @@ public Int32DocValuesAnonymousInnerClassHelper(EnumFieldSource outerInstance, En this.outerInstance = outerInstance; this.arr = arr; this.valid = valid; - val = new MutableValueInt32(); + //val = new MutableValueInt32(); // LUCENENET: Never read } - private readonly MutableValueInt32 val; + //private readonly MutableValueInt32 val; // LUCENENET: Never read /// /// NOTE: This was floatVal() in Lucene diff --git a/src/Lucene.Net.Queries/Function/ValueSources/FieldCacheSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/FieldCacheSource.cs index c0dde80caa..a8ec5c3c54 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/FieldCacheSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/FieldCacheSource.cs @@ -29,7 +29,7 @@ public abstract class FieldCacheSource : ValueSource protected readonly string m_field; protected readonly IFieldCache m_cache = Search.FieldCache.DEFAULT; - public FieldCacheSource(string field) + protected FieldCacheSource(string field) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_field = field; } @@ -45,11 +45,8 @@ public override string GetDescription() public override bool Equals(object o) { - var other = o as FieldCacheSource; - if (other == null) - { - return false; - } + if (o is null) return false; + if (!(o is FieldCacheSource other)) return false; return m_field.Equals(other.m_field, StringComparison.Ordinal) && 
m_cache == other.m_cache; } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/IDFValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/IDFValueSource.cs index 9b1b6ff76c..7d2da7210d 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/IDFValueSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/IDFValueSource.cs @@ -59,13 +59,13 @@ public override FunctionValues GetValues(IDictionary context, AtomicReaderContex // tries extra hard to cast the sim to TFIDFSimilarity internal static TFIDFSimilarity AsTFIDF(Similarity sim, string field) { - while (sim is PerFieldSimilarityWrapper) + while (sim is PerFieldSimilarityWrapper perFieldSimilarityWrapper) { - sim = ((PerFieldSimilarityWrapper)sim).Get(field); + sim = perFieldSimilarityWrapper.Get(field); } - if (sim is TFIDFSimilarity) + if (sim is TFIDFSimilarity similarity) { - return (TFIDFSimilarity)sim; + return similarity; } else { diff --git a/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs index 2cd5186a41..24a3989e2c 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs @@ -73,10 +73,10 @@ public Int32DocValuesAnonymousInnerClassHelper(Int32FieldSource outerInstance, I this.outerInstance = outerInstance; this.arr = arr; this.valid = valid; - val = new MutableValueInt32(); + //val = new MutableValueInt32(); // LUCENENET: Never read } - private readonly MutableValueInt32 val; + //private readonly MutableValueInt32 val; // LUCENENET: Never read /// /// NOTE: This was floatVal() in Lucene diff --git a/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs index ce5bdc36c7..ff5a7878bd 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs @@ -33,7 +33,7 
@@ public abstract class MultiBoolFunction : BoolFunction { protected readonly IList m_sources; - public MultiBoolFunction(IList sources) + protected MultiBoolFunction(IList sources) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_sources = sources; } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/MultiFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/MultiFloatFunction.cs index 525731161e..156eda40bc 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/MultiFloatFunction.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/MultiFloatFunction.cs @@ -35,7 +35,7 @@ public abstract class MultiSingleFunction : ValueSource { protected readonly ValueSource[] m_sources; - public MultiSingleFunction(ValueSource[] sources) + protected MultiSingleFunction(ValueSource[] sources) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_sources = sources; } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/NormValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/NormValueSource.cs index 3ceb5037e4..2e0dac121e 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/NormValueSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/NormValueSource.cs @@ -68,20 +68,17 @@ public override FunctionValues GetValues(IDictionary context, AtomicReaderContex return new ConstDoubleDocValues(0.0, this); } - return new SingleDocValuesAnonymousInnerClassHelper(this, this, similarity, norms); + return new SingleDocValuesAnonymousInnerClassHelper(this, similarity, norms); } private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues { - private readonly NormValueSource outerInstance; - private readonly TFIDFSimilarity similarity; private readonly NumericDocValues norms; - public SingleDocValuesAnonymousInnerClassHelper(NormValueSource outerInstance, NormValueSource @this, TFIDFSimilarity similarity, NumericDocValues norms) + public 
SingleDocValuesAnonymousInnerClassHelper(NormValueSource @this, TFIDFSimilarity similarity, NumericDocValues norms) : base(@this) { - this.outerInstance = outerInstance; this.similarity = similarity; this.norms = norms; } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/OrdFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/OrdFieldSource.cs index efa8bbd538..c43c7bdd94 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/OrdFieldSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/OrdFieldSource.cs @@ -66,28 +66,25 @@ public override FunctionValues GetValues(IDictionary context, AtomicReaderContex IndexReader topReader = ReaderUtil.GetTopLevelContext(readerContext).Reader; AtomicReader r = SlowCompositeReaderWrapper.Wrap(topReader); SortedDocValues sindex = FieldCache.DEFAULT.GetTermsIndex(r, m_field); - return new Int32DocValuesAnonymousInnerClassHelper(this, this, off, sindex); + return new Int32DocValuesAnonymousInnerClassHelper(this, off, sindex); } private sealed class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues { - private readonly OrdFieldSource outerInstance; - private readonly int off; private readonly SortedDocValues sindex; - public Int32DocValuesAnonymousInnerClassHelper(OrdFieldSource outerInstance, OrdFieldSource @this, int off, SortedDocValues sindex) + public Int32DocValuesAnonymousInnerClassHelper(OrdFieldSource @this, int off, SortedDocValues sindex) : base(@this) { - this.outerInstance = outerInstance; this.off = off; this.sindex = sindex; } - private string ToTerm(string readableValue) - { - return readableValue; - } + //private string ToTerm(string readableValue) // LUCENENET: IDE0051: Remove unused private member + //{ + // return readableValue; + //} /// /// NOTE: This was intVal() in Lucene diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ReverseOrdFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/ReverseOrdFieldSource.cs index 8e320ef906..3b54938861 100644 --- 
a/src/Lucene.Net.Queries/Function/ValueSources/ReverseOrdFieldSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/ReverseOrdFieldSource.cs @@ -69,21 +69,18 @@ public override FunctionValues GetValues(IDictionary context, AtomicReaderContex var sindex = FieldCache.DEFAULT.GetTermsIndex(r, field); var end = sindex.ValueCount; - return new Int32DocValuesAnonymousInnerClassHelper(this, this, off, sindex, end); + return new Int32DocValuesAnonymousInnerClassHelper(this, off, sindex, end); } private class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues { - private readonly ReverseOrdFieldSource outerInstance; - private readonly int off; private readonly SortedDocValues sindex; private readonly int end; - public Int32DocValuesAnonymousInnerClassHelper(ReverseOrdFieldSource outerInstance, ReverseOrdFieldSource @this, int off, SortedDocValues sindex, int end) + public Int32DocValuesAnonymousInnerClassHelper(ReverseOrdFieldSource @this, int off, SortedDocValues sindex, int end) : base(@this) { - this.outerInstance = outerInstance; this.off = off; this.sindex = sindex; this.end = end; diff --git a/src/Lucene.Net.Queries/Function/ValueSources/SimpleBoolFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/SimpleBoolFunction.cs index c71258d4d7..9f609c7dad 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/SimpleBoolFunction.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/SimpleBoolFunction.cs @@ -33,7 +33,7 @@ public abstract class SimpleBoolFunction : BoolFunction { protected readonly ValueSource m_source; - public SimpleBoolFunction(ValueSource source) + protected SimpleBoolFunction(ValueSource source) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_source = source; } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/SimpleFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/SimpleFloatFunction.cs index eef0434108..f30fb7ffde 100644 --- 
a/src/Lucene.Net.Queries/Function/ValueSources/SimpleFloatFunction.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/SimpleFloatFunction.cs @@ -28,7 +28,7 @@ namespace Lucene.Net.Queries.Function.ValueSources /// public abstract class SimpleSingleFunction : SingularFunction { - public SimpleSingleFunction(ValueSource source) + protected SimpleSingleFunction(ValueSource source) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(source) { } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/SingleFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/SingleFunction.cs index 3cde387ac9..5de2beb110 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/SingleFunction.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/SingleFunction.cs @@ -30,7 +30,7 @@ public abstract class SingularFunction : ValueSource { protected readonly ValueSource m_source; - public SingularFunction(ValueSource source) + protected SingularFunction(ValueSource source) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_source = source; } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/SumTotalTermFreqValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/SumTotalTermFreqValueSource.cs index fa2b2ea7e6..b6d427fa2b 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/SumTotalTermFreqValueSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/SumTotalTermFreqValueSource.cs @@ -78,19 +78,16 @@ public override void CreateWeight(IDictionary context, IndexSearcher searcher) } } long ttf = sumTotalTermFreq; - context[this] = new Int64DocValuesAnonymousInnerClassHelper(this, this, ttf); + context[this] = new Int64DocValuesAnonymousInnerClassHelper(this, ttf); } private class Int64DocValuesAnonymousInnerClassHelper : Int64DocValues { - private readonly SumTotalTermFreqValueSource outerInstance; + private readonly long ttf; - private long ttf; - - public 
Int64DocValuesAnonymousInnerClassHelper(SumTotalTermFreqValueSource outerInstance, SumTotalTermFreqValueSource @this, long ttf) + public Int64DocValuesAnonymousInnerClassHelper(SumTotalTermFreqValueSource @this, long ttf) : base(@this) { - this.outerInstance = outerInstance; this.ttf = ttf; } diff --git a/src/Lucene.Net.Queries/Function/ValueSources/TFValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/TFValueSource.cs index fb7dd3935f..5b243d3332 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/TFValueSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/TFValueSource.cs @@ -101,20 +101,13 @@ public virtual void Reset() if (docs == null) { - docs = new DocsEnumAnonymousInnerClassHelper(this); + docs = new DocsEnumAnonymousInnerClassHelper(); } atDoc = -1; } private class DocsEnumAnonymousInnerClassHelper : DocsEnum { - private readonly SingleDocValuesAnonymousInnerClassHelper outerInstance; - - public DocsEnumAnonymousInnerClassHelper(SingleDocValuesAnonymousInnerClassHelper outerInstance) - { - this.outerInstance = outerInstance; - } - public override int Freq => 0; public override int DocID => DocIdSetIterator.NO_MORE_DOCS; diff --git a/src/Lucene.Net.Queries/Function/ValueSources/TermFreqValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/TermFreqValueSource.cs index 7924732994..7985232156 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/TermFreqValueSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/TermFreqValueSource.cs @@ -53,7 +53,7 @@ private class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues { private readonly TermFreqValueSource outerInstance; - private Terms terms; + private readonly Terms terms; public Int32DocValuesAnonymousInnerClassHelper(TermFreqValueSource outerInstance, TermFreqValueSource @this, Terms terms) : base(@this) @@ -91,20 +91,13 @@ public virtual void Reset() if (docs == null) { - docs = new DocsEnumAnonymousInnerClassHelper(this); + docs = new 
DocsEnumAnonymousInnerClassHelper(); } atDoc = -1; } private class DocsEnumAnonymousInnerClassHelper : DocsEnum { - private readonly Int32DocValuesAnonymousInnerClassHelper outerInstance; - - public DocsEnumAnonymousInnerClassHelper(Int32DocValuesAnonymousInnerClassHelper outerInstance) - { - this.outerInstance = outerInstance; - } - public override int Freq => 0; public override int DocID => DocIdSetIterator.NO_MORE_DOCS; diff --git a/src/Lucene.Net.Queries/Function/ValueSources/TotalTermFreqValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/TotalTermFreqValueSource.cs index 4f31457080..c165255cd4 100644 --- a/src/Lucene.Net.Queries/Function/ValueSources/TotalTermFreqValueSource.cs +++ b/src/Lucene.Net.Queries/Function/ValueSources/TotalTermFreqValueSource.cs @@ -75,19 +75,16 @@ public override void CreateWeight(IDictionary context, IndexSearcher searcher) } } var ttf = totalTermFreq; - context[this] = new Int64DocValuesAnonymousInnerClassHelper(this, this, ttf); + context[this] = new Int64DocValuesAnonymousInnerClassHelper(this, ttf); } private class Int64DocValuesAnonymousInnerClassHelper : Int64DocValues { - private readonly TotalTermFreqValueSource outerInstance; - private readonly long ttf; - public Int64DocValuesAnonymousInnerClassHelper(TotalTermFreqValueSource outerInstance, TotalTermFreqValueSource @this, long ttf) + public Int64DocValuesAnonymousInnerClassHelper(TotalTermFreqValueSource @this, long ttf) : base(@this) { - this.outerInstance = outerInstance; this.ttf = ttf; } diff --git a/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs b/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs index 80e2ce3b93..fab5b4ed3d 100644 --- a/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs +++ b/src/Lucene.Net.Queries/Mlt/MoreLikeThis.cs @@ -600,8 +600,7 @@ private void AddTermFrequencies(TextReader r, IDictionary termFre } // increment frequency - Int32 cnt; - if (!termFreqMap.TryGetValue(word, out cnt)) + if (!termFreqMap.TryGetValue(word, out Int32 cnt)) { termFreqMap[word] 
= new Int32(); } diff --git a/src/Lucene.Net.Queries/TermsFilter.cs b/src/Lucene.Net.Queries/TermsFilter.cs index 37c2e5bf0d..e448d9ed8f 100644 --- a/src/Lucene.Net.Queries/TermsFilter.cs +++ b/src/Lucene.Net.Queries/TermsFilter.cs @@ -73,7 +73,7 @@ public FieldAndTermEnumAnonymousInnerClassHelper(IList terms) } // we need to sort for deduplication and to have a common cache key - readonly IEnumerator iter; + private readonly IEnumerator iter; public override bool MoveNext() { if (iter.MoveNext()) @@ -112,7 +112,7 @@ public FieldAndTermEnumAnonymousInnerClassHelper2(string field, IList } // we need to sort for deduplication and to have a common cache key - readonly IEnumerator iter; + private readonly IEnumerator iter; public override bool MoveNext() { if (iter.MoveNext()) @@ -225,7 +225,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo { return result; } - Terms terms = null; + Terms terms; // LUCENENET: IDE0059: Remove unnecessary value assignment TermsEnum termsEnum = null; DocsEnum docs = null; foreach (TermsAndField termsAndField in this.termsAndFields) diff --git a/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs b/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs index 09c156382d..39447e262b 100644 --- a/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs +++ b/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs @@ -138,13 +138,13 @@ protected internal override Query GetFieldQuery(string field, string queryText, private void ApplySlop(Query q, int slop) { - if (q is PhraseQuery) + if (q is PhraseQuery phraseQuery) { - ((PhraseQuery)q).Slop = slop; + phraseQuery.Slop = slop; } - else if (q is MultiPhraseQuery) + else if (q is MultiPhraseQuery multiPhraseQuery) { - ((MultiPhraseQuery)q).Slop = slop; + multiPhraseQuery.Slop = slop; } } @@ -277,7 +277,7 @@ public static Query Parse(LuceneVersion matchVersion, string[] queries, string[] { QueryParser qp = new QueryParser(matchVersion, 
fields[i], analyzer); Query q = qp.Parse(queries[i]); - if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).Clauses.Count > 0)) + if (q != null && (!(q is BooleanQuery booleanQuery) || booleanQuery.Clauses.Count > 0)) { bQuery.Add(q, Occur.SHOULD); } @@ -325,7 +325,7 @@ public static Query Parse(LuceneVersion matchVersion, string query, string[] fie { QueryParser qp = new QueryParser(matchVersion, fields[i], analyzer); Query q = qp.Parse(query); - if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).Clauses.Count > 0)) + if (q != null && (!(q is BooleanQuery booleanQuery) || booleanQuery.Clauses.Count > 0)) { bQuery.Add(q, flags[i]); } @@ -371,7 +371,7 @@ public static Query Parse(LuceneVersion matchVersion, string[] queries, string[] { QueryParser qp = new QueryParser(matchVersion, fields[i], analyzer); Query q = qp.Parse(queries[i]); - if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).Clauses.Count > 0)) + if (q != null && (!(q is BooleanQuery booleanQuery) || booleanQuery.Clauses.Count > 0)) { bQuery.Add(q, flags[i]); } diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs index e70d3a98be..9c5993a6c4 100644 --- a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs +++ b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs @@ -98,6 +98,7 @@ namespace Lucene.Net.QueryParsers.Classic /// As of 3.1, is false by default. /// /// + [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This class is based on generated code")] public class QueryParser : QueryParserBase { // NOTE: This was moved into the QueryParserBase class. @@ -264,7 +265,7 @@ public Query Query(string field) public Query Clause(string field) { Query q; - Token fieldToken = null, boost = null; + Token fieldToken, boost = null; // LUCENENET: IDE0059: Remove unnecessary value assignment if (Jj_2_1(2)) { switch ((jj_ntk == -1) ? 
Jj_ntk() : jj_ntk) @@ -594,14 +595,14 @@ private bool Jj_3R_3() private Token jj_scanpos, jj_lastpos; private int jj_la; private int jj_gen; - private int[] jj_la1 = new int[21]; - private static uint[] jj_la1_0 = new uint[] // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) + private readonly int[] jj_la1 = new int[21]; // LUCENENET: marked readonly + private static readonly uint[] jj_la1_0 = new uint[] // LUCENENET: marked readonly // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) { 0x300, 0x300, 0x1c00, 0x1c00, 0xfda7f00, 0x120000, 0x40000, 0xfda6000, 0x9d22000, 0x200000, 0x200000, 0x40000, 0x6000000, 0x80000000, 0x10000000, 0x80000000, 0x60000000, 0x40000, 0x200000, 0x40000, 0xfda2000, }; - private static int[] jj_la1_1 = new int[] // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) + private static readonly int[] jj_la1_1 = new int[] // LUCENENET: marked readonly // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, @@ -735,7 +736,7 @@ public LookaheadSuccess(SerializationInfo info, StreamingContext context) #endif } - private LookaheadSuccess jj_ls = new LookaheadSuccess(); + private readonly LookaheadSuccess jj_ls = new LookaheadSuccess(); // LUCENENET: marked readonly private bool Jj_scan_token(int kind) { if (jj_scanpos == jj_lastpos) @@ -800,10 +801,10 @@ private int Jj_ntk() return (jj_ntk = Jj_nt.Kind); } - private List jj_expentries = new List(); + private readonly List jj_expentries = new List(); // LUCENENET: marked readonly private int[] jj_expentry; private int jj_kind = -1; - private int[] jj_lasttokens = new int[100]; + private readonly int[] jj_lasttokens = new 
int[100]; // LUCENENET: marked readonly private int jj_endpos; private void Jj_add_error_token(int kind, int pos) @@ -889,6 +890,7 @@ public virtual ParseException GenerateParseException() return new ParseException(Token, exptokseq, QueryParserConstants.TokenImage); } + /// Enable tracing. public void Enable_tracing() { diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs index b121b3a4ce..962b492554 100644 --- a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs +++ b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs @@ -114,25 +114,25 @@ protected MethodRemovedUseAnother(SerializationInfo info, StreamingContext conte //int phraseSlop = 0; //float fuzzyMinSim = FuzzyQuery.DefaultMinSimilarity; //int fuzzyPrefixLength = FuzzyQuery.DefaultPrefixLength; - CultureInfo locale = null; // LUCENENET NOTE: null indicates read CultureInfo.CurrentCulture on the fly - TimeZoneInfo timeZone = null; // LUCENENET NOTE: null indicates read TimeZoneInfo.Local on the fly + private CultureInfo locale = null; // LUCENENET NOTE: null indicates read CultureInfo.CurrentCulture on the fly + private TimeZoneInfo timeZone = null; // LUCENENET NOTE: null indicates read TimeZoneInfo.Local on the fly // TODO: Work out what the default date resolution SHOULD be (was null in Java, which isn't valid for an enum type) /// /// the default date resolution /// - DateTools.Resolution dateResolution = DateTools.Resolution.DAY; + private DateTools.Resolution dateResolution = DateTools.Resolution.DAY; /// /// maps field names to date resolutions /// - IDictionary fieldToDateResolution = null; + private IDictionary fieldToDateResolution = null; /// /// Whether or not to analyze range terms when constructing RangeQuerys /// (For example, analyzing terms into collation keys for locale-sensitive RangeQuery) /// - bool analyzeRangeTerms = false; + private bool analyzeRangeTerms = false; /// /// So the generated 
QueryParser(CharStream) won't error out @@ -190,8 +190,7 @@ public virtual Query Parse(string query) try { // TopLevelQuery is a Query followed by the end-of-input (EOF) - Query res = TopLevelQuery(m_field); - return res != null ? res : NewBooleanQuery(false); + return TopLevelQuery(m_field) ?? NewBooleanQuery(false); } catch (ParseException tme) { @@ -295,7 +294,7 @@ public virtual Query Parse(string query) /// public virtual CultureInfo Locale // LUCENENET TODO: API - Rename Culture { - get => this.locale == null ? CultureInfo.CurrentCulture : this.locale; + get => this.locale ?? CultureInfo.CurrentCulture; set => this.locale = value; } @@ -309,7 +308,7 @@ public virtual Query Parse(string query) /// public virtual TimeZoneInfo TimeZone { - get => this.timeZone == null ? TimeZoneInfo.Local : this.timeZone; + get => this.timeZone ?? TimeZoneInfo.Local; set => this.timeZone = value; } @@ -462,13 +461,13 @@ protected internal virtual Query GetFieldQuery(string field, string queryText, i { Query query = GetFieldQuery(field, queryText, true); - if (query is PhraseQuery) + if (query is PhraseQuery phraseQuery) { - ((PhraseQuery)query).Slop = slop; + phraseQuery.Slop = slop; } - if (query is MultiPhraseQuery) + if (query is MultiPhraseQuery multiPhraseQuery) { - ((MultiPhraseQuery)query).Slop = slop; + multiPhraseQuery.Slop = slop; } return query; @@ -487,8 +486,6 @@ protected internal virtual Query GetRangeQuery(string field, } string shortDateFormat = Locale.DateTimeFormat.ShortDatePattern; - DateTime d1; - DateTime d2 = DateTime.MaxValue; // We really don't care what we set this to, but we need something or the compiler will complain below DateTools.Resolution resolution = GetDateResolution(field); // LUCENENET specific: This doesn't emulate java perfectly. 
@@ -512,12 +509,12 @@ protected internal virtual Query GetRangeQuery(string field, // to DateTime.TryParse(part1, Locale, DateTimeStyles.None, out d1); // rather than TryParseExact - if (DateTime.TryParseExact(part1, shortDateFormat, Locale, DateTimeStyles.None, out d1)) + if (DateTime.TryParseExact(part1, shortDateFormat, Locale, DateTimeStyles.None, out DateTime d1)) { part1 = DateTools.DateToString(d1, resolution); } - if (DateTime.TryParseExact(part2, shortDateFormat, Locale, DateTimeStyles.None, out d2)) + if (DateTime.TryParseExact(part2, shortDateFormat, Locale, DateTimeStyles.None, out DateTime d2)) { if (endInclusive) { diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs index 0cb094cb21..9bcdcc36c2 100644 --- a/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs +++ b/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs @@ -95,7 +95,7 @@ public static class LexicalToken public const int DEFAULT = 2; } - // NOTE: In Java, this was an interface. However, in + // LUCENENET NOTE: In Java, this was an interface. However, in // .NET we cannot define constants in an interface. // So, instead we are making it a static class so it // can be shared between classes with different base classes. @@ -107,78 +107,6 @@ public static class LexicalToken /// public static class QueryParserConstants { - ///// End of File. - //public const int EndOfFileToken = 0; - ///// RegularExpression Id. - //public const int NumCharToken = 1; - ///// RegularExpression Id. - //public const int EscapedCharToken = 2; - ///// RegularExpression Id. - //public const int TermStartCharToken = 3; - ///// RegularExpression Id. - //public const int TermCharToken = 4; - ///// RegularExpression Id. - //public const int WhitespaceToken = 5; - ///// RegularExpression Id. - //public const int QuotedCharToken = 6; - ///// RegularExpression Id. - //public const int AndToken = 8; - ///// RegularExpression Id. 
- //public const int OrToken = 9; - ///// RegularExpression Id. - //public const int NotToken = 10; - ///// RegularExpression Id. - //public const int PlusToken = 11; - ///// RegularExpression Id. - //public const int MinusToken = 12; - ///// RegularExpression Id. - //public const int BareOperToken = 13; - ///// RegularExpression Id. - //public const int LParanToken = 14; - ///// RegularExpression Id. - //public const int RParenToken = 15; - ///// RegularExpression Id. - //public const int ColonToken = 16; - ///// RegularExpression Id. - //public const int StarToken = 17; - ///// RegularExpression Id. - //public const int CaratToken = 18; - ///// RegularExpression Id. - //public const int QuotedToken = 19; - ///// RegularExpression Id. - //public const int TermToken = 20; - ///// RegularExpression Id. - //public const int FuzzySlopToken = 21; - ///// RegularExpression Id. - //public const int PrefixTermToken = 22; - ///// RegularExpression Id. - //public const int WildTermToken = 23; - ///// RegularExpression Id. - //public const int RegExpTermToken = 24; - ///// RegularExpression Id. - //public const int RangeInStartToken = 25; - ///// RegularExpression Id. - //public const int RangeExStartToken = 26; - ///// RegularExpression Id. - //public const int NumberToken = 27; - ///// RegularExpression Id. - //public const int RangeToToken = 28; - ///// RegularExpression Id. - //public const int RangeInEndToken = 29; - ///// RegularExpression Id. - //public const int RangeExEndToken = 30; - ///// RegularExpression Id. - //public const int RangeQuotedToken = 31; - ///// RegularExpression Id. - //public const int RangeGoopToken = 32; - - ///// Lexical state. - //public const int BoostToken = 0; - ///// Lexical state. - //public const int RangeToken = 1; - ///// Lexical state. - //public const int DefaultToken = 2; - /// Literal token values. 
public static string[] TokenImage = new string[] { "", diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs index 7ea9cc711e..f307bb7d91 100644 --- a/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs +++ b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs @@ -23,6 +23,9 @@ namespace Lucene.Net.QueryParsers.Classic */ /// Token Manager. + [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This class is based on generated code")] + [SuppressMessage("Style", "IDE0059:Unnecessary assignment of a value", Justification = "This class is based on generated code")] + [SuppressMessage("CodeQuality", "IDE0051:Remove unused private members", Justification = "This class is based on generated code")] public class QueryParserTokenManager //: QueryParserConstants { private void InitBlock() @@ -32,9 +35,11 @@ private void InitBlock() temp_writer.AutoFlush = true; debugStream = temp_writer; } - + /// Debug output. +#pragma warning disable IDE0052 // Remove unread private members private TextWriter debugStream; // LUCENENET specific - made private, since we already have a setter +#pragma warning restore IDE0052 // Remove unread private members /// Set debug output. 
public virtual void SetDebugStream(TextWriter ds) { @@ -50,6 +55,7 @@ private int JjStopStringLiteralDfa_2(int pos, long active0) } } + private int JjStartNfa_2(int pos, long active0) { return JjMoveNfa_2(JjStopStringLiteralDfa_2(pos, active0), pos + 1); @@ -781,7 +787,7 @@ private int JjMoveNfa_0(int startState, int curPos) else if (m_curChar < 128) { ulong l = (ulong) (1L << (m_curChar & 63)); - do + do { switch (jjstateSet[--i]) { @@ -1154,8 +1160,8 @@ private static bool JjCanMove_2(int hiByte, int i1, int i2, ulong l1, ulong l2) internal static readonly ulong[] jjtoToken = new ulong[] { 0x1ffffff01L }; internal static readonly long[] jjtoSkip = new long[] { 0x80L }; protected ICharStream m_input_stream; - private uint[] jjrounds = new uint[49]; - private int[] jjstateSet = new int[98]; + private readonly uint[] jjrounds = new uint[49]; // LUCENENET: marked readonly + private readonly int[] jjstateSet = new int[98]; // LUCENENET: marked readonly protected char m_curChar; /// Constructor. public QueryParserTokenManager(ICharStream stream) @@ -1211,7 +1217,7 @@ protected internal virtual Token JjFillToken() int beginColumn; int endColumn; string im = jjstrLiteralImages[jjmatchedKind]; - curTokenImage = (im == null)?m_input_stream.Image:im; + curTokenImage = im ?? m_input_stream.Image; beginLine = m_input_stream.BeginLine; beginColumn = m_input_stream.BeginColumn; endLine = m_input_stream.EndLine; diff --git a/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs b/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs index 46dcb9812a..261a3c8b4e 100644 --- a/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs +++ b/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs @@ -37,16 +37,16 @@ public class TokenMgrError : Exception */ /// Lexical error occurred. - internal static readonly int LEXICAL_ERROR = 0; + internal const int LEXICAL_ERROR = 0; /// An attempt was made to create a second instance of a static token manager. 
- internal static readonly int STATIC_LEXER_ERROR = 1; + internal const int STATIC_LEXER_ERROR = 1; /// Tried to change to an invalid lexical state. - internal static readonly int INVALID_LEXICAL_STATE = 2; + internal const int INVALID_LEXICAL_STATE = 2; /// Detected (and bailed out of) an infinite loop in the token manager. - internal static readonly int LOOP_DETECTED = 3; + internal const int LOOP_DETECTED = 3; /// Indicates the reason why the exception is thrown. It will have /// one of the above 4 values. diff --git a/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs index 11968eb618..e5c5e9c116 100644 --- a/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs +++ b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs @@ -279,10 +279,10 @@ public override Query Rewrite(IndexReader reader) numNegatives++; } - if (qc is BooleanQuery) + if (qc is BooleanQuery booleanQuery) { List sc = new List(); - AddComplexPhraseClause(sc, (BooleanQuery)qc); + AddComplexPhraseClause(sc, booleanQuery); if (sc.Count > 0) { allSpanClauses[i] = sc[0]; @@ -298,9 +298,8 @@ public override Query Rewrite(IndexReader reader) } else { - if (qc is TermQuery) + if (qc is TermQuery tq) { - TermQuery tq = (TermQuery)qc; allSpanClauses[i] = new SpanTermQuery(tq.Term); } else @@ -333,7 +332,7 @@ public override Query Rewrite(IndexReader reader) SpanQuery[] includeClauses = positiveClauses .ToArray(); - SpanQuery include = null; + SpanQuery include; // LUCENENET: IDE0059: Remove unnecessary value assignment if (includeClauses.Length == 1) { include = includeClauses[0]; // only one positive clause @@ -370,16 +369,14 @@ private void AddComplexPhraseClause(IList spanClauses, BooleanQuery q chosenList = nots; } - if (childQuery is TermQuery) + if (childQuery is TermQuery tq) { - TermQuery tq = (TermQuery)childQuery; SpanTermQuery stq = new SpanTermQuery(tq.Term); stq.Boost = 
tq.Boost; chosenList.Add(stq); } - else if (childQuery is BooleanQuery) + else if (childQuery is BooleanQuery cbq) { - BooleanQuery cbq = (BooleanQuery)childQuery; AddComplexPhraseClause(chosenList, cbq); } else diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Builders/QueryTreeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Builders/QueryTreeBuilder.cs index 4d23937d72..d8d69a3179 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Builders/QueryTreeBuilder.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Builders/QueryTreeBuilder.cs @@ -126,9 +126,9 @@ private IQueryBuilder GetBuilder(IQueryNode node) { IQueryBuilder builder = null; - if (this.fieldNameBuilders != null && node is IFieldableNode) + if (this.fieldNameBuilders != null && node is IFieldableNode fieldableNode) { - string field = ((IFieldableNode)node).Field; + string field = fieldableNode.Field; this.fieldNameBuilders.TryGetValue(field, out builder); } @@ -160,7 +160,7 @@ private IQueryBuilder GetBuilder(IQueryNode node) return builder; } - private void ProcessNode(IQueryNode node, IQueryBuilder builder) + private static void ProcessNode(IQueryNode node, IQueryBuilder builder) // LUCENENET: CA1822: Mark members as static { if (builder == null) { @@ -183,8 +183,7 @@ private IQueryBuilder GetQueryBuilder(Type clazz) { if (typeof(IQueryNode).IsAssignableFrom(clazz)) { - IQueryBuilder result; - this.queryNodeBuilders.TryGetValue(clazz, out result); + this.queryNodeBuilders.TryGetValue(clazz, out IQueryBuilder result); return result; } diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Config/AbstractQueryConfig.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Config/AbstractQueryConfig.cs index 7cdcd0984a..eb44707bd1 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Config/AbstractQueryConfig.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Config/AbstractQueryConfig.cs @@ -50,9 +50,8 @@ public virtual T Get(ConfigurationKey key) { throw new ArgumentException("key cannot be null!"); } - 
object result; - this.configMap.TryGetValue(key, out result); - return result == null ? default(T) : (T)result; + this.configMap.TryGetValue(key, out object result); + return result == null ? default : (T)result; } /// diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Config/FieldConfig.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Config/FieldConfig.cs index 1543d986fa..25fef283eb 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Config/FieldConfig.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Config/FieldConfig.cs @@ -24,7 +24,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Config /// public class FieldConfig : AbstractQueryConfig { - private string fieldName; + private readonly string fieldName; // LUCENENET: marked readonly /// /// Constructs a @@ -33,12 +33,7 @@ public class FieldConfig : AbstractQueryConfig /// if the field name is null public FieldConfig(string fieldName) { - if (fieldName == null) - { - throw new ArgumentException("field name should not be null!"); - } - - this.fieldName = fieldName; + this.fieldName = fieldName ?? 
throw new ArgumentNullException(nameof(fieldName), "field name should not be null!"); } /// diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/AnyQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/AnyQueryNode.cs index 3e2e67eb80..520b553ac7 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/AnyQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/AnyQueryNode.cs @@ -49,15 +49,11 @@ public AnyQueryNode(IList clauses, string field, { if (clause is FieldQueryNode) { - if (clause is QueryNode) - { - ((QueryNode)clause).m_toQueryStringIgnoreFields = true; - } - - if (clause is IFieldableNode) - { - ((IFieldableNode)clause).Field = field; - } + if (clause is QueryNode queryNode) + queryNode.m_toQueryStringIgnoreFields = true; + + if (clause is IFieldableNode fieldableNode) + fieldableNode.Field = field; } } } diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/GroupQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/GroupQueryNode.cs index 0d4562795c..d5c5691433 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/GroupQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/GroupQueryNode.cs @@ -75,8 +75,10 @@ public override IQueryNode CloneTree() public virtual void SetChild(IQueryNode child) { - List list = new List(); - list.Add(child); + List list = new List + { + child + }; this.Set(list); } } diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ModifierQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ModifierQueryNode.cs index 029e2358cd..a2c810cf29 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ModifierQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ModifierQueryNode.cs @@ -107,8 +107,10 @@ public override IQueryNode CloneTree() public virtual void SetChild(IQueryNode child) { - List list = new List(); - list.Add(child); + List list = new List + { + child + }; this.Set(list); } } diff --git 
a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PhraseSlopQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PhraseSlopQueryNode.cs index e8be156785..2a7cd6072d 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PhraseSlopQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/PhraseSlopQueryNode.cs @@ -93,9 +93,9 @@ public virtual string Field { IQueryNode child = GetChild(); - if (child is IFieldableNode) + if (child is IFieldableNode fieldableNode) { - return ((IFieldableNode)child).Field; + return fieldableNode.Field; } return null; @@ -104,9 +104,9 @@ public virtual string Field { IQueryNode child = GetChild(); - if (child is IFieldableNode) + if (child is IFieldableNode fieldableNode) { - ((IFieldableNode)child).Field = value; + fieldableNode.Field = value; } } } diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ProximityQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ProximityQueryNode.cs index 1c6ae1e22c..40fa7ba9f6 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ProximityQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/ProximityQueryNode.cs @@ -48,7 +48,7 @@ public enum Type private ProximityQueryNode.Type proximityType = ProximityQueryNode.Type.SENTENCE; private int distance = -1; - private bool inorder = false; + private readonly bool inorder = false; // LUCENENET: marked readonly private string field = null; /// @@ -103,10 +103,10 @@ private static void ClearFields(IList nodes, string field) foreach (IQueryNode clause in nodes) { - if (clause is FieldQueryNode) + if (clause is FieldQueryNode fieldQueryNode) { - ((FieldQueryNode)clause).m_toQueryStringIgnoreFields = true; - ((FieldQueryNode)clause).Field = field; + fieldQueryNode.m_toQueryStringIgnoreFields = true; + fieldQueryNode.Field = field; } } } @@ -218,7 +218,9 @@ public class ProximityType { internal int pDistance = 0; - ProximityQueryNode.Type pType/* = null*/; +#pragma warning disable IDE0052 // Assigned never 
read + internal ProximityQueryNode.Type pType/* = null*/; // LUCENENET: Not nullable +#pragma warning restore IDE0052 // Assigned never read public ProximityType(ProximityQueryNode.Type type) : this(type, 0) diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/SlopQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/SlopQueryNode.cs index e1de81cca6..664cc1dfe2 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/SlopQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/SlopQueryNode.cs @@ -99,9 +99,9 @@ public virtual string Field { IQueryNode child = GetChild(); - if (child is IFieldableNode) + if (child is IFieldableNode fieldableNode) { - return ((IFieldableNode)child).Field; + return fieldableNode.Field; } return null; @@ -110,9 +110,9 @@ public virtual string Field { IQueryNode child = GetChild(); - if (child is IFieldableNode) + if (child is IFieldableNode fieldableNode) { - ((IFieldableNode)child).Field = value; + fieldableNode.Field = value; } } } diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/TokenizedPhraseQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/TokenizedPhraseQueryNode.cs index 3e4be43c79..824695bbce 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/TokenizedPhraseQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Nodes/TokenizedPhraseQueryNode.cs @@ -102,9 +102,9 @@ public virtual string Field foreach (IQueryNode child in children) { - if (child is IFieldableNode) + if (child is IFieldableNode fieldableNode) { - ((IFieldableNode)child).Field = value; + fieldableNode.Field = value; } } } diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Processors/QueryNodeProcessorImpl.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Processors/QueryNodeProcessorImpl.cs index e7b678a553..1ab9d378e6 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Processors/QueryNodeProcessorImpl.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Processors/QueryNodeProcessorImpl.cs @@ -67,16 
+67,16 @@ namespace Lucene.Net.QueryParsers.Flexible.Core.Processors /// public abstract class QueryNodeProcessor : IQueryNodeProcessor { - private List childrenListPool = new List(); + private readonly List childrenListPool = new List(); // LUCENENET: marked readonly private QueryConfigHandler queryConfig; - public QueryNodeProcessor() + protected QueryNodeProcessor() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { // empty constructor } - public QueryNodeProcessor(QueryConfigHandler queryConfigHandler) + protected QueryNodeProcessor(QueryConfigHandler queryConfigHandler) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.queryConfig = queryConfigHandler; } diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs index 87a8b19163..a9a95286de 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Processors/RemoveDeletedQueryNodesProcessor.cs @@ -50,7 +50,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) if (!node.IsLeaf) { IList children = node.GetChildren(); - bool removeBoolean = false; + bool removeBoolean; // LUCENENET: IDE0059: Remove unnecessary value assignment if (children == null || children.Count == 0) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/QueryNodeError.cs b/src/Lucene.Net.QueryParser/Flexible/Core/QueryNodeError.cs index 332727aa2b..bee10892e6 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/QueryNodeError.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/QueryNodeError.cs @@ -36,7 +36,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Core #endif public class QueryNodeError : Exception, INLSException { - private IMessage message; + private readonly IMessage message; // LUCENENET: marked readonly /// /// diff 
--git a/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs b/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs index d6d6ba40c8..de50d0a0fe 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/QueryParserHelper.cs @@ -107,12 +107,7 @@ public virtual void SetQueryNodeProcessor(IQueryNodeProcessor processor) /// public virtual void SetSyntaxParser(ISyntaxParser syntaxParser) { - if (syntaxParser == null) - { - throw new ArgumentException("textParser should not be null!"); - } - - this.syntaxParser = syntaxParser; + this.syntaxParser = syntaxParser ?? throw new ArgumentNullException(nameof(syntaxParser), "textParser should not be null!"); } /// @@ -124,12 +119,7 @@ public virtual void SetSyntaxParser(ISyntaxParser syntaxParser) /// public virtual void SetQueryBuilder(IQueryBuilder queryBuilder) { - if (queryBuilder == null) - { - throw new ArgumentException("queryBuilder should not be null!"); - } - - this.builder = queryBuilder; + this.builder = queryBuilder ?? 
throw new ArgumentNullException(nameof(queryBuilder), "queryBuilder should not be null!"); } /// diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Util/QueryNodeOperation.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Util/QueryNodeOperation.cs index 8f8a241637..768a4cf3e7 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Util/QueryNodeOperation.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Util/QueryNodeOperation.cs @@ -1,5 +1,6 @@ using Lucene.Net.QueryParsers.Flexible.Core.Nodes; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.QueryParsers.Flexible.Core.Util { @@ -61,13 +62,15 @@ public static IQueryNode LogicalAnd(IQueryNode q1, IQueryNode q2) else op = ANDOperation.NONE; - IQueryNode result = null; + IQueryNode result; // LUCENENET: IDE0059: Remove unnecessary value assignment switch (op) { case ANDOperation.NONE: - List children = new List(); - children.Add(q1.CloneTree()); - children.Add(q2.CloneTree()); + List children = new List + { + q1.CloneTree(), + q2.CloneTree() + }; result = new AndQueryNode(children); return result; case ANDOperation.Q1: diff --git a/src/Lucene.Net.QueryParser/Flexible/Core/Util/UnescapedCharSequence.cs b/src/Lucene.Net.QueryParser/Flexible/Core/Util/UnescapedCharSequence.cs index 8ccd205bb1..a62c7e74e0 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Core/Util/UnescapedCharSequence.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Core/Util/UnescapedCharSequence.cs @@ -187,8 +187,8 @@ public bool WasEscaped(int index) public static bool WasEscaped(ICharSequence text, int index) { - if (text is UnescapedCharSequence) - return ((UnescapedCharSequence)text).wasEscaped[index]; + if (text is UnescapedCharSequence unescapedCharSequence) + return unescapedCharSequence.wasEscaped[index]; else return false; } @@ -196,10 +196,10 @@ public static ICharSequence ToLower(ICharSequence text, CultureInfo locale) { var lowercaseText = locale.TextInfo.ToLower(text.ToString()); - if (text is 
UnescapedCharSequence) + if (text is UnescapedCharSequence unescapedCharSequence) { char[] chars = lowercaseText.ToCharArray(); - bool[] wasEscaped = ((UnescapedCharSequence)text).wasEscaped; + bool[] wasEscaped = unescapedCharSequence.wasEscaped; return new UnescapedCharSequence(chars, wasEscaped, 0, chars.Length); } else diff --git a/src/Lucene.Net.QueryParser/Flexible/Messages/MessageImpl.cs b/src/Lucene.Net.QueryParser/Flexible/Messages/MessageImpl.cs index 8135925f5c..cd4a191c3a 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Messages/MessageImpl.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Messages/MessageImpl.cs @@ -1,4 +1,5 @@ -using System; +using Lucene.Net.Support; +using System; using System.Globalization; using System.Text; @@ -30,9 +31,9 @@ namespace Lucene.Net.QueryParsers.Flexible.Messages #endif public class Message : IMessage { - private string key; + private readonly string key; // LUCENENET: marked readonly - private object[] arguments = new object[0]; + private readonly object[] arguments = Arrays.Empty(); // LUCENENET: marked readonly public Message(string key) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Messages/NLS.cs b/src/Lucene.Net.QueryParser/Flexible/Messages/NLS.cs index 060e535302..bdaf3adbef 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Messages/NLS.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Messages/NLS.cs @@ -39,14 +39,14 @@ namespace Lucene.Net.QueryParsers.Flexible.Messages /// /// MessageBundle classes may subclass this type. /// - public class NLS + public abstract class NLS // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// /// LUCENENET specific factory reference to inject instances of /// into this class. 
/// private static IResourceManagerFactory resourceManagerFactory = new BundleResourceManagerFactory(); - private static IDictionary bundles = new Dictionary(0); + private static readonly IDictionary bundles = new Dictionary(0); // LUCENENET: marked readonly protected NLS() { @@ -73,11 +73,7 @@ public static IResourceManagerFactory GetResourceManagerFactory() // in a centralized DI configuration builder. public static void SetResourceManagerFactory(IResourceManagerFactory resourceManagerFactory) { - if (resourceManagerFactory == null) - { - throw new ArgumentNullException("resourceManagerFactory"); - } - NLS.resourceManagerFactory = resourceManagerFactory; + NLS.resourceManagerFactory = resourceManagerFactory ?? throw new ArgumentNullException(nameof(resourceManagerFactory)); } public static string GetLocalizedMessage(string key) @@ -128,9 +124,7 @@ protected static void InitializeMessages(string bundleName, Type clazz) if (!bundles.ContainsKey(bundleName)) bundles[bundleName] = clazz; } -#pragma warning disable 168 - catch (Exception e) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { // ignore all errors and exceptions // because this function is supposed to be called at class load time. 
@@ -215,16 +209,12 @@ private static void ValidateMessage(string key, Type clazz) } } } -#pragma warning disable 168 - catch (MissingManifestResourceException e) -#pragma warning restore 168 + catch (MissingManifestResourceException) // LUCENENET: IDE0059: Remove unnecessary value assignment { //System.err.println("WARN: Message with key:" + key + " and locale: " // + Locale.getDefault() + " not found."); } -#pragma warning disable 168 - catch (Exception e) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { // ignore all other errors and exceptions // since this code is just a test to see if the message is present on the diff --git a/src/Lucene.Net.QueryParser/Flexible/Precedence/Processors/BooleanModifiersQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Precedence/Processors/BooleanModifiersQueryNodeProcessor.cs index 77af5db55d..17e347eda6 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Precedence/Processors/BooleanModifiersQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Precedence/Processors/BooleanModifiersQueryNodeProcessor.cs @@ -37,7 +37,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Precedence.Processors /// public class BooleanModifiersQueryNodeProcessor : QueryNodeProcessor { - private List childrenBuffer = new List(); + private readonly List childrenBuffer = new List(); // LUCENENET: marked readonly private bool usingAnd = false; @@ -95,14 +95,12 @@ protected override IQueryNode PostProcessNode(IQueryNode node) private IQueryNode ApplyModifier(IQueryNode node, Modifier mod) { // check if modifier is not already defined and is default - if (!(node is ModifierQueryNode)) + if (!(node is ModifierQueryNode modNode)) { return new ModifierQueryNode(node, mod); } else { - ModifierQueryNode modNode = (ModifierQueryNode)node; - if (modNode.Modifier == Modifier.MOD_NONE) { return new ModifierQueryNode(modNode.GetChild(), mod); diff --git 
a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/BooleanQueryNodeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/BooleanQueryNodeBuilder.cs index 63f4de52b9..838568507c 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/BooleanQueryNodeBuilder.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/BooleanQueryNodeBuilder.cs @@ -80,9 +80,8 @@ public virtual Query Build(IQueryNode queryNode) private static Occur GetModifierValue(IQueryNode node) { - if (node is ModifierQueryNode) + if (node is ModifierQueryNode mNode) { - ModifierQueryNode mNode = ((ModifierQueryNode)node); switch (mNode.Modifier) { case Modifier.MOD_REQ: diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs index 6da62e5674..7d808a6d23 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/MultiPhraseQueryNodeBuilder.cs @@ -54,8 +54,7 @@ public virtual Query Build(IQueryNode queryNode) TermQuery termQuery = (TermQuery)termNode .GetTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID); - List termList; - if (!positionTermMap.TryGetValue(termNode.PositionIncrement, out termList) || termList == null) + if (!positionTermMap.TryGetValue(termNode.PositionIncrement, out List termList) || termList == null) { termList = new List(); positionTermMap[termNode.PositionIncrement] = termList; diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/SlopQueryNodeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/SlopQueryNodeBuilder.cs index d58cf70a44..5be404bdae 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/SlopQueryNodeBuilder.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/SlopQueryNodeBuilder.cs @@ -41,9 +41,9 @@ public virtual Query Build(IQueryNode queryNode) Query query = 
(Query)phraseSlopNode.GetChild().GetTag( QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID); - if (query is PhraseQuery) + if (query is PhraseQuery phraseQuery) { - ((PhraseQuery)query).Slop = phraseSlopNode.Value; + phraseQuery.Slop = phraseSlopNode.Value; } else { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/StandardBooleanQueryNodeBuilder.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/StandardBooleanQueryNodeBuilder.cs index b145c0c069..d30e03fe8f 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/StandardBooleanQueryNodeBuilder.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Builders/StandardBooleanQueryNodeBuilder.cs @@ -79,9 +79,8 @@ public virtual Query Build(IQueryNode queryNode) private static Occur GetModifierValue(IQueryNode node) { - if (node is ModifierQueryNode) + if (node is ModifierQueryNode mNode) { - ModifierQueryNode mNode = ((ModifierQueryNode)node); Modifier modifier = mNode.Modifier; if (Modifier.MOD_NONE.Equals(modifier)) diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldBoostMapFCListener.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldBoostMapFCListener.cs index c701b076dd..0679361d8d 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldBoostMapFCListener.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldBoostMapFCListener.cs @@ -32,7 +32,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config /// public class FieldBoostMapFCListener : IFieldConfigListener { - private QueryConfigHandler config = null; + private readonly QueryConfigHandler config = null; // LUCENENET: marked readonly public FieldBoostMapFCListener(QueryConfigHandler config) { @@ -45,8 +45,7 @@ public virtual void BuildFieldConfig(FieldConfig fieldConfig) if (fieldBoostMap != null) { - float? boost; - if (fieldBoostMap.TryGetValue(fieldConfig.Field, out boost) && boost != null) + if (fieldBoostMap.TryGetValue(fieldConfig.Field, out float? 
boost) && boost != null) { fieldConfig.Set(ConfigurationKeys.BOOST, boost); } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs index e4b0136e32..93592f9cce 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/FieldDateResolutionFCListener.cs @@ -33,7 +33,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Config /// public class FieldDateResolutionFCListener : IFieldConfigListener { - private QueryConfigHandler config = null; + private readonly QueryConfigHandler config = null; // LUCENENET: marked readonly public FieldDateResolutionFCListener(QueryConfigHandler config) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumericFieldConfigListener.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumericFieldConfigListener.cs index f17ce30a92..911e711820 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumericFieldConfigListener.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Config/NumericFieldConfigListener.cs @@ -41,12 +41,7 @@ public class NumericFieldConfigListener : IFieldConfigListener /// the it will listen too public NumericFieldConfigListener(QueryConfigHandler config) { - if (config == null) - { - throw new ArgumentException("config cannot be null!"); - } - - this.config = config; + this.config = config ?? 
throw new ArgumentNullException(nameof(config), "config cannot be null!"); } public virtual void BuildFieldConfig(FieldConfig fieldConfig) @@ -56,8 +51,7 @@ public virtual void BuildFieldConfig(FieldConfig fieldConfig) if (numericConfigMap != null) { - NumericConfig numericConfig; - if (numericConfigMap.TryGetValue(fieldConfig.Field, out numericConfig) && numericConfig != null) + if (numericConfigMap.TryGetValue(fieldConfig.Field, out NumericConfig numericConfig) && numericConfig != null) { fieldConfig.Set(ConfigurationKeys.NUMERIC_CONFIG, numericConfig); } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/AbstractRangeQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/AbstractRangeQueryNode.cs index 2f740b1854..d541ba3369 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/AbstractRangeQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/AbstractRangeQueryNode.cs @@ -140,9 +140,11 @@ public virtual void SetBounds(T lower, T upper, bool lowerInclusive, this.lowerInclusive = lowerInclusive; this.upperInclusive = upperInclusive; - List children = new List(2); - children.Add(lower); - children.Add(upper); + List children = new List(2) + { + lower, + upper + }; Set(children); } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/MultiPhraseQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/MultiPhraseQueryNode.cs index 7c931b938c..66b9e483e1 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/MultiPhraseQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/MultiPhraseQueryNode.cs @@ -99,9 +99,9 @@ public virtual string Field { foreach (IQueryNode child in children) { - if (child is IFieldableNode) + if (child is IFieldableNode fieldableNode) { - ((IFieldableNode)child).Field = value; + fieldableNode.Field = value; } } } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/StandardBooleanQueryNode.cs 
b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/StandardBooleanQueryNode.cs index f311be0c9e..d77c5ff6ad 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/StandardBooleanQueryNode.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/StandardBooleanQueryNode.cs @@ -29,7 +29,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes /// public class StandardBooleanQueryNode : BooleanQueryNode { - private bool disableCoord; + private readonly bool disableCoord; // LUCENENET: marked readonly public StandardBooleanQueryNode(IList clauses, bool disableCoord) : base(clauses) diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs index 251b227d90..9d084dbbbe 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs @@ -220,9 +220,9 @@ public virtual ICharSequence Escape(ICharSequence text, CultureInfo locale, Esca // anything else) // since we need to preserve the UnescapedCharSequence and escape the // original escape chars - if (text is UnescapedCharSequence) + if (text is UnescapedCharSequence unescapedCharSequence) { - text = ((UnescapedCharSequence)text).ToStringEscaped(wildcardChars); + text = unescapedCharSequence.ToStringEscaped(wildcardChars); } else { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/FastCharStream.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/FastCharStream.cs index 4de606cb63..46da7ecea4 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/FastCharStream.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/FastCharStream.cs @@ -29,15 +29,15 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser /// public sealed class FastCharStream : ICharStream { - char[] buffer = null; + private char[] buffer = null; - int bufferLength = 0; // end of 
valid chars - int bufferPosition = 0; // next char to read + private int bufferLength = 0; // end of valid chars + private int bufferPosition = 0; // next char to read - int tokenStart = 0; // offset in buffer - int bufferStart = 0; // position in file of buffer + private int tokenStart = 0; // offset in buffer + private int bufferStart = 0; // position in file of buffer - TextReader input; // source of chars + private readonly TextReader input; // source of chars // LUCENENET: marked readonly /// /// Constructs from a . @@ -118,10 +118,9 @@ public void Done() { input.Dispose(); } -#pragma warning disable 168 - catch (IOException e) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignore } } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs index 1891a52b3e..609cb55450 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs @@ -206,7 +206,7 @@ private static string Initialize(Token currentToken, /// when these raw version cannot be used as part of an ASCII /// string literal. 
/// - static string AddEscapes(string str) + private static string AddEscapes(string str) { StringBuilder retval = new StringBuilder(); char ch; diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs index f69e99d587..f060ca52c6 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs @@ -6,6 +6,7 @@ using Lucene.Net.QueryParsers.Flexible.Standard.Nodes; using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; #if FEATURE_SERIALIZABLE_EXCEPTIONS @@ -34,11 +35,14 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser /// /// Parser for the standard Lucene syntax /// + [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This class is based on generated code")] + [SuppressMessage("Style", "IDE0059:Unnecessary assignment of a value", Justification = "This class is based on generated code")] + [SuppressMessage("Style", "IDE0028:Collection initialization can be simplified", Justification = "This class is based on generated code")] public class StandardSyntaxParser : ISyntaxParser /*, StandardSyntaxParserConstants*/ { - private static readonly int CONJ_NONE = 0; - private static readonly int CONJ_AND = 2; - private static readonly int CONJ_OR = 2; + private const int CONJ_NONE = 0; + private const int CONJ_AND = 2; + private const int CONJ_OR = 2; // syntax parser constructor @@ -157,6 +161,7 @@ public IQueryNode TopLevelQuery(string field) throw new Exception("Missing return statement in function"); } + // These changes were made to introduce operator precedence: // - Clause() now returns a QueryNode. 
// - The modifiers are consumed by Clause() and returned as part of the QueryNode Object @@ -549,10 +554,9 @@ public IQueryNode Clause(string field) q = new BoostQueryNode(q, f); } } -#pragma warning disable 168 - catch (Exception ignored) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + //ignored /* Should this be handled somehow? (defaults to "no boost", if * boost number is invalid) */ @@ -775,10 +779,9 @@ public IQueryNode Term(string field) phraseSlop = (int)Convert.ToSingle(fuzzySlop.Image.Substring(1), CultureInfo.InvariantCulture); q = new SlopQueryNode(q, phraseSlop); } -#pragma warning disable 168 - catch (Exception ignored) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignored /* Should this be handled somehow? (defaults to "no PhraseSlop", if * slop number is invalid) */ @@ -802,10 +805,9 @@ public IQueryNode Term(string field) q = new BoostQueryNode(q, f); } } -#pragma warning disable 168 - catch (Exception ignored) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignored /* Should this be handled somehow? (defaults to "no boost", if * boost number is invalid) */ @@ -1160,10 +1162,10 @@ private int Jj_ntk() return (jj_ntk = Jj_nt.Kind); } - private List jj_expentries = new List(); + private readonly List jj_expentries = new List(); // LUCENENET: marked readonly private int[] jj_expentry; private int jj_kind = -1; - private int[] jj_lasttokens = new int[100]; + private readonly int[] jj_lasttokens = new int[100]; // LUCENENET: marked readonly private int jj_endpos; private void Jj_add_error_token(int kind, int pos) @@ -1252,6 +1254,7 @@ public virtual ParseException GenerateParseException() return new ParseException(Token, exptokseq, StandardSyntaxParserConstants.TokenImage); } + /// Enable tracing. 
public void Enable_tracing() { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs index c70d65edc7..1b73994a95 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs @@ -1,4 +1,5 @@ using Lucene.Net.Support.IO; +using System.Diagnostics.CodeAnalysis; using System.IO; using Console = Lucene.Net.Util.SystemConsole; @@ -24,10 +25,14 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser /// /// Token Manager. /// + [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This class is based on generated code")] + [SuppressMessage("Style", "IDE0059:Unnecessary assignment of a value", Justification = "This class is based on generated code")] public class StandardSyntaxParserTokenManager /*: StandardSyntaxParserConstants*/ { /// Debug output. +#pragma warning disable IDE0052 // Remove unread private members private TextWriter debugStream = Console.Out; // LUCENENET specific - made private, since we already have a setter +#pragma warning restore IDE0052 // Remove unread private members /// Set debug output. 
public void SetDebugStream(TextWriter ds) { debugStream = new SafeTextWriterWrapper(ds); } private int JjStopStringLiteralDfa_2(int pos, long active0) @@ -783,12 +788,12 @@ private static bool JjCanMove_2(int hiByte, int i1, int i2, ulong l1, ulong l2) -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1, -1, -1, -1, 1, 1, 2, -1, 2, 2, -1, -1, }; - static readonly long[] jjtoToken = { + private static readonly long[] jjtoToken = { 0x3ffffff01L, }; - static readonly long[] jjtoSkip = { - 0x80L, - }; + //static readonly long[] jjtoSkip = { // LUCENENET: Never read + // 0x80L, + //}; protected ICharStream m_input_stream; private readonly uint[] jjrounds = new uint[33]; private readonly int[] jjstateSet = new int[66]; @@ -847,7 +852,7 @@ protected Token JjFillToken() int beginColumn; int endColumn; string im = jjstrLiteralImages[jjmatchedKind]; - curTokenImage = (im == null) ? m_input_stream.GetImage() : im; + curTokenImage = im ?? m_input_stream.GetImage(); beginLine = m_input_stream.BeginLine; beginColumn = m_input_stream.BeginColumn; endLine = m_input_stream.EndLine; diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/TokenMgrError.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/TokenMgrError.cs index a4efda0a8a..c5678c5fe9 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/TokenMgrError.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/TokenMgrError.cs @@ -40,22 +40,22 @@ public class TokenMgrError : Exception /// /// Lexical error occurred. /// - internal static readonly int LEXICAL_ERROR = 0; + internal const int LEXICAL_ERROR = 0; /// /// An attempt was made to create a second instance of a static token manager. /// - internal static readonly int STATIC_LEXER_ERROR = 1; + internal const int STATIC_LEXER_ERROR = 1; /// /// Tried to change to an invalid lexical state. 
/// - internal static readonly int INVALID_LEXICAL_STATE = 2; + internal const int INVALID_LEXICAL_STATE = 2; /// /// Detected (and bailed out of) an infinite loop in the token manager. /// - internal static readonly int LOOP_DETECTED = 3; + internal const int LOOP_DETECTED = 3; /// /// Indicates the reason why the exception is thrown. It will have diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs index 9ad864ef58..253a51e8fc 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AllowLeadingWildcardProcessor.cs @@ -60,10 +60,8 @@ public override IQueryNode Process(IQueryNode queryTree) protected override IQueryNode PostProcessNode(IQueryNode node) { - if (node is WildcardQueryNode) + if (node is WildcardQueryNode wildcardNode) { - WildcardQueryNode wildcardNode = (WildcardQueryNode)node; - if (wildcardNode.Text.Length > 0) { // Validate if the wildcard was escaped diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs index 5a220bf637..2579e3e1be 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/AnalyzerQueryNodeProcessor.cs @@ -185,9 +185,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true); term = termAtt.ToString(); } -#pragma warning disable 168 - catch (IOException e) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // safe to ignore, because we know the number of tokens } @@ -216,9 +214,7 @@ protected override IQueryNode 
PostProcessNode(IQueryNode node) if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true); term = termAtt.ToString(); } -#pragma warning disable 168 - catch (IOException e) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // safe to ignore, because we know the number of tokens } @@ -243,9 +239,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) if (Debugging.AssertsEnabled) Debugging.Assert(hasNext == true); term = termAtt.ToString(); } -#pragma warning disable 168 - catch (IOException e) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // safe to ignore, because we know the number of tokens } @@ -314,9 +308,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) positionIncrement = posIncrAtt.PositionIncrement; } } -#pragma warning disable 168 - catch (IOException e) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // safe to ignore, because we know the number of tokens } @@ -387,9 +379,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) positionIncrement = posIncrAtt.PositionIncrement; } } -#pragma warning disable 168 - catch (IOException e) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // safe to ignore, because we know the number of tokens } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanQuery2ModifierNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanQuery2ModifierNodeProcessor.cs index 08b40d21de..67a2d33fef 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanQuery2ModifierNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanQuery2ModifierNodeProcessor.cs @@ -49,11 +49,11 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors /// 
public class BooleanQuery2ModifierNodeProcessor : IQueryNodeProcessor { - internal readonly static string TAG_REMOVE = "remove"; - internal readonly static string TAG_MODIFIER = "wrapWithModifier"; - internal readonly static string TAG_BOOLEAN_ROOT = "booleanRoot"; + internal const string TAG_REMOVE = "remove"; + internal const string TAG_MODIFIER = "wrapWithModifier"; + internal const string TAG_BOOLEAN_ROOT = "booleanRoot"; - QueryConfigHandler queryConfigHandler; + private QueryConfigHandler queryConfigHandler; private readonly List childrenBuffer = new List(); @@ -169,14 +169,12 @@ protected virtual bool IsDefaultBooleanQueryNode(IQueryNode toTest) private IQueryNode ApplyModifier(IQueryNode node, Modifier mod) { // check if modifier is not already defined and is default - if (!(node is ModifierQueryNode)) + if (!(node is ModifierQueryNode modNode)) { return new BooleanModifierNode(node, mod); } else { - ModifierQueryNode modNode = (ModifierQueryNode)node; - if (modNode.Modifier == Modifier.MOD_NONE) { return new ModifierQueryNode(modNode.GetChild(), mod); @@ -188,9 +186,8 @@ private IQueryNode ApplyModifier(IQueryNode node, Modifier mod) protected virtual void TagModifierButDoNotOverride(IQueryNode node, Modifier mod) { - if (node is ModifierQueryNode) + if (node is ModifierQueryNode modNode) { - ModifierQueryNode modNode = (ModifierQueryNode)node; if (modNode.Modifier == Modifier.MOD_NONE) { node.SetTag(TAG_MODIFIER, mod); diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanSingleChildOptimizationQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanSingleChildOptimizationQueryNodeProcessor.cs index cf61e18c17..17dbd334e0 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanSingleChildOptimizationQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BooleanSingleChildOptimizationQueryNodeProcessor.cs @@ -46,10 +46,8 @@ protected override IQueryNode 
PostProcessNode(IQueryNode node) { IQueryNode child = children[0]; - if (child is ModifierQueryNode) + if (child is ModifierQueryNode modNode) { - ModifierQueryNode modNode = (ModifierQueryNode)child; - if (modNode is BooleanModifierNode || modNode.Modifier == Modifier.MOD_NONE) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BoostQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BoostQueryNodeProcessor.cs index 5aa03b34de..9a9342b8ac 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BoostQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/BoostQueryNodeProcessor.cs @@ -36,10 +36,9 @@ public class BoostQueryNodeProcessor : QueryNodeProcessor { protected override IQueryNode PostProcessNode(IQueryNode node) { - if (node is IFieldableNode && + if (node is IFieldableNode fieldNode && (node.Parent == null || !(node.Parent is IFieldableNode))) { - IFieldableNode fieldNode = (IFieldableNode)node; QueryConfigHandler config = GetQueryConfigHandler(); if (config != null) diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/FuzzyQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/FuzzyQueryNodeProcessor.cs index 50f7c95d3c..ef8a338d29 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/FuzzyQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/FuzzyQueryNodeProcessor.cs @@ -44,12 +44,11 @@ protected override IQueryNode PostProcessNode(IQueryNode node) protected override IQueryNode PreProcessNode(IQueryNode node) { - if (node is FuzzyQueryNode) + if (node is FuzzyQueryNode fuzzyNode) { - FuzzyQueryNode fuzzyNode = (FuzzyQueryNode)node; QueryConfigHandler config = GetQueryConfigHandler(); - FuzzyConfig fuzzyConfig = null; + FuzzyConfig fuzzyConfig; // LUCENENET: IDE0059: Remove unnecessary value assignment if (config != null && (fuzzyConfig = config.Get(ConfigurationKeys.FUZZY_CONFIG)) 
!= null) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/GroupQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/GroupQueryNodeProcessor.cs index 7080334c0f..fbe6392d45 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/GroupQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/GroupQueryNodeProcessor.cs @@ -67,9 +67,9 @@ public virtual IQueryNode Process(IQueryNode queryTree) this.usingAnd = Operator.AND == defaultOperator; - if (queryTree is GroupQueryNode) + if (queryTree is GroupQueryNode groupQueryNode) { - queryTree = ((GroupQueryNode)queryTree).GetChild(); + queryTree = groupQueryNode.GetChild(); } this.queryNodeList = new List(); @@ -108,10 +108,8 @@ private IQueryNode ApplyModifier(IQueryNode node, IQueryNode parent) { if (parent is OrQueryNode) { - if (node is ModifierQueryNode) + if (node is ModifierQueryNode modNode) { - ModifierQueryNode modNode = (ModifierQueryNode)node; - if (modNode.Modifier == Modifier.MOD_REQ) { return modNode.GetChild(); @@ -120,10 +118,8 @@ private IQueryNode ApplyModifier(IQueryNode node, IQueryNode parent) } else { - if (node is ModifierQueryNode) + if (node is ModifierQueryNode modNode) { - ModifierQueryNode modNode = (ModifierQueryNode)node; - if (modNode.Modifier == Modifier.MOD_NONE) { return new BooleanModifierNode(modNode.GetChild(), Modifier.MOD_REQ); @@ -139,10 +135,8 @@ private IQueryNode ApplyModifier(IQueryNode node, IQueryNode parent) { if (node.Parent is AndQueryNode) { - if (node is ModifierQueryNode) + if (node is ModifierQueryNode modNode) { - ModifierQueryNode modNode = (ModifierQueryNode)node; - if (modNode.Modifier == Modifier.MOD_NONE) { return new BooleanModifierNode(modNode.GetChild(), Modifier.MOD_REQ); diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MatchAllDocsQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MatchAllDocsQueryNodeProcessor.cs index 
d7a1298a43..0121eaf9a4 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MatchAllDocsQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MatchAllDocsQueryNodeProcessor.cs @@ -37,10 +37,8 @@ public MatchAllDocsQueryNodeProcessor() protected override IQueryNode PostProcessNode(IQueryNode node) { - if (node is FieldQueryNode) + if (node is FieldQueryNode fqn) { - FieldQueryNode fqn = (FieldQueryNode)node; - if (fqn.Field.ToString().Equals("*", StringComparison.Ordinal) && fqn.Text.ToString().Equals("*", StringComparison.Ordinal)) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MultiFieldQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MultiFieldQueryNodeProcessor.cs index cfb07a8294..f5ba0e632f 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MultiFieldQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/MultiFieldQueryNodeProcessor.cs @@ -64,11 +64,9 @@ protected override void ProcessChildren(IQueryNode queryTree) protected override IQueryNode PreProcessNode(IQueryNode node) { - if (node is IFieldableNode) + if (node is IFieldableNode fieldNode) { this.processChildren = false; - IFieldableNode fieldNode = (IFieldableNode)node; - if (fieldNode.Field == null) { string[] fields = GetQueryConfigHandler().Get(ConfigurationKeys.MULTI_FIELDS); @@ -89,8 +87,10 @@ protected override IQueryNode PreProcessNode(IQueryNode node) } else { - List children = new List(); - children.Add(fieldNode); + List children = new List + { + fieldNode + }; for (int i = 1; i < fields.Length; i++) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs index d3bd06a409..c1e1ae594f 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs +++ 
b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericQueryNodeProcessor.cs @@ -61,14 +61,13 @@ public NumericQueryNodeProcessor() protected override IQueryNode PostProcessNode(IQueryNode node) { - if (node is FieldQueryNode + if (node is FieldQueryNode fieldNode && !(node.Parent is IRangeQueryNode)) { QueryConfigHandler config = GetQueryConfigHandler(); if (config != null) { - FieldQueryNode fieldNode = (FieldQueryNode)node; FieldConfig fieldConfig = config.GetFieldConfig(fieldNode .GetFieldAsString()); @@ -82,7 +81,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) NumberFormat numberFormat = numericConfig.NumberFormat; string text = fieldNode.GetTextAsString(); /*Number*/ - object number = null; + object number; // LUCENENET: IDE0059: Remove unnecessary value assignment if (text.Length > 0) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericRangeQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericRangeQueryNodeProcessor.cs index 154ef4e56b..784d800176 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericRangeQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/NumericRangeQueryNodeProcessor.cs @@ -56,13 +56,12 @@ public NumericRangeQueryNodeProcessor() protected override IQueryNode PostProcessNode(IQueryNode node) { - if (node is TermRangeQueryNode) + if (node is TermRangeQueryNode termRangeNode) { QueryConfigHandler config = GetQueryConfigHandler(); if (config != null) { - TermRangeQueryNode termRangeNode = (TermRangeQueryNode)node; FieldConfig fieldConfig = config.GetFieldConfig(StringUtils .ToString(termRangeNode.Field)); diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/OpenRangeQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/OpenRangeQueryNodeProcessor.cs index 261d56baa2..dff3d3dce8 100644 --- 
a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/OpenRangeQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/OpenRangeQueryNodeProcessor.cs @@ -36,24 +36,21 @@ public OpenRangeQueryNodeProcessor() { } protected override IQueryNode PostProcessNode(IQueryNode node) { - if (node is TermRangeQueryNode) + if (node is TermRangeQueryNode rangeNode) { - TermRangeQueryNode rangeNode = (TermRangeQueryNode)node; FieldQueryNode lowerNode = (FieldQueryNode)rangeNode.LowerBound; FieldQueryNode upperNode = (FieldQueryNode)rangeNode.UpperBound; ICharSequence lowerText = lowerNode.Text; ICharSequence upperText = upperNode.Text; if (OPEN_RANGE_TOKEN.Equals(upperNode.GetTextAsString(), StringComparison.Ordinal) - && (!(upperText is UnescapedCharSequence) || !((UnescapedCharSequence)upperText) - .WasEscaped(0))) + && (!(upperText is UnescapedCharSequence unescapedUpperText) || !unescapedUpperText.WasEscaped(0))) { upperText = "".AsCharSequence(); } if (OPEN_RANGE_TOKEN.Equals(lowerNode.GetTextAsString(), StringComparison.Ordinal) - && (!(lowerText is UnescapedCharSequence) || !((UnescapedCharSequence)lowerText) - .WasEscaped(0))) + && (!(lowerText is UnescapedCharSequence unescapedLowerText) || !unescapedLowerText.WasEscaped(0))) { lowerText = "".AsCharSequence(); } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/PhraseSlopQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/PhraseSlopQueryNodeProcessor.cs index 6d279d3d8c..cf5fa84943 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/PhraseSlopQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/PhraseSlopQueryNodeProcessor.cs @@ -37,10 +37,8 @@ public PhraseSlopQueryNodeProcessor() protected override IQueryNode PostProcessNode(IQueryNode node) { - if (node is SlopQueryNode) + if (node is SlopQueryNode phraseSlopNode) { - SlopQueryNode phraseSlopNode = (SlopQueryNode)node; - if 
(!(phraseSlopNode.GetChild() is TokenizedPhraseQueryNode) && !(phraseSlopNode.GetChild() is MultiPhraseQueryNode)) { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/RemoveEmptyNonLeafQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/RemoveEmptyNonLeafQueryNodeProcessor.cs index a403f8c83b..1f4bc46a12 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/RemoveEmptyNonLeafQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/RemoveEmptyNonLeafQueryNodeProcessor.cs @@ -35,7 +35,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Processors /// public class RemoveEmptyNonLeafQueryNodeProcessor : QueryNodeProcessor { - private List childrenBuffer = new List(); + private readonly List childrenBuffer = new List(); // LUCENENET: marked readonly public RemoveEmptyNonLeafQueryNodeProcessor() { diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs index 56ad89e4a3..36fa3f05e2 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/TermRangeQueryNodeProcessor.cs @@ -55,9 +55,8 @@ public TermRangeQueryNodeProcessor() protected override IQueryNode PostProcessNode(IQueryNode node) { - if (node is TermRangeQueryNode) + if (node is TermRangeQueryNode termRangeNode) { - TermRangeQueryNode termRangeNode = (TermRangeQueryNode)node; FieldQueryNode upper = (FieldQueryNode)termRangeNode.UpperBound; FieldQueryNode lower = (FieldQueryNode)termRangeNode.LowerBound; @@ -105,16 +104,14 @@ protected override IQueryNode PostProcessNode(IQueryNode node) try { string shortDateFormat = locale.DateTimeFormat.ShortDatePattern; - DateTime d1; - DateTime d2 = DateTime.MaxValue; // We really don't care what we set this to, but we need something or the compiler will 
complain below - if (DateTime.TryParseExact(part1, shortDateFormat, locale, DateTimeStyles.None, out d1)) + if (DateTime.TryParseExact(part1, shortDateFormat, locale, DateTimeStyles.None, out DateTime d1)) { part1 = DateTools.DateToString(d1, dateRes); lower.Text = new StringCharSequence(part1); } - if (DateTime.TryParseExact(part2, shortDateFormat, locale, DateTimeStyles.None, out d2)) + if (DateTime.TryParseExact(part2, shortDateFormat, locale, DateTimeStyles.None, out DateTime d2)) { if (inclusive) { @@ -143,9 +140,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) } } -#pragma warning disable 168 - catch (Exception e) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { // do nothing } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/WildcardQueryNodeProcessor.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/WildcardQueryNodeProcessor.cs index 51e78a2138..42c61f5633 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/WildcardQueryNodeProcessor.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Processors/WildcardQueryNodeProcessor.cs @@ -76,7 +76,7 @@ protected override IQueryNode PostProcessNode(IQueryNode node) return node; } - private bool IsWildcard(string text) + private static bool IsWildcard(string text) // LUCENENET: CA1822: Mark members as static { if (text == null || text.Length <= 0) return false; diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/QueryParserUtil.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/QueryParserUtil.cs index d96544fb43..82b3f02bed 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/QueryParserUtil.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/QueryParserUtil.cs @@ -57,7 +57,7 @@ public static Query Parse(string[] queries, string[] fields, Analyzer analyzer) Query q = qp.Parse(queries[i], fields[i]); if (q != null && // q never null, just being defensive - (!(q is BooleanQuery) 
|| ((BooleanQuery)q).Clauses.Count > 0)) + (!(q is BooleanQuery booleanQuery) || booleanQuery.Clauses.Count > 0)) { bQuery.Add(q, Occur.SHOULD); } @@ -106,7 +106,7 @@ public static Query Parse(string query, string[] fields, Query q = qp.Parse(query, fields[i]); if (q != null && // q never null, just being defensive - (!(q is BooleanQuery) || ((BooleanQuery)q).Clauses.Count > 0)) + (!(q is BooleanQuery booleanQuery) || booleanQuery.Clauses.Count > 0)) { bQuery.Add(q, flags[i]); } @@ -156,7 +156,7 @@ public static Query Parse(string[] queries, string[] fields, Query q = qp.Parse(queries[i], fields[i]); if (q != null && // q never null, just being defensive - (!(q is BooleanQuery) || ((BooleanQuery)q).Clauses.Count > 0)) + (!(q is BooleanQuery booleanQuery) || booleanQuery.Clauses.Count > 0)) { bQuery.Add(q, flags[i]); } diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs index e7a066c6b8..c3fbe29fb1 100644 --- a/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs +++ b/src/Lucene.Net.QueryParser/Flexible/Standard/StandardQueryParser.cs @@ -7,6 +7,7 @@ using Lucene.Net.QueryParsers.Flexible.Standard.Parser; using Lucene.Net.QueryParsers.Flexible.Standard.Processors; using Lucene.Net.Search; +using Lucene.Net.Support; using System; using System.Collections.Generic; using System.Globalization; @@ -174,11 +175,7 @@ public virtual Operator DefaultOperator /// public virtual bool LowercaseExpandedTerms { - get - { - bool? lowercaseExpandedTerms = QueryConfigHandler.Get(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS); - return lowercaseExpandedTerms.HasValue ? lowercaseExpandedTerms.Value : true; - } + get => QueryConfigHandler.Get(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS) ?? 
true; set => QueryConfigHandler.Set(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS, value); } @@ -193,11 +190,7 @@ public virtual bool LowercaseExpandedTerms /// public virtual bool AllowLeadingWildcard { - get - { - bool? allowLeadingWildcard = QueryConfigHandler.Get(ConfigurationKeys.ALLOW_LEADING_WILDCARD); - return allowLeadingWildcard.HasValue ? allowLeadingWildcard.Value : false; - } + get => QueryConfigHandler.Get(ConfigurationKeys.ALLOW_LEADING_WILDCARD) ?? false; set => QueryConfigHandler.Set(ConfigurationKeys.ALLOW_LEADING_WILDCARD, value); } @@ -212,11 +205,7 @@ public virtual bool AllowLeadingWildcard /// public virtual bool EnablePositionIncrements { - get - { - bool? enablePositionsIncrements = QueryConfigHandler.Get(ConfigurationKeys.ENABLE_POSITION_INCREMENTS); - return enablePositionsIncrements.HasValue ? enablePositionsIncrements.Value : false; - } + get => QueryConfigHandler.Get(ConfigurationKeys.ENABLE_POSITION_INCREMENTS) ?? false; set => QueryConfigHandler.Set(ConfigurationKeys.ENABLE_POSITION_INCREMENTS, value); } @@ -246,7 +235,7 @@ public virtual void SetMultiFields(string[] fields) if (fields == null) { - fields = new string[0]; + fields = Arrays.Empty(); } QueryConfigHandler.Set(ConfigurationKeys.MULTI_FIELDS, fields); @@ -310,18 +299,14 @@ public virtual CultureInfo Locale get { var culture = QueryConfigHandler.Get(ConfigurationKeys.LOCALE); - return culture == null ? CultureInfo.CurrentCulture : culture; + return culture ?? CultureInfo.CurrentCulture; } set => QueryConfigHandler.Set(ConfigurationKeys.LOCALE, value); } public virtual TimeZoneInfo TimeZone { - get - { - var timeZone = QueryConfigHandler.Get(ConfigurationKeys.TIMEZONE); - return timeZone == null ? TimeZoneInfo.Local : timeZone; - } + get => QueryConfigHandler.Get(ConfigurationKeys.TIMEZONE) ?? 
TimeZoneInfo.Local; set => QueryConfigHandler.Set(ConfigurationKeys.TIMEZONE, value); } @@ -348,11 +333,7 @@ public virtual Analyzer Analyzer /// public virtual int PhraseSlop { - get - { - int? phraseSlop = QueryConfigHandler.Get(ConfigurationKeys.PHRASE_SLOP); - return phraseSlop.HasValue ? phraseSlop.Value : 0; - } + get => QueryConfigHandler.Get(ConfigurationKeys.PHRASE_SLOP) ?? 0; set => QueryConfigHandler.Set(ConfigurationKeys.PHRASE_SLOP, value); } diff --git a/src/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/src/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj index df15788e89..de5097b7b9 100644 --- a/src/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj +++ b/src/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj @@ -31,7 +31,9 @@ Query parsers and parsing framework for the Lucene.Net full-text search engine library from The Apache Software Foundation. $(PackageTags);query;queryparser bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml + $(NoWarn);1591;1573 + $(NoWarn);IDE0060 diff --git a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs index 0cb8bace9d..0900084f9c 100644 --- a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs +++ b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs @@ -499,9 +499,11 @@ private void BuildQueryTree(State state, Query branch) // this is necessary any time a term, phrase, or subquery is negated if (state.Not % 2 == 1) { - BooleanQuery nq = new BooleanQuery(); - nq.Add(branch, Occur.MUST_NOT); - nq.Add(new MatchAllDocsQuery(), Occur.SHOULD); + BooleanQuery nq = new BooleanQuery + { + { branch, Occur.MUST_NOT }, + { new MatchAllDocsQuery(), Occur.SHOULD } + }; branch = nq; } @@ -525,8 +527,10 @@ private void BuildQueryTree(State state, Query branch) // the proper precedence and the current operation will take over as the top of the tree if (!state.PreviousOperationIsSet || state.PreviousOperation != state.CurrentOperation) { - 
BooleanQuery bq = new BooleanQuery(); - bq.Add(state.Top, state.CurrentOperation); + BooleanQuery bq = new BooleanQuery + { + { state.Top, state.CurrentOperation } + }; state.Top = bq; } @@ -568,8 +572,7 @@ private int ParseFuzziness(State state) slopLength++; } } - int fuzziness = 0; - int.TryParse(new string(slopText, 0, slopLength), out fuzziness); // LUCENENET TODO: Find a way to pass culture + int.TryParse(new string(slopText, 0, slopLength), out int fuzziness); // LUCENENET TODO: Find a way to pass culture // negative -> 0 if (fuzziness < 0) { diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs index 956411d57b..17c7b6372d 100644 --- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs +++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs @@ -1,6 +1,7 @@ using Lucene.Net.QueryParsers.Surround.Query; using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.IO; #if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; @@ -54,6 +55,8 @@ namespace Lucene.Net.QueryParsers.Surround.Parser /// to two terms may appear between a and b. /// /// + [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This class is based on generated code")] + [SuppressMessage("Style", "IDE0028:Collection initialization can be simplified", Justification = "This class is based on generated code")] public class QueryParser { internal readonly int minimumPrefixLength = 3; @@ -546,7 +549,7 @@ public SrndQuery SimpleTerm() public void OptionalWeights(SrndQuery q) { - Token weight = null; + Token weight; // LUCENENET: IDE0059: Remove unnecessary value assignment while (true) { switch ((jj_ntk == -1) ? 
Jj_ntk() : jj_ntk) @@ -605,7 +608,8 @@ private bool Jj_3_1() private int jj_la; private int jj_gen; private readonly int[] jj_la1 = new int[10]; - private static int[] jj_la1_0 = new int[] { 0x100, 0x200, 0x400, 0x1000, 0x800, 0x7c3b00, 0x1b00, 0x8000, 0x7c0000, 0x20000, };// LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) + private static readonly int[] jj_la1_0 = new int[] { // LUCENENET: marked readonly // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) + 0x100, 0x200, 0x400, 0x1000, 0x800, 0x7c3b00, 0x1b00, 0x8000, 0x7c0000, 0x20000, }; // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) //static QueryParser() @@ -779,10 +783,10 @@ private int Jj_ntk() return (jj_ntk = Jj_nt.Kind); } - private IList jj_expentries = new List(); + private readonly IList jj_expentries = new List(); // LUCENENET: marked readonly private int[] jj_expentry; private int jj_kind = -1; - private int[] jj_lasttokens = new int[100]; + private readonly int[] jj_lasttokens = new int[100]; // LUCENENET: marked readonly private int jj_endpos; private void Jj_add_error_token(int kind, int pos) diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs index 6e8716b071..af7fea8b6c 100644 --- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs +++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs @@ -24,10 +24,16 @@ namespace Lucene.Net.QueryParsers.Surround.Parser /// /// Token Manager. 
/// + [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "This class is based on generated code")] + [SuppressMessage("Style", "IDE0059:Unnecessary assignment of a value", Justification = "This class is based on generated code")] + [SuppressMessage("CodeQuality", "IDE0051:Remove unused private members", Justification = "This class is based on generated code")] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "This class is based on generated code")] public class QueryParserTokenManager //: QueryParserConstants { /// Debug output. +#pragma warning disable IDE0052 // Remove unread private members private TextWriter debugStream; // LUCENENET specific - made private, since we already have a setter +#pragma warning restore IDE0052 // Remove unread private members /// Set debug output. public virtual void SetDebugStream(TextWriter ds) { @@ -51,6 +57,7 @@ private int JjStopAtPos(int pos, int kind) jjmatchedPos = pos; return pos + 1; } + private int jjMoveStringLiteralDfa0_1() { switch (m_curChar) @@ -619,7 +626,7 @@ protected Token JjFillToken() int beginColumn; int endColumn; string im = jjstrLiteralImages[jjmatchedKind]; - curTokenImage = (im == null) ? m_input_stream.Image : im; + curTokenImage = im ?? m_input_stream.Image; beginLine = m_input_stream.BeginLine; beginColumn = m_input_stream.BeginColumn; endLine = m_input_stream.EndLine; diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs index 3e6e92cfde..b5b28b8499 100644 --- a/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs +++ b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs @@ -37,16 +37,16 @@ public class TokenMgrError : Exception */ /// Lexical error occurred. - internal static readonly int LEXICAL_ERROR = 0; + internal const int LEXICAL_ERROR = 0; /// An attempt was made to create a second instance of a static token manager. 
- internal static readonly int STATIC_LEXER_ERROR = 1; + internal const int STATIC_LEXER_ERROR = 1; /// Tried to change to an invalid lexical state. - internal static readonly int INVALID_LEXICAL_STATE = 2; + internal const int INVALID_LEXICAL_STATE = 2; /// Detected (and bailed out of) an infinite loop in the token manager. - internal static readonly int LOOP_DETECTED = 3; + internal const int LOOP_DETECTED = 3; /// Indicates the reason why the exception is thrown. It will have /// one of the above 4 values. diff --git a/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs index 28cb8a5a8e..08582940bd 100644 --- a/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs +++ b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs @@ -51,7 +51,7 @@ public BasicQueryFactory() { } - private int maxBasicQueries; + private readonly int maxBasicQueries; // LUCENENET: marked readonly private int queriesMade; public virtual int NrQueriesMade => queriesMade; diff --git a/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs index 92c8ccda68..56138f938e 100644 --- a/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs +++ b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs @@ -26,7 +26,7 @@ namespace Lucene.Net.QueryParsers.Surround.Query /// public abstract class ComposedQuery : SrndQuery { - public ComposedQuery(IList qs, bool operatorInfix, string opName) + protected ComposedQuery(IList qs, bool operatorInfix, string opName) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { Recompose(qs); this.operatorInfix = operatorInfix; diff --git a/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs index ffcddf2a1b..018bb5b3c1 100644 --- a/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs +++ 
b/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs @@ -39,10 +39,10 @@ public DistanceQuery( this.ordered = ordered; } - private int opDistance; + private readonly int opDistance; // LUCENENET: marked readonly public virtual int OpDistance => opDistance; - private bool ordered; + private readonly bool ordered; // LUCENENET: marked readonly public virtual bool QueriesOrdered => ordered; diff --git a/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs index 60fb8bb1a4..16f8ee5e7c 100644 --- a/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs +++ b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs @@ -25,8 +25,8 @@ namespace Lucene.Net.QueryParsers.Surround.Query /// public class FieldsQuery : SrndQuery /* mostly untested */ { - private SrndQuery q; - private IList fieldNames; + private readonly SrndQuery q; // LUCENENET: marked readonly + private readonly IList fieldNames; // LUCENENET: marked readonly private readonly char fieldOp; private readonly string orOperatorName = "OR"; /* for expanded queries, not normally visible */ @@ -40,8 +40,10 @@ public FieldsQuery(SrndQuery q, IList fieldNames, char fieldOp) public FieldsQuery(SrndQuery q, string fieldName, char fieldOp) { this.q = q; - var fieldNameList = new List(); - fieldNameList.Add(fieldName); + var fieldNameList = new List + { + fieldName + }; this.fieldNames = fieldNameList; this.fieldOp = fieldOp; } diff --git a/src/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs index 30b883e767..27efa5ef51 100644 --- a/src/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs +++ b/src/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs @@ -43,9 +43,9 @@ public virtual string DistanceSubQueryNotAllowed() while (sqi.MoveNext()) { SrndQuery leq = sqi.Current; - if (leq is IDistanceSubQuery) + if (leq is IDistanceSubQuery distanceSubQuery) { - string m = 
((IDistanceSubQuery)leq).DistanceSubQueryNotAllowed(); + string m = distanceSubQuery.DistanceSubQueryNotAllowed(); if (m != null) { return m; diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs index 023826defa..cbaaad9c3b 100644 --- a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs +++ b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs @@ -27,12 +27,12 @@ namespace Lucene.Net.QueryParsers.Surround.Query /// public abstract class SimpleTerm : SrndQuery, IDistanceSubQuery, IComparable { - public SimpleTerm(bool q) + protected SimpleTerm(bool q) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { quoted = q; } - private bool quoted; + private readonly bool quoted; // LUCENENET: marked readonly internal bool IsQuoted => quoted; public virtual string Quote => "\""; diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs index c01e8946fe..abded9ce14 100644 --- a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs +++ b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs @@ -68,10 +68,10 @@ public SpanNearClauseFactory(IndexReader reader, string fieldName, BasicQueryFac this.qf = qf; } - private IndexReader reader; - private string fieldName; - private IDictionary weightBySpanQuery; - private BasicQueryFactory qf; + private readonly IndexReader reader; // LUCENENET: marked readonly + private readonly string fieldName; // LUCENENET: marked readonly + private readonly IDictionary weightBySpanQuery; // LUCENENET: marked readonly + private readonly BasicQueryFactory qf; // LUCENENET: marked readonly public virtual IndexReader IndexReader => reader; diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs index 15ef179c20..f73de2f890 100644 --- 
a/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs +++ b/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs @@ -86,16 +86,7 @@ public virtual Search.Query MakeLuceneQueryField(string fieldName, BasicQueryFac /// public virtual object Clone() { - object clone = null; - try - { - clone = base.MemberwiseClone(); - } - catch (Exception e) - { - throw new InvalidOperationException(e.Message, e); // shouldn't happen - } - return clone; + return MemberwiseClone(); // LUCENENET: never throws in .NET } /// diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/BoostingQueryBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/BoostingQueryBuilder.cs index 96f4bcd859..82700776d6 100644 --- a/src/Lucene.Net.QueryParser/Xml/Builders/BoostingQueryBuilder.cs +++ b/src/Lucene.Net.QueryParser/Xml/Builders/BoostingQueryBuilder.cs @@ -26,7 +26,7 @@ namespace Lucene.Net.QueryParsers.Xml.Builders /// public class BoostingQueryBuilder : IQueryBuilder { - private static float DEFAULT_BOOST = 0.01f; + private const float DEFAULT_BOOST = 0.01f; private readonly IQueryBuilder factory; diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/CachedFilterBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/CachedFilterBuilder.cs index 63281b1131..825161c829 100644 --- a/src/Lucene.Net.QueryParser/Xml/Builders/CachedFilterBuilder.cs +++ b/src/Lucene.Net.QueryParser/Xml/Builders/CachedFilterBuilder.cs @@ -81,8 +81,7 @@ public virtual Filter GetFilter(XmlElement e) f = filterFactory.GetFilter(childElement); cacheKey = f; } - Filter cachedFilter; - if (filterCache.TryGetValue(cacheKey, out cachedFilter) && cachedFilter != null) + if (filterCache.TryGetValue(cacheKey, out Filter cachedFilter) && cachedFilter != null) { return cachedFilter; // cache hit } diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/DisjunctionMaxQueryBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/DisjunctionMaxQueryBuilder.cs index 399a5290e7..113d735857 100644 --- 
a/src/Lucene.Net.QueryParser/Xml/Builders/DisjunctionMaxQueryBuilder.cs +++ b/src/Lucene.Net.QueryParser/Xml/Builders/DisjunctionMaxQueryBuilder.cs @@ -46,9 +46,8 @@ public virtual Query GetQuery(XmlElement e) for (int i = 0; i < nl.Count; i++) { XmlNode node = nl.Item(i); - if (node is XmlElement) + if (node is XmlElement queryElem) { // all elements are disjuncts. - XmlElement queryElem = (XmlElement)node; Query q = factory.GetQuery(queryElem); dq.Add(q); } diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/FuzzyLikeThisQueryBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/FuzzyLikeThisQueryBuilder.cs index dc96dc4649..1bdaeb98ae 100644 --- a/src/Lucene.Net.QueryParser/Xml/Builders/FuzzyLikeThisQueryBuilder.cs +++ b/src/Lucene.Net.QueryParser/Xml/Builders/FuzzyLikeThisQueryBuilder.cs @@ -27,12 +27,12 @@ namespace Lucene.Net.QueryParsers.Xml.Builders /// public class FuzzyLikeThisQueryBuilder : IQueryBuilder { - private static readonly int DEFAULT_MAX_NUM_TERMS = 50; + private const int DEFAULT_MAX_NUM_TERMS = 50; #pragma warning disable 612, 618 - private static readonly float DEFAULT_MIN_SIMILARITY = SlowFuzzyQuery.defaultMinSimilarity; + private const float DEFAULT_MIN_SIMILARITY = SlowFuzzyQuery.defaultMinSimilarity; #pragma warning restore 612, 618 - private static readonly int DEFAULT_PREFIX_LENGTH = 1; - private static readonly bool DEFAULT_IGNORE_TF = false; + private const int DEFAULT_PREFIX_LENGTH = 1; + private const bool DEFAULT_IGNORE_TF = false; private readonly Analyzer analyzer; diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/LikeThisQueryBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/LikeThisQueryBuilder.cs index 15e6a7c548..b576373725 100644 --- a/src/Lucene.Net.QueryParser/Xml/Builders/LikeThisQueryBuilder.cs +++ b/src/Lucene.Net.QueryParser/Xml/Builders/LikeThisQueryBuilder.cs @@ -34,9 +34,9 @@ namespace Lucene.Net.QueryParsers.Xml.Builders /// public class LikeThisQueryBuilder : IQueryBuilder { - private static readonly int 
DEFAULT_MAX_QUERY_TERMS = 20; - private static readonly int DEFAULT_MIN_TERM_FREQUENCY = 1; - private static readonly float DEFAULT_PERCENT_TERMS_TO_MATCH = 30; //default is a 3rd of selected terms must match + private const int DEFAULT_MAX_QUERY_TERMS = 20; + private const int DEFAULT_MIN_TERM_FREQUENCY = 1; + private const float DEFAULT_PERCENT_TERMS_TO_MATCH = 30; //default is a 3rd of selected terms must match private readonly Analyzer analyzer; private readonly string[] defaultFieldNames; diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/SpanQueryBuilderFactory.cs b/src/Lucene.Net.QueryParser/Xml/Builders/SpanQueryBuilderFactory.cs index b3fd8fd562..26c7daf936 100644 --- a/src/Lucene.Net.QueryParser/Xml/Builders/SpanQueryBuilderFactory.cs +++ b/src/Lucene.Net.QueryParser/Xml/Builders/SpanQueryBuilderFactory.cs @@ -41,8 +41,7 @@ public virtual void AddBuilder(string nodeName, ISpanQueryBuilder builder) public virtual SpanQuery GetSpanQuery(XmlElement e) { - ISpanQueryBuilder builder; - if (!builders.TryGetValue(e.Name, out builder) || builder == null) + if (!builders.TryGetValue(e.Name, out ISpanQueryBuilder builder) || builder == null) { throw new ParserException("No SpanQueryObjectBuilder defined for node " + e.Name); } diff --git a/src/Lucene.Net.QueryParser/Xml/Builders/UserInputQueryBuilder.cs b/src/Lucene.Net.QueryParser/Xml/Builders/UserInputQueryBuilder.cs index 40d34e7a13..21eb6ff053 100644 --- a/src/Lucene.Net.QueryParser/Xml/Builders/UserInputQueryBuilder.cs +++ b/src/Lucene.Net.QueryParser/Xml/Builders/UserInputQueryBuilder.cs @@ -31,9 +31,9 @@ namespace Lucene.Net.QueryParsers.Xml.Builders /// public class UserInputQueryBuilder : IQueryBuilder { - private QueryParser unSafeParser; - private Analyzer analyzer; - private string defaultField; + private readonly QueryParser unSafeParser; // LUCENENET: marked readonly + private readonly Analyzer analyzer; // LUCENENET: marked readonly + private readonly string defaultField; // LUCENENET: marked 
readonly /// /// This constructor has the disadvantage of not being able to change choice of default field name diff --git a/src/Lucene.Net.QueryParser/Xml/DOMUtils.cs b/src/Lucene.Net.QueryParser/Xml/DOMUtils.cs index 364f61d0b3..0cff1f937f 100644 --- a/src/Lucene.Net.QueryParser/Xml/DOMUtils.cs +++ b/src/Lucene.Net.QueryParser/Xml/DOMUtils.cs @@ -26,7 +26,7 @@ namespace Lucene.Net.QueryParsers.Xml /// /// Helper methods for parsing XML /// - public class DOMUtils + public static class DOMUtils // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public static XmlElement GetChildByTagOrFail(XmlElement e, string name) { @@ -113,9 +113,8 @@ public static string GetAttributeWithInheritance(XmlElement element, string attr { return null; } - if (n is XmlElement) + if (n is XmlElement parent) { - XmlElement parent = (XmlElement)n; return GetAttributeWithInheritance(parent, attributeName); } return null; //we reached the top level of the document without finding attribute diff --git a/src/Lucene.Net.QueryParser/Xml/FilterBuilderFactory.cs b/src/Lucene.Net.QueryParser/Xml/FilterBuilderFactory.cs index 853599caca..b2da0ae90f 100644 --- a/src/Lucene.Net.QueryParser/Xml/FilterBuilderFactory.cs +++ b/src/Lucene.Net.QueryParser/Xml/FilterBuilderFactory.cs @@ -26,12 +26,11 @@ namespace Lucene.Net.QueryParsers.Xml /// public class FilterBuilderFactory : IFilterBuilder { - IDictionary builders = new Dictionary(); + private readonly IDictionary builders = new Dictionary(); // LUCENENET: marked readonly public virtual Filter GetFilter(XmlElement n) { - IFilterBuilder builder; - if (!builders.TryGetValue(n.Name, out builder) || builder == null) + if (!builders.TryGetValue(n.Name, out IFilterBuilder builder) || builder == null) { throw new ParserException("No FilterBuilder defined for node " + n.Name); } @@ -45,8 +44,7 @@ public virtual void AddBuilder(string nodeName, IFilterBuilder builder) public virtual IFilterBuilder GetFilterBuilder(string 
nodeName) { - IFilterBuilder result; - builders.TryGetValue(nodeName, out result); + builders.TryGetValue(nodeName, out IFilterBuilder result); return result; } } diff --git a/src/Lucene.Net.QueryParser/Xml/QueryBuilderFactory.cs b/src/Lucene.Net.QueryParser/Xml/QueryBuilderFactory.cs index 5972de184e..8dd2184cba 100644 --- a/src/Lucene.Net.QueryParser/Xml/QueryBuilderFactory.cs +++ b/src/Lucene.Net.QueryParser/Xml/QueryBuilderFactory.cs @@ -26,12 +26,11 @@ namespace Lucene.Net.QueryParsers.Xml /// public class QueryBuilderFactory : IQueryBuilder { - IDictionary builders = new Dictionary(); + private readonly IDictionary builders = new Dictionary(); // LUCENENET: marked readonly public virtual Query GetQuery(XmlElement n) { - IQueryBuilder builder; - if (!builders.TryGetValue(n.Name, out builder) || builder == null) + if (!builders.TryGetValue(n.Name, out IQueryBuilder builder) || builder == null) { throw new ParserException("No QueryObjectBuilder defined for node " + n.Name); } @@ -45,8 +44,7 @@ public virtual void AddBuilder(string nodeName, IQueryBuilder builder) public virtual IQueryBuilder GetQueryBuilder(string nodeName) { - IQueryBuilder result; - builders.TryGetValue(nodeName, out result); + builders.TryGetValue(nodeName, out IQueryBuilder result); return result; } } diff --git a/src/Lucene.Net.QueryParser/Xml/QueryTemplateManager.cs b/src/Lucene.Net.QueryParser/Xml/QueryTemplateManager.cs index 243b2deb2c..0299dd7bd0 100644 --- a/src/Lucene.Net.QueryParser/Xml/QueryTemplateManager.cs +++ b/src/Lucene.Net.QueryParser/Xml/QueryTemplateManager.cs @@ -31,14 +31,11 @@ namespace Lucene.Net.QueryParsers.Xml /// be easily changed/optimized by a DBA. /// The static methods can be used on their own or by creating an instance of this class you can store and /// re-use compiled stylesheets for fast use (e.g. in a server environment) - /// - /// LUCENENET (.NET Core): This is not compiled this because .NET Standard - /// does not currently support XSL Transform. 
/// public class QueryTemplateManager { - IDictionary compiledTemplatesCache = new Dictionary(); - XslCompiledTransform defaultCompiledTemplates = null; + private readonly IDictionary compiledTemplatesCache = new Dictionary(); // LUCENENET: marked readonly + private XslCompiledTransform defaultCompiledTemplates; public QueryTemplateManager() { @@ -87,14 +84,10 @@ public virtual XmlDocument GetQueryAsDOM(IDictionary formPropert public static string GetQueryAsXmlString(IDictionary formProperties, XslCompiledTransform template) { // TODO: Suppress XML header with encoding (as Strings have no encoding) - using (var stream = new MemoryStream()) - { - TransformCriteria(formProperties, template, stream); - using (StreamReader reader = new StreamReader(stream)) - { - return reader.ReadToEnd(); - } - } + using var stream = new MemoryStream(); + TransformCriteria(formProperties, template, stream); + using StreamReader reader = new StreamReader(stream); + return reader.ReadToEnd(); } /// @@ -103,14 +96,10 @@ public static string GetQueryAsXmlString(IDictionary formPropert public static string GetQueryAsXmlString(IDictionary formProperties, Stream xslIs) { // TODO: Suppress XML header with encoding (as Strings have no encoding) - using (var stream = new MemoryStream()) - { - TransformCriteria(formProperties, xslIs, stream); - using (StreamReader reader = new StreamReader(stream)) - { - return reader.ReadToEnd(); - } - } + using var stream = new MemoryStream(); + TransformCriteria(formProperties, xslIs, stream); + using StreamReader reader = new StreamReader(stream); + return reader.ReadToEnd(); } /// @@ -184,12 +173,10 @@ public static void TransformCriteria(IDictionary formProperties, /// public static XslCompiledTransform GetTemplates(Stream xslIs) { - using (var reader = XmlReader.Create(xslIs)) - { - XslCompiledTransform xslt = new XslCompiledTransform(); - xslt.Load(reader); - return xslt; - } + using var reader = XmlReader.Create(xslIs); + XslCompiledTransform xslt = new 
XslCompiledTransform(); + xslt.Load(reader); + return xslt; } } } diff --git a/src/Lucene.Net.Replicator/Http/HttpReplicator.cs b/src/Lucene.Net.Replicator/Http/HttpReplicator.cs index 61b1104833..fa50fe3dc8 100644 --- a/src/Lucene.Net.Replicator/Http/HttpReplicator.cs +++ b/src/Lucene.Net.Replicator/Http/HttpReplicator.cs @@ -71,10 +71,8 @@ public virtual SessionToken CheckForUpdate(string currentVersion) HttpResponseMessage response = base.ExecuteGet(ReplicationService.ReplicationAction.UPDATE.ToString(), parameters); return DoAction(response, () => { - using (DataInputStream inputStream = new DataInputStream(ResponseInputStream(response))) - { - return inputStream.ReadByte() == 0 ? null : new SessionToken(inputStream); - } + using DataInputStream inputStream = new DataInputStream(ResponseInputStream(response)); + return inputStream.ReadByte() == 0 ? null : new SessionToken(inputStream); }); } diff --git a/src/Lucene.Net.Replicator/Http/ReplicationService.cs b/src/Lucene.Net.Replicator/Http/ReplicationService.cs index f89c94cce5..90e20cdafe 100644 --- a/src/Lucene.Net.Replicator/Http/ReplicationService.cs +++ b/src/Lucene.Net.Replicator/Http/ReplicationService.cs @@ -139,14 +139,12 @@ public virtual void Perform(IReplicationRequest request, IReplicationResponse re throw new InvalidOperationException("invalid path, must contain shard ID and action, e.g. 
*/s1/update"); } - ReplicationAction action; - if (!Enum.TryParse(pathElements[ACTION_IDX], true, out action)) + if (!Enum.TryParse(pathElements[ACTION_IDX], true, out ReplicationAction action)) { throw new InvalidOperationException("Unsupported action provided: " + pathElements[ACTION_IDX]); } - IReplicator replicator; - if (!replicators.TryGetValue(pathElements[SHARD_IDX], out replicator)) + if (!replicators.TryGetValue(pathElements[SHARD_IDX], out IReplicator replicator)) { throw new InvalidOperationException("unrecognized shard ID " + pathElements[SHARD_IDX]); } diff --git a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs index 6646742c95..2d845a3bb7 100644 --- a/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs +++ b/src/Lucene.Net.Replicator/IndexAndTaxonomyRevision.cs @@ -51,7 +51,7 @@ public class IndexAndTaxonomyRevision : IRevision public class SnapshotDirectoryTaxonomyWriter : DirectoryTaxonomyWriter { private SnapshotDeletionPolicy sdp; - private IndexWriter writer; + private IndexWriter writer; // LUCENENET TODO: Why does disposing this in Dispose(true) throw an excpetion? 
/// /// @@ -136,7 +136,7 @@ public IndexAndTaxonomyRevision(IndexWriter indexWriter, SnapshotDirectoryTaxono { this.indexSdp = indexWriter.Config.IndexDeletionPolicy as SnapshotDeletionPolicy; if (indexSdp == null) - throw new ArgumentException("IndexWriter must be created with SnapshotDeletionPolicy", "indexWriter"); + throw new ArgumentException("IndexWriter must be created with SnapshotDeletionPolicy", nameof(indexWriter)); this.indexWriter = indexWriter; this.taxonomyWriter = taxonomyWriter; @@ -170,11 +170,10 @@ public virtual int CompareTo(string version) public virtual int CompareTo(IRevision other) { if (other == null) - throw new ArgumentNullException("other"); + throw new ArgumentNullException(nameof(other)); - IndexAndTaxonomyRevision itr = other as IndexAndTaxonomyRevision; - if(itr == null) - throw new ArgumentException(string.Format("Cannot compare IndexAndTaxonomyRevision to a {0}", other.GetType()), "other"); + if (!(other is IndexAndTaxonomyRevision itr)) + throw new ArgumentException($"Cannot compare IndexAndTaxonomyRevision to a {other.GetType()}", nameof(other)); int cmp = indexCommit.CompareTo(itr.indexCommit); return cmp != 0 ? 
cmp : taxonomyCommit.CompareTo(itr.taxonomyCommit); diff --git a/src/Lucene.Net.Replicator/IndexReplicationHandler.cs b/src/Lucene.Net.Replicator/IndexReplicationHandler.cs index 9ceb9b0981..f9b8438580 100644 --- a/src/Lucene.Net.Replicator/IndexReplicationHandler.cs +++ b/src/Lucene.Net.Replicator/IndexReplicationHandler.cs @@ -168,8 +168,10 @@ public static void CleanupOldIndexFiles(Directory directory, string segmentsFile if (commit != null && commit.SegmentsFileName.Equals(segmentsFile, StringComparison.Ordinal)) { - ISet commitFiles = new JCG.HashSet(commit.FileNames); - commitFiles.Add(IndexFileNames.SEGMENTS_GEN); + ISet commitFiles = new JCG.HashSet(commit.FileNames) + { + IndexFileNames.SEGMENTS_GEN + }; Regex matcher = IndexFileNames.CODEC_FILE_PATTERN; foreach (string file in directory.ListAll()) diff --git a/src/Lucene.Net.Replicator/IndexRevision.cs b/src/Lucene.Net.Replicator/IndexRevision.cs index 58d6a7b61f..166a84bafe 100644 --- a/src/Lucene.Net.Replicator/IndexRevision.cs +++ b/src/Lucene.Net.Replicator/IndexRevision.cs @@ -103,7 +103,7 @@ public IndexRevision(IndexWriter writer) { sdp = writer.Config.IndexDeletionPolicy as SnapshotDeletionPolicy; if (sdp == null) - throw new ArgumentException("IndexWriter must be created with SnapshotDeletionPolicy", "writer"); + throw new ArgumentException("IndexWriter must be created with SnapshotDeletionPolicy", nameof(writer)); this.writer = writer; this.commit = sdp.Snapshot(); diff --git a/src/Lucene.Net.Replicator/LocalReplicator.cs b/src/Lucene.Net.Replicator/LocalReplicator.cs index 6a06852eaa..82e95ab0b5 100644 --- a/src/Lucene.Net.Replicator/LocalReplicator.cs +++ b/src/Lucene.Net.Replicator/LocalReplicator.cs @@ -144,10 +144,9 @@ private void CheckExpiredSessions() /// private void ReleaseSession(string sessionId) { - ReplicationSession session; // if we're called concurrently by close() and release(), could be that one // thread beats the other to release the session. 
- if (sessions.TryGetValue(sessionId, out session)) + if (sessions.TryGetValue(sessionId, out ReplicationSession session)) { sessions.Remove(sessionId); session.Revision.DecRef(); @@ -237,8 +236,7 @@ public virtual Stream ObtainFile(string sessionId, string source, string fileNam { EnsureOpen(); - ReplicationSession session; - if (sessions.TryGetValue(sessionId, out session) && session != null && session.IsExpired(ExpirationThreshold)) + if (sessions.TryGetValue(sessionId, out ReplicationSession session) && session != null && session.IsExpired(ExpirationThreshold)) { ReleaseSession(sessionId); session = null; @@ -272,7 +270,7 @@ public virtual void Publish(IRevision revision) if (compare < 0) { revision.Release(); - throw new ArgumentException(string.Format("Cannot publish an older revision: rev={0} current={1}", revision, currentRevision), "revision"); + throw new ArgumentException(string.Format("Cannot publish an older revision: rev={0} current={1}", revision, currentRevision), nameof(revision)); } } diff --git a/src/Lucene.Net.Replicator/PerSessionDirectoryFactory.cs b/src/Lucene.Net.Replicator/PerSessionDirectoryFactory.cs index 74f86cbb21..149ce8701f 100644 --- a/src/Lucene.Net.Replicator/PerSessionDirectoryFactory.cs +++ b/src/Lucene.Net.Replicator/PerSessionDirectoryFactory.cs @@ -49,7 +49,7 @@ public virtual Directory GetDirectory(string sessionId, string source) public virtual void CleanupSession(string sessionId) { - if (string.IsNullOrEmpty(sessionId)) throw new ArgumentException("sessionID cannot be empty", "sessionId"); + if (string.IsNullOrEmpty(sessionId)) throw new ArgumentException("sessionID cannot be empty", nameof(sessionId)); string sessionDirectory = Path.Combine(workingDirectory, sessionId); System.IO.Directory.Delete(sessionDirectory, true); diff --git a/src/Lucene.Net.Replicator/ReplicationClient.cs b/src/Lucene.Net.Replicator/ReplicationClient.cs index 902b1f9307..c6703f79ec 100644 --- a/src/Lucene.Net.Replicator/ReplicationClient.cs +++ 
b/src/Lucene.Net.Replicator/ReplicationClient.cs @@ -390,6 +390,7 @@ protected virtual void Dispose(bool disposing) return; StopUpdateThread(); + infoStream.Dispose(); // LUCENENET specific disposed = true; } diff --git a/src/Lucene.Net.Replicator/RevisionFile.cs b/src/Lucene.Net.Replicator/RevisionFile.cs index 3fd163317c..4d5c563951 100644 --- a/src/Lucene.Net.Replicator/RevisionFile.cs +++ b/src/Lucene.Net.Replicator/RevisionFile.cs @@ -45,7 +45,7 @@ public class RevisionFile : IEquatable /// Optional, the length of the file. public RevisionFile(string fileName, long length = -1) { - if (string.IsNullOrEmpty(fileName)) throw new ArgumentException("fileName must not be null or empty", "fileName"); + if (string.IsNullOrEmpty(fileName)) throw new ArgumentException("fileName must not be null or empty", nameof(fileName)); FileName = fileName; Length = length; @@ -53,7 +53,7 @@ public RevisionFile(string fileName, long length = -1) public override bool Equals(object obj) { - if (ReferenceEquals(null, obj)) return false; + if (obj is null) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != this.GetType()) return false; return Equals((RevisionFile)obj); @@ -62,7 +62,7 @@ public override bool Equals(object obj) // LUCENENET specific Equals overload public virtual bool Equals(RevisionFile other) { - if (ReferenceEquals(null, other)) return false; + if (other is null) return false; if (ReferenceEquals(this, other)) return true; return string.Equals(FileName, other.FileName, StringComparison.Ordinal) && Length == other.Length; } diff --git a/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs b/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs index bb7108e03a..93ac28f492 100644 --- a/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs +++ b/src/Lucene.Net.Sandbox/Queries/FuzzyLikeThisQuery.cs @@ -298,8 +298,7 @@ public override Query Rewrite(IndexReader reader) ScoreTerm st = q.Pop(); //List l = variantQueries.get(st.fuzziedSourceTerm); // 
if(l==null) - List l; - if (!variantQueries.TryGetValue(st.FuzziedSourceTerm, out l) || l == null) + if (!variantQueries.TryGetValue(st.FuzziedSourceTerm, out List l) || l == null) { l = new List(); variantQueries[st.FuzziedSourceTerm] = l; diff --git a/src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs b/src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs index 0e2e753e88..5b935cc49d 100644 --- a/src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs +++ b/src/Lucene.Net.Sandbox/Queries/SlowFuzzyQuery.cs @@ -37,13 +37,13 @@ namespace Lucene.Net.Sandbox.Queries [Obsolete("Use FuzzyQuery instead.")] public class SlowFuzzyQuery : MultiTermQuery { - public readonly static float defaultMinSimilarity = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE; - public readonly static int defaultPrefixLength = 0; - public readonly static int defaultMaxExpansions = 50; + public const float defaultMinSimilarity = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE; + public const int defaultPrefixLength = 0; + public const int defaultMaxExpansions = 50; - private float minimumSimilarity; - private int prefixLength; - private bool termLongEnough = false; + private readonly float minimumSimilarity; // LUCENENET: marked readonly + private readonly int prefixLength; // LUCENENET: marked readonly + private readonly bool termLongEnough = false; // LUCENENET: marked readonly protected Term m_term; diff --git a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs index 34f2783be8..38a512368a 100644 --- a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs +++ b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs @@ -147,13 +147,11 @@ public override object MissingValue internal class TermOrdValComparerAnonymousHelper : FieldComparer.TermOrdValComparer { private readonly SortedSetSortField outerInstance; - private readonly int numHits; public TermOrdValComparerAnonymousHelper(SortedSetSortField outerInstance, int numHits) : base(numHits, 
outerInstance.Field, outerInstance.m_missingValue == STRING_LAST) { this.outerInstance = outerInstance; - this.numHits = numHits; } protected override SortedDocValues GetSortedDocValues(AtomicReaderContext context, string field) diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs index 69195e894e..f49f841354 100644 --- a/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/AbstractPrefixTreeFilter.cs @@ -36,8 +36,8 @@ public abstract class AbstractPrefixTreeFilter : Filter protected internal readonly string m_fieldName; protected internal readonly SpatialPrefixTree m_grid;//not in equals/hashCode since it's implied for a specific field protected internal readonly int m_detailLevel; - - public AbstractPrefixTreeFilter(IShape queryShape, string fieldName, SpatialPrefixTree grid, int detailLevel) + + protected AbstractPrefixTreeFilter(IShape queryShape, string fieldName, SpatialPrefixTree grid, int detailLevel) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_queryShape = queryShape; this.m_fieldName = fieldName; @@ -94,8 +94,8 @@ public abstract class BaseTermsEnumTraverser protected TermsEnum m_termsEnum;//remember to check for null in getDocIdSet protected DocsEnum m_docsEnum; - - public BaseTermsEnumTraverser(AbstractPrefixTreeFilter outerInstance, AtomicReaderContext context, IBits acceptDocs) + + protected BaseTermsEnumTraverser(AbstractPrefixTreeFilter outerInstance, AtomicReaderContext context, IBits acceptDocs) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_outerInstance = outerInstance; diff --git a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs index be6b00cfd3..ac98287fdb 100644 --- a/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs +++ 
b/src/Lucene.Net.Spatial/Prefix/AbstractVisitingPrefixTreeFilter.cs @@ -47,8 +47,8 @@ public abstract class AbstractVisitingPrefixTreeFilter : AbstractPrefixTreeFilte protected readonly int m_prefixGridScanLevel;//at least one less than grid.getMaxLevels() - public AbstractVisitingPrefixTreeFilter(IShape queryShape, string fieldName, SpatialPrefixTree grid, - int detailLevel, int prefixGridScanLevel) + protected AbstractVisitingPrefixTreeFilter(IShape queryShape, string fieldName, SpatialPrefixTree grid, + int detailLevel, int prefixGridScanLevel) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(queryShape, fieldName, grid, detailLevel) { this.m_prefixGridScanLevel = Math.Max(0, Math.Min(prefixGridScanLevel, grid.MaxLevels - 1)); @@ -121,13 +121,13 @@ will be invoked when multiple segments are involved. protected readonly bool m_hasIndexedLeaves;//if false then we can skip looking for them private VNode curVNode;//current pointer, derived from query shape - private BytesRef curVNodeTerm = new BytesRef();//curVNode.cell's term. + private readonly BytesRef curVNodeTerm = new BytesRef();//curVNode.cell's term. 
// LUCENENET: marked readonly private Cell scanCell; private BytesRef thisTerm; //the result of termsEnum.term() - public VisitorTemplate(AbstractVisitingPrefixTreeFilter outerInstance, AtomicReaderContext context, IBits acceptDocs, - bool hasIndexedLeaves) + protected VisitorTemplate(AbstractVisitingPrefixTreeFilter outerInstance, AtomicReaderContext context, IBits acceptDocs, + bool hasIndexedLeaves) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(outerInstance, context, acceptDocs) { this.m_hasIndexedLeaves = hasIndexedLeaves; @@ -294,7 +294,7 @@ private void AddIntersectingChildren() { return;//not expected } - curVNode.children = new VNodeCellIterator(this, subCellsIter, new VNode(curVNode)); + curVNode.children = new VNodeCellIterator(subCellsIter, new VNode(curVNode)); } else { @@ -360,15 +360,12 @@ protected internal virtual void Scan(int scanDetailLevel) /// private class VNodeCellIterator : IEnumerator { - private readonly VisitorTemplate outerInstance; - internal readonly IEnumerator cellIter; private readonly VNode vNode; private bool first = true; - internal VNodeCellIterator(VisitorTemplate outerInstance, IEnumerator cellIter, VNode vNode) + internal VNodeCellIterator(IEnumerator cellIter, VNode vNode) { - this.outerInstance = outerInstance; //term loop this.cellIter = cellIter; this.vNode = vNode; diff --git a/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs b/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs index 013ce41bb1..5bee73ac93 100644 --- a/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs +++ b/src/Lucene.Net.Spatial/Prefix/PrefixTreeStrategy.cs @@ -81,7 +81,7 @@ public abstract class PrefixTreeStrategy : SpatialStrategy protected int m_defaultFieldValuesArrayLen = 2; protected double m_distErrPct = SpatialArgs.DEFAULT_DISTERRPCT;// [ 0 TO 0.5 ] - public PrefixTreeStrategy(SpatialPrefixTree grid, string fieldName, bool simplifyIndexedCells) + protected PrefixTreeStrategy(SpatialPrefixTree 
grid, string fieldName, bool simplifyIndexedCells) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(grid.SpatialContext, fieldName) { this.m_grid = grid; @@ -194,6 +194,30 @@ public override bool IncrementToken() } return false; } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. + + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + iter?.Dispose(); // LUCENENET specific - dispose iter and set to null + iter = null; + } + } + finally + { + base.Dispose(disposing); + } + } } public override ValueSource MakeDistanceValueSource(IPoint queryPoint, double multiplier) diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs index 7ae545e14d..3acf138aaa 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/Cell.cs @@ -85,7 +85,7 @@ protected Cell(SpatialPrefixTree outerInstance, string token) } if (Level == 0) { - var x = Shape;//ensure any lazy instantiation completes to make this threadsafe + var _ = Shape;//ensure any lazy instantiation completes to make this threadsafe } } @@ -194,9 +194,9 @@ public virtual byte[] GetTokenBytes() public virtual ICollection GetSubCells(IShape shapeFilter) { //Note: Higher-performing subclasses might override to consider the shape filter to generate fewer cells. 
- if (shapeFilter is IPoint) + if (shapeFilter is IPoint point) { - Cell subCell = GetSubCell((IPoint)shapeFilter); + Cell subCell = GetSubCell(point); subCell.m_shapeRel = SpatialRelation.CONTAINS; return new ReadOnlyCollection(new[] { subCell }); } @@ -269,8 +269,8 @@ public virtual int CompareTo(Cell o) public override bool Equals(object obj) { - return !(obj == null || !(obj is Cell)) && - TokenString.Equals(((Cell)obj).TokenString, StringComparison.Ordinal); + return !(obj == null || !(obj is Cell cell)) && + TokenString.Equals(cell.TokenString, StringComparison.Ordinal); } public override int GetHashCode() diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs index 44460c2307..957653c0cb 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/GeohashPrefixTree.cs @@ -48,7 +48,7 @@ protected internal override int GetLevelForDistance(double degrees) protected internal override SpatialPrefixTree NewSPT() { - return new GeohashPrefixTree(m_ctx, m_maxLevels.HasValue ? m_maxLevels.Value : GeohashPrefixTree.MaxLevelsPossible); + return new GeohashPrefixTree(m_ctx, m_maxLevels ?? GeohashPrefixTree.MaxLevelsPossible); } } diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs index a24056c36a..b69a180b73 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/QuadPrefixTree.cs @@ -50,7 +50,7 @@ protected internal override int GetLevelForDistance(double degrees) protected internal override SpatialPrefixTree NewSPT() { - return new QuadPrefixTree(m_ctx, m_maxLevels.HasValue ? m_maxLevels.Value : MAX_LEVELS_POSSIBLE); + return new QuadPrefixTree(m_ctx, m_maxLevels ?? 
MAX_LEVELS_POSSIBLE); } } @@ -261,12 +261,13 @@ public override void Reset(byte[] bytes, int off, int len) protected internal override ICollection GetSubCells() { QuadPrefixTree outerInstance = (QuadPrefixTree)this.m_outerInstance; - IList cells = new List(4); - cells.Add(new QuadCell(outerInstance, TokenString + "A")); - cells.Add(new QuadCell(outerInstance, TokenString + "B")); - cells.Add(new QuadCell(outerInstance, TokenString + "C")); - cells.Add(new QuadCell(outerInstance, TokenString + "D")); - return cells; + return new List(4) + { + new QuadCell(outerInstance, TokenString + "A"), + new QuadCell(outerInstance, TokenString + "B"), + new QuadCell(outerInstance, TokenString + "C"), + new QuadCell(outerInstance, TokenString + "D") + }; } public override int SubCellsSize => 4; diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs index 3d00c3a8b0..a80f9d9455 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTree.cs @@ -46,7 +46,7 @@ public abstract class SpatialPrefixTree protected internal readonly SpatialContext m_ctx; - public SpatialPrefixTree(SpatialContext ctx, int maxLevels) + protected SpatialPrefixTree(SpatialContext ctx, int maxLevels) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { if (Debugging.AssertsEnabled) Debugging.Assert(maxLevels > 0); this.m_ctx = ctx; @@ -189,9 +189,9 @@ public virtual IList GetCells(IShape shape, int detailLevel, bool inclPare { throw new ArgumentException("detailLevel > maxLevels"); } - if (shape is IPoint) + if (shape is IPoint point) { - return GetCells((IPoint)shape, detailLevel, inclParents); + return GetCells(point, detailLevel, inclParents); } IList cells = new List(inclParents ? 
4096 : 2048); RecursiveGetCells(WorldCell, shape, detailLevel, inclParents, simplify, cells); diff --git a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTreeFactory.cs b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTreeFactory.cs index 0ce3253bcd..54f87029ff 100644 --- a/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTreeFactory.cs +++ b/src/Lucene.Net.Spatial/Prefix/Tree/SpatialPrefixTreeFactory.cs @@ -48,8 +48,7 @@ public abstract class SpatialPrefixTreeFactory public static SpatialPrefixTree MakeSPT(IDictionary args, SpatialContext ctx) { SpatialPrefixTreeFactory instance; - string cname; - if (!args.TryGetValue(PREFIX_TREE, out cname)) + if (!args.TryGetValue(PREFIX_TREE, out string cname)) { cname = ctx.IsGeo ? "geohash" : "quad"; } @@ -86,15 +85,13 @@ protected internal virtual void Init(IDictionary args, SpatialCo protected internal virtual void InitMaxLevels() { - string mlStr; - if (m_args.TryGetValue(MAX_LEVELS, out mlStr)) + if (m_args.TryGetValue(MAX_LEVELS, out string mlStr)) { m_maxLevels = int.Parse(mlStr, CultureInfo.InvariantCulture); return; } double degrees; - string maxDetailDistStr; - if (!m_args.TryGetValue(MAX_DIST_ERR, out maxDetailDistStr)) + if (!m_args.TryGetValue(MAX_DIST_ERR, out string maxDetailDistStr)) { if (!m_ctx.IsGeo) { diff --git a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs index ca1f64da04..b7da0becc1 100644 --- a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs +++ b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs @@ -84,13 +84,12 @@ protected virtual IShape BufferShape(IShape shape, double distErr) throw new ArgumentException("distErr must be > 0"); } SpatialContext ctx = m_grid.SpatialContext; - if (shape is IPoint) + if (shape is IPoint point) { - return ctx.MakeCircle((IPoint)shape, distErr); + return ctx.MakeCircle(point, distErr); } - else if (shape is ICircle) + else if (shape is ICircle circle) { - var circle = (ICircle)shape; 
double newDist = circle.Radius + distErr; if (ctx.IsGeo && newDist > 180) { diff --git a/src/Lucene.Net.Spatial/Query/SpatialArgs.cs b/src/Lucene.Net.Spatial/Query/SpatialArgs.cs index 231592b74e..20030620b1 100644 --- a/src/Lucene.Net.Spatial/Query/SpatialArgs.cs +++ b/src/Lucene.Net.Spatial/Query/SpatialArgs.cs @@ -57,7 +57,7 @@ public static double CalcDistanceFromErrPct(IShape shape, double distErrPct, Spa { if (distErrPct < 0 || distErrPct > 0.5) { - throw new ArgumentException("distErrPct " + distErrPct + " must be between [0 to 0.5]", "distErrPct"); + throw new ArgumentException($"distErrPct {distErrPct} must be between [0 to 0.5]", nameof(distErrPct)); } if (distErrPct == 0 || shape is IPoint) { diff --git a/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs b/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs index 1fff4207d2..91758786db 100644 --- a/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs +++ b/src/Lucene.Net.Spatial/Query/SpatialArgsParser.cs @@ -120,9 +120,8 @@ protected virtual SpatialArgs NewSpatialArgs(SpatialOperation op, IShape shape) protected virtual void ReadNameValuePairs(SpatialArgs args, IDictionary nameValPairs) { - string distErrPctStr, distErrStr; - nameValPairs.TryGetValue(DIST_ERR_PCT, out distErrPctStr); - nameValPairs.TryGetValue(DIST_ERR, out distErrStr); + nameValPairs.TryGetValue(DIST_ERR_PCT, out string distErrPctStr); + nameValPairs.TryGetValue(DIST_ERR, out string distErrStr); args.DistErrPct = ReadDouble(distErrPctStr); nameValPairs.Remove(DIST_ERR_PCT); args.DistErr = ReadDouble(distErrStr); diff --git a/src/Lucene.Net.Spatial/Query/SpatialOperation.cs b/src/Lucene.Net.Spatial/Query/SpatialOperation.cs index 40af95e2d4..760ae4e4c5 100644 --- a/src/Lucene.Net.Spatial/Query/SpatialOperation.cs +++ b/src/Lucene.Net.Spatial/Query/SpatialOperation.cs @@ -182,11 +182,10 @@ protected SpatialOperation(string name, bool scoreIsMeaningful, bool sourceNeeds public static SpatialOperation Get(string v) { - SpatialOperation op; - if 
(!registry.TryGetValue(v, out op) || op == null) + if (!registry.TryGetValue(v, out SpatialOperation op) || op == null) { if (!registry.TryGetValue(CultureInfo.InvariantCulture.TextInfo.ToUpper(v), out op) || op == null) - throw new ArgumentException("Unknown Operation: " + v, "v"); + throw new ArgumentException($"Unknown Operation: {v}", nameof(v)); } return op; } diff --git a/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs b/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs index 2faba1a9de..184ce53792 100644 --- a/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs +++ b/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs @@ -126,7 +126,7 @@ public override Filter MakeFilter(SpatialArgs args) //TODO raise to SpatialStrategy public virtual ValueSource MakeShapeValueSource() { - return new ShapeDocValueSource(this, FieldName, m_ctx.BinaryCodec); + return new ShapeDocValueSource(FieldName, m_ctx.BinaryCodec); } /// @@ -174,20 +174,18 @@ public override IBits Bits //null Map context -- we simply don't have one. That's ok. 
FunctionValues predFuncValues = outerInstance.predicateValueSource.GetValues(null, context); - return new BitsAnonymousHelper(this, predFuncValues, context, acceptDocs); + return new BitsAnonymousHelper(predFuncValues, context, acceptDocs); } } internal class BitsAnonymousHelper : IBits { - private readonly DocIdSetAnonymousHelper outerInstance; private readonly FunctionValues predFuncValues; private readonly AtomicReaderContext context; private readonly IBits acceptDocs; - public BitsAnonymousHelper(DocIdSetAnonymousHelper outerInstance, FunctionValues predFuncValues, AtomicReaderContext context, IBits acceptDocs) + public BitsAnonymousHelper(FunctionValues predFuncValues, AtomicReaderContext context, IBits acceptDocs) { - this.outerInstance = outerInstance; this.predFuncValues = predFuncValues; this.context = context; this.acceptDocs = acceptDocs; @@ -229,13 +227,11 @@ public override int GetHashCode() /// internal class ShapeDocValueSource : ValueSource { - private readonly SerializedDVStrategy outerInstance; private readonly string fieldName; private readonly BinaryCodec binaryCodec;//spatial4n - internal ShapeDocValueSource(SerializedDVStrategy outerInstance, string fieldName, BinaryCodec binaryCodec) + internal ShapeDocValueSource(string fieldName, BinaryCodec binaryCodec) { - this.outerInstance = outerInstance; this.fieldName = fieldName; this.binaryCodec = binaryCodec; } @@ -259,7 +255,7 @@ public FuctionValuesAnonymousHelper(ShapeDocValueSource outerInstance, BinaryDoc } private int bytesRefDoc = -1; - private BytesRef bytesRef = new BytesRef();//scratch + private readonly BytesRef bytesRef = new BytesRef();//scratch internal bool FillBytes(int doc) { diff --git a/src/Lucene.Net.Spatial/SpatialStrategy.cs b/src/Lucene.Net.Spatial/SpatialStrategy.cs index 60a63a12a8..e2c4cd3d43 100644 --- a/src/Lucene.Net.Spatial/SpatialStrategy.cs +++ b/src/Lucene.Net.Spatial/SpatialStrategy.cs @@ -59,11 +59,9 @@ public abstract class SpatialStrategy /// protected 
SpatialStrategy(SpatialContext ctx, string fieldName) { - if (ctx == null) - throw new ArgumentException("ctx is required", "ctx"); - this.m_ctx = ctx; + this.m_ctx = ctx ?? throw new ArgumentNullException(nameof(ctx), "ctx is required"); if (string.IsNullOrEmpty(fieldName)) - throw new ArgumentException("fieldName is required", "fieldName"); + throw new ArgumentException("fieldName is required", nameof(fieldName)); this.fieldName = fieldName; } diff --git a/src/Lucene.Net.Spatial/Util/CachingDoubleValueSource.cs b/src/Lucene.Net.Spatial/Util/CachingDoubleValueSource.cs index c242518db7..015db2f92e 100644 --- a/src/Lucene.Net.Spatial/Util/CachingDoubleValueSource.cs +++ b/src/Lucene.Net.Spatial/Util/CachingDoubleValueSource.cs @@ -69,8 +69,7 @@ public CachingDoubleFunctionValue(int docBase, FunctionValues vals, IDictionary< public override double DoubleVal(int doc) { int key = docBase + doc; - double v; - if (!cache.TryGetValue(key, out v)) + if (!cache.TryGetValue(key, out double v)) { v = values.DoubleVal(doc); cache[key] = v; @@ -98,9 +97,8 @@ public override bool Equals(object o) { if (this == o) return true; - var that = o as CachingDoubleValueSource; - if (that == null) return false; + if (!(o is CachingDoubleValueSource that)) return false; if (m_source != null ? 
!m_source.Equals(that.m_source) : that.m_source != null) return false; return true; diff --git a/src/Lucene.Net.Spatial/Util/ShapeFieldCacheDistanceValueSource.cs b/src/Lucene.Net.Spatial/Util/ShapeFieldCacheDistanceValueSource.cs index ffb8c56ebb..d334c2347a 100644 --- a/src/Lucene.Net.Spatial/Util/ShapeFieldCacheDistanceValueSource.cs +++ b/src/Lucene.Net.Spatial/Util/ShapeFieldCacheDistanceValueSource.cs @@ -109,11 +109,10 @@ public override string ToString(int doc) public override bool Equals(object o) { if (this == o) return true; - if (o == null || GetType() != o.GetType()) return false; + if (o is null || GetType() != o.GetType()) return false; - var that = o as ShapeFieldCacheDistanceValueSource; - if (that == null) return false; + if (!(o is ShapeFieldCacheDistanceValueSource that)) return false; if (!ctx.Equals(that.ctx)) return false; if (!from.Equals(that.from)) return false; if (!provider.Equals(that.provider)) return false; diff --git a/src/Lucene.Net.Spatial/Util/ShapeFieldCacheProvider.cs b/src/Lucene.Net.Spatial/Util/ShapeFieldCacheProvider.cs index 88468afc51..4bbb278fb8 100644 --- a/src/Lucene.Net.Spatial/Util/ShapeFieldCacheProvider.cs +++ b/src/Lucene.Net.Spatial/Util/ShapeFieldCacheProvider.cs @@ -45,7 +45,7 @@ public abstract class ShapeFieldCacheProvider protected internal readonly int m_defaultSize; protected internal readonly string m_shapeField; - public ShapeFieldCacheProvider(string shapeField, int defaultSize) + protected ShapeFieldCacheProvider(string shapeField, int defaultSize) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { // it may be a List or T this.m_shapeField = shapeField; diff --git a/src/Lucene.Net.Spatial/Util/ValueSourceFilter.cs b/src/Lucene.Net.Spatial/Util/ValueSourceFilter.cs index f0ff081e58..d8002d79ee 100644 --- a/src/Lucene.Net.Spatial/Util/ValueSourceFilter.cs +++ b/src/Lucene.Net.Spatial/Util/ValueSourceFilter.cs @@ -42,11 +42,8 @@ public class ValueSourceFilter : Filter 
public ValueSourceFilter(Filter startingFilter, ValueSource source, double min, double max) { - if (startingFilter == null) - { - throw new ArgumentException("please provide a non-null startingFilter; you can use QueryWrapperFilter(MatchAllDocsQuery) as a no-op filter", "startingFilter"); - } - this.startingFilter = startingFilter; + this.startingFilter = startingFilter ?? throw new ArgumentNullException(nameof(startingFilter), + "Please provide a non-null startingFilter; you can use QueryWrapperFilter(MatchAllDocsQuery) as a no-op filter"); this.source = source; this.min = min; this.max = max; diff --git a/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs b/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs index 3b2fc96fb3..7d69c108a9 100644 --- a/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs +++ b/src/Lucene.Net.Spatial/Vector/DistanceValueSource.cs @@ -71,7 +71,7 @@ internal class DistanceFunctionValue : FunctionValues { private readonly DistanceValueSource outerInstance; private readonly IDistanceCalculator calculator; - private readonly IPoint from; + //private readonly IPoint from; // LUCENENET: Never read private readonly double nullValue; private readonly FieldCache.Doubles ptX, ptY; @@ -86,7 +86,7 @@ public DistanceFunctionValue(DistanceValueSource outerInstance, AtomicReader rea validX = FieldCache.DEFAULT.GetDocsWithField(reader, outerInstance.strategy.FieldNameX); validY = FieldCache.DEFAULT.GetDocsWithField(reader, outerInstance.strategy.FieldNameY); - from = outerInstance.from; + //from = outerInstance.from; // LUCENENET: Never read calculator = outerInstance.strategy.SpatialContext.DistCalc; nullValue = (outerInstance.strategy.SpatialContext.IsGeo ? 
180 * outerInstance.multiplier : double.MaxValue); } @@ -121,10 +121,9 @@ public override string ToString(int doc) public override bool Equals(object o) { if (this == o) return true; - if (o == null || GetType() != o.GetType()) return false; + if (o is null || GetType() != o.GetType()) return false; - var that = o as DistanceValueSource; - if (that == null) return false; + if (!(o is DistanceValueSource that)) return false; if (!from.Equals(that.from)) return false; if (!strategy.Equals(that.strategy)) return false; diff --git a/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs b/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs index 6df2276dce..7d42266c30 100644 --- a/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs +++ b/src/Lucene.Net.Spatial/Vector/PointVectorStrategy.cs @@ -85,8 +85,7 @@ public virtual int PrecisionStep public override Field[] CreateIndexableFields(IShape shape) { - var point = shape as IPoint; - if (point != null) + if (shape is IPoint point) return CreateIndexableFields(point); throw new NotSupportedException("Can only index IPoint, not " + shape); @@ -140,9 +139,8 @@ public override ConstantScoreQuery MakeQuery(SpatialArgs args) var bbox = (IRectangle)shape; return new ConstantScoreQuery(MakeWithin(bbox)); } - else if (shape is ICircle) + else if (shape is ICircle circle) { - var circle = (ICircle)shape; var bbox = circle.BoundingBox; var vsf = new ValueSourceFilter( new QueryWrapperFilter(MakeWithin(bbox)), @@ -151,7 +149,7 @@ public override ConstantScoreQuery MakeQuery(SpatialArgs args) circle.Radius); return new ConstantScoreQuery(vsf); } - + throw new NotSupportedException("Only IRectangles and ICircles are currently supported, " + "found [" + shape.GetType().Name + "]"); //TODO } @@ -187,10 +185,8 @@ public virtual Query MakeQueryDistanceScore(SpatialArgs args) SpatialOperation.IsWithin)) { spatial = MakeWithin(bbox); - if (args.Shape is ICircle) + if (args.Shape is ICircle circle) { - var circle = (ICircle)args.Shape; - // 
Make the ValueSource valueSource = MakeDistanceValueSource(shape.Center); @@ -219,10 +215,11 @@ public virtual Query MakeQueryDistanceScore(SpatialArgs args) valueSource = MakeDistanceValueSource(shape.Center); } Query spatialRankingQuery = new FunctionQuery(valueSource); - var bq = new BooleanQuery(); - bq.Add(spatial, Occur.MUST); - bq.Add(spatialRankingQuery, Occur.MUST); - return bq; + return new BooleanQuery + { + { spatial, Occur.MUST }, + { spatialRankingQuery, Occur.MUST } + }; } /// @@ -268,10 +265,11 @@ private Query MakeDisjoint(IRectangle bbox) Query qX = RangeQuery(fieldNameX, bbox.MinX, bbox.MaxX); Query qY = RangeQuery(fieldNameY, bbox.MinY, bbox.MaxY); - var bq = new BooleanQuery(); - bq.Add(qX, Occur.MUST_NOT); - bq.Add(qY, Occur.MUST_NOT); - return bq; + return new BooleanQuery + { + { qX, Occur.MUST_NOT }, + { qY, Occur.MUST_NOT } + }; } } } diff --git a/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs b/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs index 7c504d74bc..91e43326f2 100644 --- a/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs +++ b/src/Lucene.Net.Suggest/Spell/DirectSpellChecker.cs @@ -325,7 +325,7 @@ public virtual SuggestWord[] SuggestSimilar(Term term, int numSug, IndexReader i string text = term.Text(); if (minQueryLength > 0 && text.CodePointCount(0, text.Length) < minQueryLength) { - return new SuggestWord[0]; + return Arrays.Empty(); } if (lowerCaseTerms) @@ -337,18 +337,18 @@ public virtual SuggestWord[] SuggestSimilar(Term term, int numSug, IndexReader i if (suggestMode == SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX && docfreq > 0) { - return new SuggestWord[0]; + return Arrays.Empty(); } int maxDoc = ir.MaxDoc; if (maxQueryFrequency >= 1f && docfreq > maxQueryFrequency) { - return new SuggestWord[0]; + return Arrays.Empty(); } else if (docfreq > (int)Math.Ceiling(maxQueryFrequency * maxDoc)) { - return new SuggestWord[0]; + return Arrays.Empty(); } if (suggestMode != SuggestMode.SUGGEST_MORE_POPULAR) @@ -365,7 +365,7 @@ 
public virtual SuggestWord[] SuggestSimilar(Term term, int numSug, IndexReader i docfreq = Math.Max(docfreq, (int)(thresholdFrequency * maxDoc) - 1); } - ICollection terms = null; + ICollection terms; // LUCENENET: IDE0059: Remove unnecessary value assignment int inspections = numSug * maxInspections; // try ed=1 first, in case we get lucky diff --git a/src/Lucene.Net.Suggest/Spell/LuceneLevenshteinDistance.cs b/src/Lucene.Net.Suggest/Spell/LuceneLevenshteinDistance.cs index c385f88039..ff903d3c5b 100644 --- a/src/Lucene.Net.Suggest/Spell/LuceneLevenshteinDistance.cs +++ b/src/Lucene.Net.Suggest/Spell/LuceneLevenshteinDistance.cs @@ -129,7 +129,7 @@ private static Int32sRef ToInt32sRef(string s) { var @ref = new Int32sRef(s.Length); // worst case int utf16Len = s.Length; - for (int i = 0, cp = 0; i < utf16Len; i += Character.CharCount(cp)) + for (int i = 0, cp; i < utf16Len; i += Character.CharCount(cp)) // LUCENENET: IDE0059: Remove unnecessary value assignment to cp { cp = @ref.Int32s[@ref.Length++] = Character.CodePointAt(s, i); } diff --git a/src/Lucene.Net.Suggest/Spell/NGramDistance.cs b/src/Lucene.Net.Suggest/Spell/NGramDistance.cs index f3670d6456..49ffd4809c 100644 --- a/src/Lucene.Net.Suggest/Spell/NGramDistance.cs +++ b/src/Lucene.Net.Suggest/Spell/NGramDistance.cs @@ -34,8 +34,7 @@ namespace Lucene.Net.Search.Spell /// public class NGramDistance : IStringDistance { - - private int n; + private readonly int n; // LUCENENET: marked readonly /// /// Creates an N-Gram distance measure using n-grams of the specified size. 
diff --git a/src/Lucene.Net.Suggest/Spell/SpellChecker.cs b/src/Lucene.Net.Suggest/Spell/SpellChecker.cs index 2fa8cbb2a0..5e6f824b29 100644 --- a/src/Lucene.Net.Suggest/Spell/SpellChecker.cs +++ b/src/Lucene.Net.Suggest/Spell/SpellChecker.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Runtime.CompilerServices; using Directory = Lucene.Net.Store.Directory; namespace Lucene.Net.Search.Spell @@ -46,7 +47,6 @@ namespace Lucene.Net.Search.Spell /// public class SpellChecker : IDisposable { - /// /// The default minimum score to use, if not specified by setting /// or overriding with . @@ -67,9 +67,9 @@ public class SpellChecker : IDisposable /// /// Boost value for start and end grams /// - private float bStart = 2.0f; + private readonly float bStart = 2.0f; // LUCENENET: marked readonly - private float bEnd = 1.0f; + private readonly float bEnd = 1.0f; // LUCENENET: marked readonly // don't use this searcher directly - see SwapSearcher() private IndexSearcher searcher; @@ -151,9 +151,7 @@ public virtual void SetSpellIndex(Directory spellIndexDir) if (!DirectoryReader.IndexExists(spellIndexDir)) { #pragma warning disable 612, 618 - using (var writer = new IndexWriter(spellIndexDir, new IndexWriterConfig(LuceneVersion.LUCENE_CURRENT, null))) - { - } + using var writer = new IndexWriter(spellIndexDir, new IndexWriterConfig(LuceneVersion.LUCENE_CURRENT, null)); #pragma warning restore 612, 618 } SwapSearcher(spellIndexDir); @@ -214,6 +212,7 @@ public virtual float Accuracy /// first criteria: the edit distance, second criteria (only if restricted mode): the popularity /// of the suggest words in the field of the user index /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual string[] SuggestSimilar(string word, int numSug) { return this.SuggestSimilar(word, numSug, null, null, SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); @@ -241,6 +240,7 @@ public virtual string[] SuggestSimilar(string word, int numSug) /// 
first criteria: the edit distance, second criteria (only if restricted mode): the popularity /// of the suggest words in the field of the user index /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual string[] SuggestSimilar(string word, int numSug, float accuracy) { return this.SuggestSimilar(word, numSug, null, null, SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX, accuracy); @@ -251,6 +251,7 @@ public virtual string[] SuggestSimilar(string word, int numSug, float accuracy) /// SuggestSimilar(word, numSug, ir, suggestMode, field, this.accuracy) /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual string[] SuggestSimilar(string word, int numSug, IndexReader ir, string field, SuggestMode suggestMode) { return SuggestSimilar(word, numSug, ir, field, suggestMode, this.accuracy); @@ -559,6 +560,7 @@ public void IndexDictionary(IDictionary dict, IndexWriterConfig config, bool ful } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int GetMin(int l) { if (l > 5) @@ -572,6 +574,7 @@ private static int GetMin(int l) return 1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int GetMax(int l) { if (l > 5) @@ -639,36 +642,49 @@ private IndexSearcher ObtainSearcher() } } - private void ReleaseSearcher(IndexSearcher aSearcher) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static void ReleaseSearcher(IndexSearcher aSearcher) // LUCENENET: CA1822: Mark members as static { // don't check if open - always decRef // don't decrement the private searcher - could have been swapped aSearcher.IndexReader.DecRef(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EnsureOpen() { if (disposed) { - throw new ObjectDisposedException(this.GetType().FullName, "Spellchecker has been closed"); + throw new ObjectDisposedException(this.GetType().FullName, "Spellchecker has been disposed."); } } /// - /// Dispose the underlying IndexSearcher used by this SpellChecker + /// Dispose the 
underlying used by this . /// if the close operation causes an /// if the is already disposed public void Dispose() { - if (!disposed) + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. + + // LUCENENET specific - implemented proper dispose pattern + protected virtual void Dispose(bool disposing) + { + if (disposing && !disposed) { lock (searcherLock) { disposed = true; - if (searcher != null) - { - searcher.IndexReader.Dispose(); - } + searcher?.IndexReader?.Dispose(); searcher = null; } } @@ -687,12 +703,9 @@ private void SwapSearcher(Directory dir) if (disposed) { indexSearcher.IndexReader.Dispose(); - throw new ObjectDisposedException(this.GetType().FullName, "Spellchecker has been closed"); - } - if (searcher != null) - { - searcher.IndexReader.Dispose(); + throw new ObjectDisposedException(this.GetType().FullName, "Spellchecker has been disposed."); } + searcher?.IndexReader?.Dispose(); // set the spellindex in the sync block - ensure consistency. 
searcher = indexSearcher; this.spellIndex = dir; @@ -705,6 +718,7 @@ private void SwapSearcher(Directory dir) /// a new read-only IndexSearcher /// f there is a low-level IO error // for testing purposes + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual IndexSearcher CreateSearcher(Directory dir) { return new IndexSearcher(DirectoryReader.Open(dir)); diff --git a/src/Lucene.Net.Suggest/Spell/WordBreakSpellChecker.cs b/src/Lucene.Net.Suggest/Spell/WordBreakSpellChecker.cs index 6c259f07a0..7130e7495e 100644 --- a/src/Lucene.Net.Suggest/Spell/WordBreakSpellChecker.cs +++ b/src/Lucene.Net.Suggest/Spell/WordBreakSpellChecker.cs @@ -1,5 +1,6 @@ using J2N; using Lucene.Net.Index; +using Lucene.Net.Support; using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; @@ -93,19 +94,19 @@ public virtual SuggestWord[][] SuggestWordBreaks(Term term, int maxSuggestions, { if (maxSuggestions < 1) { - return new SuggestWord[0][]; + return Arrays.Empty(); } int queueInitialCapacity = maxSuggestions > 10 ? 10 : maxSuggestions; IComparer queueComparer = sortMethod == BreakSuggestionSortMethod.NUM_CHANGES_THEN_MAX_FREQUENCY - ? (IComparer)new LengthThenMaxFreqComparer(this) - : new LengthThenSumFreqComparer(this); + ? (IComparer)new LengthThenMaxFreqComparer() + : new LengthThenSumFreqComparer(); JCG.PriorityQueue suggestions = new JCG.PriorityQueue(queueInitialCapacity, queueComparer); int origFreq = ir.DocFreq(term); if (origFreq > 0 && suggestMode == SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX) { - return new SuggestWord[0][]; + return Arrays.Empty(); } int useMinSuggestionFrequency = minSuggestionFrequency; @@ -114,7 +115,7 @@ public virtual SuggestWord[][] SuggestWordBreaks(Term term, int maxSuggestions, useMinSuggestionFrequency = (origFreq == 0 ? 
1 : origFreq); } - GenerateBreakUpSuggestions(term, ir, 1, maxSuggestions, useMinSuggestionFrequency, new SuggestWord[0], suggestions, 0, sortMethod); + GenerateBreakUpSuggestions(term, ir, 1, maxSuggestions, useMinSuggestionFrequency, Arrays.Empty(), suggestions, 0, sortMethod); SuggestWord[][] suggestionArray = new SuggestWord[suggestions.Count][]; for (int i = suggestions.Count - 1; i >= 0; i--) @@ -157,7 +158,7 @@ public virtual CombineSuggestion[] SuggestWordCombinations(Term[] terms, int max { if (maxSuggestions < 1) { - return new CombineSuggestion[0]; + return Arrays.Empty(); } int[] origFreqs = null; @@ -171,7 +172,7 @@ public virtual CombineSuggestion[] SuggestWordCombinations(Term[] terms, int max } int queueInitialCapacity = maxSuggestions > 10 ? 10 : maxSuggestions; - IComparer queueComparer = new CombinationsThenFreqComparer(this); + IComparer queueComparer = new CombinationsThenFreqComparer(); JCG.PriorityQueue suggestions = new JCG.PriorityQueue(queueInitialCapacity, queueComparer); int thisTimeEvaluations = 0; @@ -236,7 +237,7 @@ public virtual CombineSuggestion[] SuggestWordCombinations(Term[] terms, int max word.Freq = combinedTermFreq; word.Score = origIndexes.Length - 1; word.String = combinedTerm.Text(); - CombineSuggestionWrapper suggestion = new CombineSuggestionWrapper(this, new CombineSuggestion(word, origIndexes), (origIndexes.Length - 1)); + CombineSuggestionWrapper suggestion = new CombineSuggestionWrapper(new CombineSuggestion(word, origIndexes), (origIndexes.Length - 1)); suggestions.Enqueue(suggestion); if (suggestions.Count > maxSuggestions) { @@ -290,7 +291,7 @@ private int GenerateBreakUpSuggestions(Term term, IndexReader ir, SuggestWord rightWord = GenerateSuggestWord(ir, term.Field, rightText); if (rightWord.Freq >= useMinSuggestionFrequency) { - SuggestWordArrayWrapper suggestion = new SuggestWordArrayWrapper(this, NewSuggestion(prefix, leftWord, rightWord)); + SuggestWordArrayWrapper suggestion = new 
SuggestWordArrayWrapper(NewSuggestion(prefix, leftWord, rightWord)); suggestions.Enqueue(suggestion); if (suggestions.Count > maxSuggestions) { @@ -345,7 +346,7 @@ private static SuggestWord[] NewSuggestion(SuggestWord[] prefix, SuggestWord app return newSuggestion; } - private SuggestWord GenerateSuggestWord(IndexReader ir, string fieldname, string text) + private static SuggestWord GenerateSuggestWord(IndexReader ir, string fieldname, string text) // LUCENENET: CA1822: Mark members as static { Term term = new Term(fieldname, text); int freq = ir.DocFreq(term); @@ -408,13 +409,6 @@ public virtual int MaxEvaluations private sealed class LengthThenMaxFreqComparer : IComparer { - private readonly WordBreakSpellChecker outerInstance; - - public LengthThenMaxFreqComparer(WordBreakSpellChecker outerInstance) - { - this.outerInstance = outerInstance; - } - public int Compare(SuggestWordArrayWrapper o1, SuggestWordArrayWrapper o2) { if (o1.SuggestWords.Length != o2.SuggestWords.Length) @@ -431,13 +425,6 @@ public int Compare(SuggestWordArrayWrapper o1, SuggestWordArrayWrapper o2) private sealed class LengthThenSumFreqComparer : IComparer { - private readonly WordBreakSpellChecker outerInstance; - - public LengthThenSumFreqComparer(WordBreakSpellChecker outerInstance) - { - this.outerInstance = outerInstance; - } - public int Compare(SuggestWordArrayWrapper o1, SuggestWordArrayWrapper o2) { if (o1.SuggestWords.Length != o2.SuggestWords.Length) @@ -454,11 +441,8 @@ public int Compare(SuggestWordArrayWrapper o1, SuggestWordArrayWrapper o2) private sealed class CombinationsThenFreqComparer : IComparer { - private readonly WordBreakSpellChecker outerInstance; - - public CombinationsThenFreqComparer(WordBreakSpellChecker outerInstance) + public CombinationsThenFreqComparer() { - this.outerInstance = outerInstance; } public int Compare(CombineSuggestionWrapper o1, CombineSuggestionWrapper o2) @@ -477,15 +461,12 @@ public int Compare(CombineSuggestionWrapper o1, 
CombineSuggestionWrapper o2) private class SuggestWordArrayWrapper : IComparable { - private readonly WordBreakSpellChecker outerInstance; - private readonly SuggestWord[] suggestWords; private readonly int freqMax; private readonly int freqSum; - internal SuggestWordArrayWrapper(WordBreakSpellChecker outerInstance, SuggestWord[] suggestWords) + internal SuggestWordArrayWrapper(SuggestWord[] suggestWords) { - this.outerInstance = outerInstance; this.suggestWords = suggestWords; int aFreqSum = 0; int aFreqMax = 0; @@ -516,14 +497,11 @@ public int CompareTo(SuggestWordArrayWrapper other) private class CombineSuggestionWrapper : IComparable { - private readonly WordBreakSpellChecker outerInstance; - private readonly CombineSuggestion combineSuggestion; private readonly int numCombinations; - internal CombineSuggestionWrapper(WordBreakSpellChecker outerInstance, CombineSuggestion combineSuggestion, int numCombinations) + internal CombineSuggestionWrapper(CombineSuggestion combineSuggestion, int numCombinations) { - this.outerInstance = outerInstance; this.combineSuggestion = combineSuggestion; this.numCombinations = numCombinations; } diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs index e0ea82d236..b4e44cc4b9 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs @@ -160,7 +160,7 @@ public class AnalyzingSuggester : Lookup /// /// Whether position holes should appear in the automaton. - private bool preservePositionIncrements; + private readonly bool preservePositionIncrements; // LUCENENET: marked readonly /// /// Number of entries the lookup was built with @@ -243,7 +243,7 @@ public override long GetSizeInBytes() return fst == null ? 
0 : fst.GetSizeInBytes(); } - private void CopyDestTransitions(State from, State to, IList transitions) + private static void CopyDestTransitions(State from, State to, IList transitions) // LUCENENET: CA1822: Mark members as static { if (to.Accept) { diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs index 35c9f245a1..f95adc4644 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/BlendedInfixSuggester.cs @@ -86,12 +86,6 @@ public enum BlenderType //SCORE } - /// - /// LUCENENET specific to ensure our Queue is only altered by a single - /// thread at a time. - /// - private static readonly object syncLock = new object(); - /// /// Create a new instance, loading from a previously built /// directory, if it exists. @@ -214,7 +208,6 @@ protected override FieldType GetTextFieldType() /// size limit private static void BoundedTreeAdd(JCG.SortedSet results, Lookup.LookupResult result, int num) { - if (results.Count >= num) { var first = results.Min; // "get" our first object so we don't cross threads @@ -291,11 +284,10 @@ protected internal virtual double CalculateCoefficient(int position) return coefficient; } - private static IComparer LOOKUP_COMP = new LookUpComparer(); + private static readonly IComparer LOOKUP_COMP = new LookUpComparer(); // LUCENENET: marked readonly private class LookUpComparer : IComparer { - public virtual int Compare(Lookup.LookupResult o1, Lookup.LookupResult o2) { // order on weight diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs index b641140b8d..d7d2594a0c 100644 --- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs +++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs @@ -312,7 +312,7 @@ public virtual void Build(IInputEnumerator enumerator, double 
ramBufferSizeMB) var directory = OfflineSorter.DefaultTempDir(); // LUCENENET specific - using GetRandomFileName() instead of picking a random int - DirectoryInfo tempIndexPath = null; + DirectoryInfo tempIndexPath; // LUCENENET: IDE0059: Remove unnecessary value assignment while (true) { tempIndexPath = new DirectoryInfo(Path.Combine(directory.FullName, prefix + ".index." + Path.GetFileNameWithoutExtension(Path.GetRandomFileName()))); @@ -826,7 +826,7 @@ public TopNSearcherAnonymousInnerClassHelper( } - private BytesRef scratchBytes; + private readonly BytesRef scratchBytes; protected override void AddIfCompetitive(Util.Fst.Util.FSTPath path) { diff --git a/src/Lucene.Net.Suggest/Suggest/FileDictionary.cs b/src/Lucene.Net.Suggest/Suggest/FileDictionary.cs index bcf70b7efe..d739016f59 100644 --- a/src/Lucene.Net.Suggest/Suggest/FileDictionary.cs +++ b/src/Lucene.Net.Suggest/Suggest/FileDictionary.cs @@ -59,7 +59,7 @@ public class FileDictionary : IDictionary /// Tab-delimited fields are most common thus the default, but one can override this via the constructor /// public const string DEFAULT_FIELD_DELIMITER = "\t"; - private TextReader @in; + private readonly TextReader @in; // LUCENENET: marked readonly private string line; private bool done = false; private readonly string fieldDelimiter; diff --git a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs index 0aa067e4de..6cdc8a995e 100644 --- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs +++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletion.cs @@ -1,4 +1,5 @@ using Lucene.Net.Diagnostics; +using Lucene.Net.Support; using Lucene.Net.Util; using Lucene.Net.Util.Fst; using System; @@ -123,7 +124,7 @@ public FSTCompletion(FST automaton, bool higherWeightsFirst, bool exactF } else { - this.rootArcs = new FST.Arc[0]; + this.rootArcs = Arrays.Empty>(); } this.higherWeightsFirst = higherWeightsFirst; this.exactFirst = exactFirst; diff --git 
a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs index 1f01c80258..93e9dba0ac 100644 --- a/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs +++ b/src/Lucene.Net.Suggest/Suggest/Fst/FSTCompletionBuilder.cs @@ -182,15 +182,10 @@ public FSTCompletionBuilder(int buckets, IBytesRefSorter sorter, int shareMaxTai { if (buckets < 1 || buckets > 255) { - throw new ArgumentException("Buckets must be >= 1 and <= 255: " + buckets); + throw new ArgumentOutOfRangeException(nameof(buckets), buckets, "Buckets must be >= 1 and <= 255"); } - if (sorter == null) - { - throw new ArgumentException("BytesRefSorter must not be null."); - } - - this.sorter = sorter; + this.sorter = sorter ?? throw new ArgumentNullException("BytesRefSorter must not be null."); this.buckets = buckets; this.shareMaxTailLength = shareMaxTailLength; } diff --git a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs index 486a71de1e..397c881c95 100644 --- a/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs +++ b/src/Lucene.Net.Suggest/Suggest/Jaspell/JaspellTernarySearchTrie.cs @@ -275,70 +275,68 @@ public JaspellTernarySearchTrie(FileInfo file, bool compression) public JaspellTernarySearchTrie(FileInfo file, bool compression, CultureInfo culture) : this(culture) { - using (TextReader @in = (compression) ? + using TextReader @in = (compression) ? IOUtils.GetDecodingReader(new GZipStream(new FileStream(file.FullName, FileMode.Open), CompressionMode.Decompress), Encoding.UTF8) : - IOUtils.GetDecodingReader(new FileStream(file.FullName, FileMode.Open), Encoding.UTF8)) - { - string word; - int pos; - float? occur, one = new float?(1); - while ((word = @in.ReadLine()) != null) + IOUtils.GetDecodingReader(new FileStream(file.FullName, FileMode.Open), Encoding.UTF8); + string word; + int pos; + float? 
occur, one = new float?(1); + while ((word = @in.ReadLine()) != null) + { + pos = word.IndexOf('\t'); + occur = one; + if (pos != -1) { - pos = word.IndexOf('\t'); - occur = one; - if (pos != -1) - { - occur = Convert.ToSingle(word.Substring(pos + 1).Trim(), CultureInfo.InvariantCulture); - word = word.Substring(0, pos); - } - string key = culture.TextInfo.ToLower(word); - if (rootNode == null) - { - rootNode = new TSTNode(key[0], null); - } - TSTNode node = null; - if (key.Length > 0 && rootNode != null) + occur = Convert.ToSingle(word.Substring(pos + 1).Trim(), CultureInfo.InvariantCulture); + word = word.Substring(0, pos); + } + string key = culture.TextInfo.ToLower(word); + if (rootNode == null) + { + rootNode = new TSTNode(key[0], null); + } + TSTNode node = null; + if (key.Length > 0 && rootNode != null) + { + TSTNode currentNode = rootNode; + int charIndex = 0; + while (true) { - TSTNode currentNode = rootNode; - int charIndex = 0; - while (true) + if (currentNode == null) + { + break; + } + int charComp = CompareCharsAlphabetically(key[charIndex], currentNode.splitchar, culture); + if (charComp == 0) { - if (currentNode == null) + charIndex++; + if (charIndex == key.Length) { + node = currentNode; break; } - int charComp = CompareCharsAlphabetically(key[charIndex], currentNode.splitchar, culture); - if (charComp == 0) - { - charIndex++; - if (charIndex == key.Length) - { - node = currentNode; - break; - } - currentNode = currentNode.relatives[TSTNode.EQKID]; - } - else if (charComp < 0) - { - currentNode = currentNode.relatives[TSTNode.LOKID]; - } - else - { - currentNode = currentNode.relatives[TSTNode.HIKID]; - } + currentNode = currentNode.relatives[TSTNode.EQKID]; } - float? 
occur2 = null; - if (node != null) + else if (charComp < 0) { - occur2 = ((float?)(node.data)); + currentNode = currentNode.relatives[TSTNode.LOKID]; } - if (occur2 != null) + else { - occur += (float)occur2; + currentNode = currentNode.relatives[TSTNode.HIKID]; } - currentNode = GetOrCreateNode(culture.TextInfo.ToLower(word.Trim())); - currentNode.data = occur; } + float? occur2 = null; + if (node != null) + { + occur2 = ((float?)(node.data)); + } + if (occur2 != null) + { + occur += (float)occur2; + } + currentNode = GetOrCreateNode(culture.TextInfo.ToLower(word.Trim())); + currentNode.data = occur; } } } diff --git a/src/Lucene.Net.Suggest/Suggest/Lookup.cs b/src/Lucene.Net.Suggest/Suggest/Lookup.cs index ca74e4fd54..43ec5a35dd 100644 --- a/src/Lucene.Net.Suggest/Suggest/Lookup.cs +++ b/src/Lucene.Net.Suggest/Suggest/Lookup.cs @@ -190,7 +190,7 @@ public LookupResult[] GetResults() /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public Lookup() + protected Lookup() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net.TestFramework.NUnit/Support/TestFramework/Assert.cs b/src/Lucene.Net.TestFramework.NUnit/Support/TestFramework/Assert.cs index 882d39910e..fe891bc358 100644 --- a/src/Lucene.Net.TestFramework.NUnit/Support/TestFramework/Assert.cs +++ b/src/Lucene.Net.TestFramework.NUnit/Support/TestFramework/Assert.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Runtime.CompilerServices; using _NUnit = NUnit.Framework; using JCG = J2N.Collections.Generic; @@ -54,6 +55,7 @@ protected Assert() // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(T expected, T actual) { if (!JCG.EqualityComparer.Default.Equals(expected, actual)) @@ -78,6 +80,7 @@ public static void AreEqual(T expected, T actual) // // args: // Array of objects to be used in 
formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(T expected, T actual, string message, params object[] args) { if (!JCG.EqualityComparer.Default.Equals(expected, actual)) @@ -97,6 +100,7 @@ public static void AreEqual(T expected, T actual, string message, params obje // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(string expected, string actual) { if (!StringComparer.Ordinal.Equals(expected, actual)) @@ -119,6 +123,7 @@ public static void AreEqual(string expected, string actual) // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(string expected, string actual, string message, params object[] args) { if (!StringComparer.Ordinal.Equals(expected, actual)) @@ -138,6 +143,7 @@ public static void AreEqual(string expected, string actual, string message, para // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(bool expected, bool actual) { if (!expected.Equals(actual)) @@ -162,6 +168,7 @@ public static void AreEqual(bool expected, bool actual) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(bool expected, bool actual, string message, params object[] args) { if (!expected.Equals(actual)) @@ -189,6 +196,7 @@ public static void AreEqual(bool expected, bool actual, string message, params o // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(double expected, double actual, double delta, string message, params object[] args) { if (Math.Abs(expected - actual) > delta) @@ -209,6 +217,7 @@ public static void AreEqual(double expected, double actual, double delta, string // // delta: // The maximum acceptable difference between the the 
expected and the actual + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(double expected, double actual, double delta) { if (Math.Abs(expected - actual) > delta) @@ -235,6 +244,7 @@ public static void AreEqual(double expected, double actual, double delta) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(float expected, float actual, float delta, string message, params object[] args) { if (Math.Abs(expected - actual) > delta) @@ -255,6 +265,7 @@ public static void AreEqual(float expected, float actual, float delta, string me // // delta: // The maximum acceptable difference between the the expected and the actual + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(float expected, float actual, float delta) { if (Math.Abs(expected - actual) > delta) @@ -273,6 +284,7 @@ public static void AreEqual(float expected, float actual, float delta) // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(int expected, int actual) { if (!expected.Equals(actual)) @@ -295,6 +307,7 @@ public static void AreEqual(int expected, int actual) // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(int expected, int actual, string message, params object[] args) { if (!expected.Equals(actual)) @@ -314,6 +327,7 @@ public static void AreEqual(int expected, int actual, string message, params obj // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(long expected, long actual) { if (!expected.Equals(actual)) @@ -336,6 +350,7 @@ public static void AreEqual(long expected, long actual) // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(long expected, long actual, string message, params object[] 
args) { if (!expected.Equals(actual)) @@ -355,6 +370,7 @@ public static void AreEqual(long expected, long actual, string message, params o // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(byte expected, byte actual) { if (!expected.Equals(actual)) @@ -377,6 +393,7 @@ public static void AreEqual(byte expected, byte actual) // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(byte expected, byte actual, string message, params object[] args) { if (!expected.Equals(actual)) @@ -384,6 +401,7 @@ public static void AreEqual(byte expected, byte actual, string message, params o } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static JCG.SetEqualityComparer GetSetComparer(bool aggressive) { return aggressive @@ -391,6 +409,7 @@ private static JCG.SetEqualityComparer GetSetComparer(bool aggressive) : JCG.SetEqualityComparer.Default; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static JCG.ListEqualityComparer GetListComparer(bool aggressive) { return aggressive @@ -398,6 +417,7 @@ private static JCG.ListEqualityComparer GetListComparer(bool aggressive) : JCG.ListEqualityComparer.Default; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static JCG.DictionaryEqualityComparer GetDictionaryComparer(bool aggressive) { return aggressive @@ -412,17 +432,20 @@ private static string FormatErrorMessage(object expected, object actual, string return string.Concat(failureHeader, Environment.NewLine, Environment.NewLine, msg); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static string FormatCollection(object collection) { return string.Format(StringFormatter.CurrentCulture, "{0}", collection); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(ISet expected, ISet actual, bool aggressive = true) { if (!GetSetComparer(aggressive).Equals(expected, actual)) 
Fail(FailureFormat, FormatCollection(expected), FormatCollection(actual)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(ISet expected, ISet actual, bool aggressive, string message, params object[] args) { //Fail(FormatErrorMessage(expected, actual, message, args)); @@ -430,42 +453,49 @@ public static void AreEqual(ISet expected, ISet actual, bool aggressive Fail(FormatErrorMessage(FormatCollection(expected), FormatCollection(actual), message, args)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(ISet expected, ISet actual, bool aggressive, Func getMessage) { if (!GetSetComparer(aggressive).Equals(expected, actual)) Fail(FormatErrorMessage(FormatCollection(expected), FormatCollection(actual), getMessage())); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(IList expected, IList actual, bool aggressive = true) { if (!GetListComparer(aggressive).Equals(expected, actual)) Fail(string.Format(FailureFormat, FormatCollection(expected), FormatCollection(actual))); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(IList expected, IList actual, bool aggressive, string message, params object[] args) { if (!GetListComparer(aggressive).Equals(expected, actual)) Fail(FormatErrorMessage(FormatCollection(expected), FormatCollection(actual), message, args)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(IList expected, IList actual, bool aggressive, Func getMessage) { if (!GetListComparer(aggressive).Equals(expected, actual)) Fail(FormatErrorMessage(FormatCollection(expected), FormatCollection(actual), getMessage())); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(IDictionary expected, IDictionary actual, bool aggressive = true) { if (!GetDictionaryComparer(aggressive).Equals(expected, actual)) Fail(FailureFormat, FormatCollection(expected), FormatCollection(actual)); - 
} + } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(IDictionary expected, IDictionary actual, bool aggressive, string message, params object[] args) { if (!GetDictionaryComparer(aggressive).Equals(expected, actual)) Fail(FormatErrorMessage(FormatCollection(expected), FormatCollection(actual), message, args)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(IDictionary expected, IDictionary actual, bool aggressive, Func getMessage) { if (!GetDictionaryComparer(aggressive).Equals(expected, actual)) @@ -474,6 +504,7 @@ public static void AreEqual(IDictionary expected, ID // From CollectionAssert + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(T[] expected, T[] actual) { if (!J2N.Collections.ArrayEqualityComparer.OneDimensional.Equals(expected, actual)) @@ -481,6 +512,7 @@ public static void AreEqual(T[] expected, T[] actual) } // From CollectionAssert + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(T[] expected, T[] actual, string message, params object[] args) { if (!J2N.Collections.ArrayEqualityComparer.OneDimensional.Equals(expected, actual)) @@ -488,6 +520,7 @@ public static void AreEqual(T[] expected, T[] actual, string message, params } // From CollectionAssert + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreEqual(T[] expected, T[] actual, Func getMessage) { if (!J2N.Collections.ArrayEqualityComparer.OneDimensional.Equals(expected, actual)) @@ -513,6 +546,7 @@ public static void AreEqual(T[] expected, T[] actual, Func getMessage // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreNotEqual(object expected, object actual, string message, params object[] args) { _NUnit.Assert.AreNotEqual(expected, actual, message, args); @@ -530,6 +564,7 @@ public static void AreNotEqual(object expected, object actual, string 
message, p // // actual: // The actual value + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreNotEqual(object expected, object actual) { _NUnit.Assert.AreNotEqual(expected, actual); @@ -551,6 +586,7 @@ public static void AreNotEqual(object expected, object actual) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreNotSame(object expected, object actual, string message, params object[] args) { _NUnit.Assert.AreNotSame(expected, actual, message, args); @@ -566,6 +602,7 @@ public static void AreNotSame(object expected, object actual, string message, pa // // actual: // The actual object + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreNotSame(object expected, object actual) { _NUnit.Assert.AreNotSame(expected, actual); @@ -581,6 +618,7 @@ public static void AreNotSame(object expected, object actual) // // actual: // The actual object + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreSame(object expected, object actual) { _NUnit.Assert.AreSame(expected, actual); @@ -602,11 +640,13 @@ public static void AreSame(object expected, object actual) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void AreSame(object expected, object actual, string message, params object[] args) { _NUnit.Assert.AreSame(expected, actual, message, args); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Fail(string message, params object[] args) { _NUnit.Assert.Fail(message, args); @@ -615,6 +655,7 @@ public static void Fail(string message, params object[] args) // Summary: // Throws an NUnit.Framework.AssertionException. This is used by the other Assert // functions. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Fail() { _NUnit.Assert.Fail(); @@ -627,6 +668,7 @@ public static void Fail() // Parameters: // message: // The message to initialize the NUnit.Framework.AssertionException with. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Fail(string message) { _NUnit.Assert.Fail(message); @@ -646,6 +688,7 @@ public static void Fail(string message) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void False(bool condition, string message, params object[] args) { if (condition) @@ -659,6 +702,7 @@ public static void False(bool condition, string message, params object[] args) // Parameters: // condition: // The evaluated condition + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void False(bool condition) { if (condition) @@ -673,6 +717,7 @@ public static void False(bool condition) // Parameters: // condition: // The evaluated condition + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsFalse(bool condition) { if (condition) @@ -693,6 +738,7 @@ public static void IsFalse(bool condition) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsFalse(bool condition, string message, params object[] args) { if (condition) @@ -713,6 +759,7 @@ public static void IsFalse(bool condition, string message, params object[] args) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsNotNull(object anObject, string message, params object[] args) { _NUnit.Assert.IsNotNull(anObject, message, args); @@ -725,6 +772,7 @@ public static void IsNotNull(object anObject, string message, params object[] ar // Parameters: // anObject: // The object that is to be tested + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsNotNull(object anObject) { _NUnit.Assert.IsNotNull(anObject); @@ -737,6 +785,7 @@ public static void IsNotNull(object anObject) // Parameters: // anObject: // The object that is to be tested + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsNull(object anObject) { _NUnit.Assert.IsNull(anObject); @@ -755,6 +804,7 @@ public static void IsNull(object anObject) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsNull(object anObject, string message, params object[] args) { _NUnit.Assert.IsNull(anObject, message, args); @@ -774,6 +824,7 @@ public static void IsNull(object anObject, string message, params object[] args) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsTrue(bool condition, string message, params object[] args) { if (!condition) @@ -788,6 +839,7 @@ public static void IsTrue(bool condition, string message, params object[] args) // Parameters: // condition: // The evaluated condition + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsTrue(bool condition) { if (!condition) @@ -802,6 +854,7 @@ public static void IsTrue(bool condition) // Parameters: // anObject: // The object that is to be tested + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void NotNull(object anObject) { if (!(anObject is null)) @@ -821,6 +874,7 @@ public static void NotNull(object anObject) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void NotNull(object anObject, string message, params object[] args) { if (anObject is null) @@ -841,6 +895,7 @@ public static void NotNull(object anObject, string message, params object[] args // // args:void Null // Array of objects to be used in 
formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Null(object anObject, string message, params object[] args) { if (!(anObject is null)) @@ -854,6 +909,7 @@ public static void Null(object anObject, string message, params object[] args) // Parameters: // anObject: // The object that is to be tested + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Null(object anObject) { if (!(anObject is null)) @@ -874,6 +930,7 @@ public static void Null(object anObject) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void True(bool condition, string message, params object[] args) { if (!condition) @@ -888,6 +945,7 @@ public static void True(bool condition, string message, params object[] args) // Parameters: // condition: // The evaluated condition + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void True(bool condition) { if (!condition) @@ -909,6 +967,7 @@ public static void True(bool condition) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsNotEmpty(string aString, string message, params object[] args) { if (string.Empty.Equals(aString)) @@ -922,6 +981,7 @@ public static void IsNotEmpty(string aString, string message, params object[] ar // Parameters: // anObject: // The object that is to be tested + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsNotEmpty(string aString) { if (string.Empty.Equals(aString)) @@ -943,6 +1003,7 @@ public static void IsNotEmpty(string aString) // // args: // Array of objects to be used in formatting the message + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsEmpty(string aString, string message, params object[] args) { if (!string.Empty.Equals(aString)) @@ -956,49 +1017,58 @@ public static void IsEmpty(string aString, string message, 
params object[] args) // Parameters: // anObject: // The object that is to be tested + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IsEmpty(string aString) { if (!string.Empty.Equals(aString)) _NUnit.Assert.IsEmpty(aString); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void LessOrEqual(int arg1, int arg2) { if (arg1 > arg2) _NUnit.Assert.LessOrEqual(arg1, arg2); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Greater(int arg1, int arg2) { if (arg1 <= arg2) _NUnit.Assert.Greater(arg1, arg2); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void DoesNotThrow(Action action, string message, params object[] args) { _NUnit.Assert.DoesNotThrow(() => action(), message, args); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void DoesNotThrow(Action action) { _NUnit.Assert.DoesNotThrow(() => action()); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Exception Throws(Action action, string message, params object[] args) { return Throws(typeof(TException), action, message, args); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Exception Throws(Action action) { return Throws(typeof(TException), action); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Exception Throws(Type expectedExceptionType, Action action) { return _NUnit.Assert.Throws(expectedExceptionType, () => action()); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Exception Throws(Type expectedExceptionType, Action action, string message, params object[] args) { return _NUnit.Assert.Throws(expectedExceptionType, () => action(), message, args); @@ -1022,11 +1092,13 @@ public static Exception ThrowsFileAlreadyExistsException(string filePath, Action } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Exception ThrowsAnyOf(Action action) { return ThrowsAnyOf(new Type[] { typeof(TException1), 
typeof(TException2) }, action); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Exception ThrowsAnyOf(Action action) { return ThrowsAnyOf(new Type[] { typeof(TException1), typeof(TException2), typeof(TException3) }, action); diff --git a/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs b/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs index 97490d5e7b..b81d9f6eb1 100644 --- a/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs +++ b/src/Lucene.Net.TestFramework/Analysis/BaseTokenStreamTestCase.cs @@ -68,7 +68,8 @@ public override void Clear() public override bool Equals(object other) { - return (other is CheckClearAttributesAttribute && ((CheckClearAttributesAttribute)other).clearCalled == this.clearCalled); + return other is CheckClearAttributesAttribute checkClearAttributesAttribute + && checkClearAttributesAttribute.clearCalled == this.clearCalled; } public override int GetHashCode() @@ -420,7 +421,9 @@ public static void AssertTokenStreamContents(TokenStream ts, string[] output, in } } +#pragma warning disable IDE0060 // Remove unused parameter public static void AssertTokenStreamContents(TokenStream ts, string[] output, int[] startOffsets, int[] endOffsets, string[] types, int[] posIncrements, int[] posLengths, int? 
finalOffset, bool[] keywordAtts, bool offsetsAreCorrect) +#pragma warning restore IDE0060 // Remove unused parameter { AssertTokenStreamContents(ts, output, startOffsets, endOffsets, types, posIncrements, posLengths, finalOffset, null, null, offsetsAreCorrect, null); } @@ -1123,9 +1126,9 @@ private static void CheckAnalysisConsistency(Random random, Analyzer a, bool use } // Throw an errant exception from the Reader: - MockReaderWrapper evilReader = new MockReaderWrapper(random, new StringReader(text)); + using MockReaderWrapper evilReader = new MockReaderWrapper(random, new StringReader(text)); evilReader.ThrowExcAfterChar(random.Next(text.Length)); // LUCENENET note, Next() is exclusive, so we don't need +1 - reader = evilReader; + //reader = evilReader; // LUCENENET: IDE0059: Remove unnecessary value assignment try { @@ -1282,12 +1285,10 @@ protected internal virtual string ToDot(Analyzer a, string inputText) protected internal virtual void ToDotFile(Analyzer a, string inputText, string localFileName) { - using (StreamWriter w = new StreamWriter(new FileStream(localFileName, FileMode.Open), Encoding.UTF8)) - { - TokenStream ts = a.GetTokenStream("field", new StringReader(inputText)); - ts.Reset(); - (new TokenStreamToDot(inputText, ts,/* new PrintWriter(*/w/*)*/)).ToDot(); - } + using StreamWriter w = new StreamWriter(new FileStream(localFileName, FileMode.Open), Encoding.UTF8); + TokenStream ts = a.GetTokenStream("field", new StringReader(inputText)); + ts.Reset(); + (new TokenStreamToDot(inputText, ts,/* new PrintWriter(*/w/*)*/)).ToDot(); } [ExceptionToNetNumericConvention] // LUCENENET: Private API, keeping as-is diff --git a/src/Lucene.Net.TestFramework/Analysis/CollationTestBase.cs b/src/Lucene.Net.TestFramework/Analysis/CollationTestBase.cs index eab43e684a..cabf204bbb 100644 --- a/src/Lucene.Net.TestFramework/Analysis/CollationTestBase.cs +++ b/src/Lucene.Net.TestFramework/Analysis/CollationTestBase.cs @@ -75,95 +75,82 @@ protected virtual string 
EncodeCollationKey(byte[] keyBits) public virtual void TestFarsiRangeFilterCollating(Analyzer analyzer, BytesRef firstBeg, BytesRef firstEnd, BytesRef secondBeg, BytesRef secondEnd) { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + using (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer))) { - using (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer))) - { - Document doc = new Document(); - doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES)); - doc.Add(new StringField("body", "body", Field.Store.YES)); - writer.AddDocument(doc); - } // writer.Dispose(); - using (IndexReader reader = DirectoryReader.Open(dir)) - { - IndexSearcher searcher = new IndexSearcher(reader); - Search.Query query = new TermQuery(new Term("body", "body")); - - // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi - // orders the U+0698 character before the U+0633 character, so the single - // index Term below should NOT be returned by a TermRangeFilter with a Farsi - // Collator (or an Arabic one for the case when Farsi searcher not - // supported). 
- ScoreDoc[] result = searcher.Search(query, new TermRangeFilter("content", firstBeg, firstEnd, true, true), 1).ScoreDocs; - Assert.AreEqual(0, result.Length, "The index Term should not be included."); - - result = searcher.Search(query, new TermRangeFilter("content", secondBeg, secondEnd, true, true), 1).ScoreDocs; - Assert.AreEqual(1, result.Length, "The index Term should be included."); - - } // reader.Dispose(); - } // dir.Dispose(); + Document doc = new Document(); + doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES)); + doc.Add(new StringField("body", "body", Field.Store.YES)); + writer.AddDocument(doc); + } // writer.Dispose(); + using IndexReader reader = DirectoryReader.Open(dir); + IndexSearcher searcher = new IndexSearcher(reader); + Search.Query query = new TermQuery(new Term("body", "body")); + + // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi + // orders the U+0698 character before the U+0633 character, so the single + // index Term below should NOT be returned by a TermRangeFilter with a Farsi + // Collator (or an Arabic one for the case when Farsi searcher not + // supported). 
+ ScoreDoc[] result = searcher.Search(query, new TermRangeFilter("content", firstBeg, firstEnd, true, true), 1).ScoreDocs; + Assert.AreEqual(0, result.Length, "The index Term should not be included."); + + result = searcher.Search(query, new TermRangeFilter("content", secondBeg, secondEnd, true, true), 1).ScoreDocs; + Assert.AreEqual(1, result.Length, "The index Term should be included."); } public virtual void TestFarsiRangeQueryCollating(Analyzer analyzer, BytesRef firstBeg, BytesRef firstEnd, BytesRef secondBeg, BytesRef secondEnd) { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + using (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer))) { - using (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer))) - { - Document doc = new Document(); - - // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi - // orders the U+0698 character before the U+0633 character, so the single - // index Term below should NOT be returned by a TermRangeQuery with a Farsi - // Collator (or an Arabic one for the case when Farsi is not supported). 
- doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES)); - writer.AddDocument(doc); - } // writer.Dispose(); - using (IndexReader reader = DirectoryReader.Open(dir)) - { - IndexSearcher searcher = new IndexSearcher(reader); - - Search.Query query = new TermRangeQuery("content", firstBeg, firstEnd, true, true); - ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs; - Assert.AreEqual(0, hits.Length, "The index Term should not be included."); - - query = new TermRangeQuery("content", secondBeg, secondEnd, true, true); - hits = searcher.Search(query, null, 1000).ScoreDocs; - Assert.AreEqual(1, hits.Length, "The index Term should be included."); - } // reader.Dispose(); - } // dir.Dispose(); + Document doc = new Document(); + + // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi + // orders the U+0698 character before the U+0633 character, so the single + // index Term below should NOT be returned by a TermRangeQuery with a Farsi + // Collator (or an Arabic one for the case when Farsi is not supported). 
+ doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES)); + writer.AddDocument(doc); + } // writer.Dispose(); + using IndexReader reader = DirectoryReader.Open(dir); + IndexSearcher searcher = new IndexSearcher(reader); + + Search.Query query = new TermRangeQuery("content", firstBeg, firstEnd, true, true); + ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs; + Assert.AreEqual(0, hits.Length, "The index Term should not be included."); + + query = new TermRangeQuery("content", secondBeg, secondEnd, true, true); + hits = searcher.Search(query, null, 1000).ScoreDocs; + Assert.AreEqual(1, hits.Length, "The index Term should be included."); } public virtual void TestFarsiTermRangeQuery(Analyzer analyzer, BytesRef firstBeg, BytesRef firstEnd, BytesRef secondBeg, BytesRef secondEnd) { - using (Directory farsiIndex = NewDirectory()) + using Directory farsiIndex = NewDirectory(); + using (IndexWriter writer = new IndexWriter(farsiIndex, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer))) { - using (IndexWriter writer = new IndexWriter(farsiIndex, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer))) - { - Document doc = new Document(); - doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES)); - doc.Add(new StringField("body", "body", Field.Store.YES)); - writer.AddDocument(doc); - } // writer.Dispose(); - - using (IndexReader reader = DirectoryReader.Open(farsiIndex)) - { - IndexSearcher search = NewSearcher(reader); - - // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi - // orders the U+0698 character before the U+0633 character, so the single - // index Term below should NOT be returned by a TermRangeQuery - // with a Farsi Collator (or an Arabic one for the case when Farsi is - // not supported). 
- Search.Query csrq = new TermRangeQuery("content", firstBeg, firstEnd, true, true); - ScoreDoc[] result = search.Search(csrq, null, 1000).ScoreDocs; - Assert.AreEqual(0, result.Length, "The index Term should not be included."); - - csrq = new TermRangeQuery("content", secondBeg, secondEnd, true, true); - result = search.Search(csrq, null, 1000).ScoreDocs; - Assert.AreEqual(1, result.Length, "The index Term should be included."); - } // reader.Dispose(); - } // farsiIndex.Dispose(); + Document doc = new Document(); + doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES)); + doc.Add(new StringField("body", "body", Field.Store.YES)); + writer.AddDocument(doc); + } // writer.Dispose(); + + using IndexReader reader = DirectoryReader.Open(farsiIndex); + IndexSearcher search = NewSearcher(reader); + + // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi + // orders the U+0698 character before the U+0633 character, so the single + // index Term below should NOT be returned by a TermRangeQuery + // with a Farsi Collator (or an Arabic one for the case when Farsi is + // not supported). 
+ Search.Query csrq = new TermRangeQuery("content", firstBeg, firstEnd, true, true); + ScoreDoc[] result = search.Search(csrq, null, 1000).ScoreDocs; + Assert.AreEqual(0, result.Length, "The index Term should not be included."); + + csrq = new TermRangeQuery("content", secondBeg, secondEnd, true, true); + result = search.Search(csrq, null, 1000).ScoreDocs; + Assert.AreEqual(1, result.Length, "The index Term should be included."); } /// @@ -185,63 +172,59 @@ public virtual void TestCollationKeySort(Analyzer usAnalyzer, string svResult, string dkResult) { - using (Directory indexStore = NewDirectory()) + using Directory indexStore = NewDirectory(); + using (IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)))) { - using (IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)))) - { - // document data: - // the tracer field is used to determine which document was hit - string[][] sortData = new string[][] { new string[] { "A", "x", "p\u00EAche", "p\u00EAche", "p\u00EAche", "p\u00EAche" }, new string[] { "B", "y", "HAT", "HAT", "HAT", "HAT" }, new string[] { "C", "x", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9" }, new string[] { "D", "y", "HUT", "HUT", "HUT", "HUT" }, new string[] { "E", "x", "peach", "peach", "peach", "peach" }, new string[] { "F", "y", "H\u00C5T", "H\u00C5T", "H\u00C5T", "H\u00C5T" }, new string[] { "G", "x", "sin", "sin", "sin", "sin" }, new string[] { "H", "y", "H\u00D8T", "H\u00D8T", "H\u00D8T", "H\u00D8T" }, new string[] { "I", "x", "s\u00EDn", "s\u00EDn", "s\u00EDn", "s\u00EDn" }, new string[] { "J", "y", "HOT", "HOT", "HOT", "HOT" } }; + // document data: + // the tracer field is used to determine which document was hit + string[][] sortData = new string[][] { new string[] { "A", "x", 
"p\u00EAche", "p\u00EAche", "p\u00EAche", "p\u00EAche" }, new string[] { "B", "y", "HAT", "HAT", "HAT", "HAT" }, new string[] { "C", "x", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9" }, new string[] { "D", "y", "HUT", "HUT", "HUT", "HUT" }, new string[] { "E", "x", "peach", "peach", "peach", "peach" }, new string[] { "F", "y", "H\u00C5T", "H\u00C5T", "H\u00C5T", "H\u00C5T" }, new string[] { "G", "x", "sin", "sin", "sin", "sin" }, new string[] { "H", "y", "H\u00D8T", "H\u00D8T", "H\u00D8T", "H\u00D8T" }, new string[] { "I", "x", "s\u00EDn", "s\u00EDn", "s\u00EDn", "s\u00EDn" }, new string[] { "J", "y", "HOT", "HOT", "HOT", "HOT" } }; - FieldType customType = new FieldType(); - customType.IsStored = true; + FieldType customType = new FieldType(); + customType.IsStored = true; - for (int i = 0; i < sortData.Length; ++i) + for (int i = 0; i < sortData.Length; ++i) + { + Document doc = new Document(); + doc.Add(new Field("tracer", sortData[i][0], customType)); + doc.Add(new TextField("contents", sortData[i][1], Field.Store.NO)); + if (sortData[i][2] != null) { - Document doc = new Document(); - doc.Add(new Field("tracer", sortData[i][0], customType)); - doc.Add(new TextField("contents", sortData[i][1], Field.Store.NO)); - if (sortData[i][2] != null) - { - doc.Add(new TextField("US", usAnalyzer.GetTokenStream("US", new StringReader(sortData[i][2])))); - } - if (sortData[i][3] != null) - { - doc.Add(new TextField("France", franceAnalyzer.GetTokenStream("France", new StringReader(sortData[i][3])))); - } - if (sortData[i][4] != null) - { - doc.Add(new TextField("Sweden", swedenAnalyzer.GetTokenStream("Sweden", new StringReader(sortData[i][4])))); - } - if (sortData[i][5] != null) - { - doc.Add(new TextField("Denmark", denmarkAnalyzer.GetTokenStream("Denmark", new StringReader(sortData[i][5])))); - } - writer.AddDocument(doc); + doc.Add(new TextField("US", usAnalyzer.GetTokenStream("US", new StringReader(sortData[i][2])))); } - 
writer.ForceMerge(1); - } // writer.Dispose(); - using (IndexReader reader = DirectoryReader.Open(indexStore)) - { - IndexSearcher searcher = new IndexSearcher(reader); + if (sortData[i][3] != null) + { + doc.Add(new TextField("France", franceAnalyzer.GetTokenStream("France", new StringReader(sortData[i][3])))); + } + if (sortData[i][4] != null) + { + doc.Add(new TextField("Sweden", swedenAnalyzer.GetTokenStream("Sweden", new StringReader(sortData[i][4])))); + } + if (sortData[i][5] != null) + { + doc.Add(new TextField("Denmark", denmarkAnalyzer.GetTokenStream("Denmark", new StringReader(sortData[i][5])))); + } + writer.AddDocument(doc); + } + writer.ForceMerge(1); + } // writer.Dispose(); + using IndexReader reader = DirectoryReader.Open(indexStore); + IndexSearcher searcher = new IndexSearcher(reader); - Sort sort = new Sort(); - Search.Query queryX = new TermQuery(new Term("contents", "x")); - Search.Query queryY = new TermQuery(new Term("contents", "y")); + Sort sort = new Sort(); + Search.Query queryX = new TermQuery(new Term("contents", "x")); + Search.Query queryY = new TermQuery(new Term("contents", "y")); - sort.SetSort(new SortField("US", SortFieldType.STRING)); - this.AssertMatches(searcher, queryY, sort, usResult); + sort.SetSort(new SortField("US", SortFieldType.STRING)); + this.AssertMatches(searcher, queryY, sort, usResult); - sort.SetSort(new SortField("France", SortFieldType.STRING)); - this.AssertMatches(searcher, queryX, sort, frResult); + sort.SetSort(new SortField("France", SortFieldType.STRING)); + this.AssertMatches(searcher, queryX, sort, frResult); - sort.SetSort(new SortField("Sweden", SortFieldType.STRING)); - this.AssertMatches(searcher, queryY, sort, svResult); + sort.SetSort(new SortField("Sweden", SortFieldType.STRING)); + this.AssertMatches(searcher, queryY, sort, svResult); - sort.SetSort(new SortField("Denmark", SortFieldType.STRING)); - this.AssertMatches(searcher, queryY, sort, dkResult); - } // reader.Dispose(); - } // 
indexStore.Dispose(); + sort.SetSort(new SortField("Denmark", SortFieldType.STRING)); + this.AssertMatches(searcher, queryY, sort, dkResult); } /// @@ -305,7 +288,7 @@ public virtual void AssertThreadSafe(Analyzer analyzer) ThreadJob[] threads = new ThreadJob[numThreads]; for (int i = 0; i < numThreads; i++) { - threads[i] = new ThreadAnonymousInnerClassHelper(this, analyzer, map); + threads[i] = new ThreadAnonymousInnerClassHelper(analyzer, map); } for (int i = 0; i < numThreads; i++) { @@ -319,14 +302,11 @@ public virtual void AssertThreadSafe(Analyzer analyzer) private class ThreadAnonymousInnerClassHelper : ThreadJob { - private readonly CollationTestBase outerInstance; - - private Analyzer analyzer; - private IDictionary map; + private readonly Analyzer analyzer; + private readonly IDictionary map; - public ThreadAnonymousInnerClassHelper(CollationTestBase outerInstance, Analyzer analyzer, IDictionary map) + public ThreadAnonymousInnerClassHelper(Analyzer analyzer, IDictionary map) { - this.outerInstance = outerInstance; this.analyzer = analyzer; this.map = map; } diff --git a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs index 54afdf3398..c2ab2a8ece 100644 --- a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs @@ -162,7 +162,7 @@ protected virtual void AfterPosition() private class RollingBufferAnonymousInnerClassHelper : RollingBuffer { - private LookaheadTokenFilter outerInstance; + private readonly LookaheadTokenFilter outerInstance; public RollingBufferAnonymousInnerClassHelper(LookaheadTokenFilter outerInstance) : base(outerInstance.NewPosition) diff --git a/src/Lucene.Net.TestFramework/Analysis/MockAnalyzer.cs b/src/Lucene.Net.TestFramework/Analysis/MockAnalyzer.cs index e5f5214937..a172ee029e 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockAnalyzer.cs +++ 
b/src/Lucene.Net.TestFramework/Analysis/MockAnalyzer.cs @@ -56,7 +56,7 @@ public sealed class MockAnalyzer : Analyzer private int positionIncrementGap; private int? offsetGap; private readonly Random random; - private IDictionary previousMappings = new Dictionary(); + private readonly IDictionary previousMappings = new Dictionary(); // LUCENENET: marked readonly private bool enableChecks = true; private int maxTokenLength = MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH; @@ -107,8 +107,7 @@ private TokenFilter MaybePayload(TokenFilter stream, string fieldName) { lock (this) { - int? val; - previousMappings.TryGetValue(fieldName, out val); + previousMappings.TryGetValue(fieldName, out int? val); if (val == null) { val = -1; // no payloads diff --git a/src/Lucene.Net.TestFramework/Analysis/MockGraphTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/MockGraphTokenFilter.cs index 1dd914c2c4..dce9b04cb3 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockGraphTokenFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockGraphTokenFilter.cs @@ -33,7 +33,9 @@ namespace Lucene.Net.Analysis public sealed class MockGraphTokenFilter : LookaheadTokenFilter { - new private static bool DEBUG = false; +#pragma warning disable CA1802 // Use literals where appropriate + new private static readonly bool DEBUG = false; +#pragma warning restore CA1802 // Use literals where appropriate private readonly ICharTermAttribute termAtt; diff --git a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs index 278b3dffac..05eda512ba 100644 --- a/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs +++ b/src/Lucene.Net.TestFramework/Analysis/MockTokenizer.cs @@ -135,7 +135,7 @@ public MockTokenizer(AttributeFactory factory, TextReader input, CharacterRunAut /// Calls MockTokenizer(AttributeFactory, TextReader, WHITESPACE, true) /// public MockTokenizer(AttributeFactory factory, TextReader input) - : this(input, WHITESPACE, true) + : 
this(factory, input, WHITESPACE, true) // LUCENENET specific - added missing factory parameter, as it is clearly a bug { } public sealed override bool IncrementToken() diff --git a/src/Lucene.Net.TestFramework/Analysis/ValidatingTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/ValidatingTokenFilter.cs index 238c317eb5..18f3ddef29 100644 --- a/src/Lucene.Net.TestFramework/Analysis/ValidatingTokenFilter.cs +++ b/src/Lucene.Net.TestFramework/Analysis/ValidatingTokenFilter.cs @@ -44,10 +44,10 @@ public sealed class ValidatingTokenFilter : TokenFilter private readonly IDictionary posToEndOffset = new Dictionary(); - private PositionIncrementAttribute posIncAtt; - private PositionLengthAttribute posLenAtt; - private OffsetAttribute offsetAtt; - private CharTermAttribute termAtt; + private readonly PositionIncrementAttribute posIncAtt; + private readonly PositionLengthAttribute posLenAtt; + private readonly OffsetAttribute offsetAtt; + private readonly CharTermAttribute termAtt; private readonly bool offsetsAreCorrect; private readonly string name; @@ -90,7 +90,7 @@ public override bool IncrementToken() int startOffset = 0; int endOffset = 0; - int posLen = 0; + int posLen; // LUCENENET: IDE0059: Remove unnecessary value assignment if (posIncAtt != null) { diff --git a/src/Lucene.Net.TestFramework/Analysis/VocabularyAssert.cs b/src/Lucene.Net.TestFramework/Analysis/VocabularyAssert.cs index 5ffc59c2f9..3254a195e7 100644 --- a/src/Lucene.Net.TestFramework/Analysis/VocabularyAssert.cs +++ b/src/Lucene.Net.TestFramework/Analysis/VocabularyAssert.cs @@ -31,16 +31,14 @@ public static class VocabularyAssert // LUCENENET specific - made static because /// Run a vocabulary test against two data files. 
public static void AssertVocabulary(Analyzer a, Stream voc, Stream @out) { - using (TextReader vocReader = new StreamReader(voc, Encoding.UTF8)) - using (TextReader outputReader = new StreamReader(@out, Encoding.UTF8)) + using TextReader vocReader = new StreamReader(voc, Encoding.UTF8); + using TextReader outputReader = new StreamReader(@out, Encoding.UTF8); + string inputWord = null; + while ((inputWord = vocReader.ReadLine()) != null) { - string inputWord = null; - while ((inputWord = vocReader.ReadLine()) != null) - { - string expectedWord = outputReader.ReadLine(); - Assert.IsNotNull(expectedWord); - BaseTokenStreamTestCase.CheckOneTerm(a, inputWord, expectedWord); - } + string expectedWord = outputReader.ReadLine(); + Assert.IsNotNull(expectedWord); + BaseTokenStreamTestCase.CheckOneTerm(a, inputWord, expectedWord); } } @@ -48,18 +46,16 @@ public static void AssertVocabulary(Analyzer a, Stream voc, Stream @out) /// Run a vocabulary test against one file: tab separated. public static void AssertVocabulary(Analyzer a, Stream vocOut) { - using (TextReader vocReader = new StreamReader(vocOut, Encoding.UTF8)) + using TextReader vocReader = new StreamReader(vocOut, Encoding.UTF8); + string inputLine = null; + while ((inputLine = vocReader.ReadLine()) != null) { - string inputLine = null; - while ((inputLine = vocReader.ReadLine()) != null) + if (inputLine.StartsWith("#", System.StringComparison.Ordinal) || inputLine.Trim().Length == 0) { - if (inputLine.StartsWith("#", System.StringComparison.Ordinal) || inputLine.Trim().Length == 0) - { - continue; // comment - } - string[] words = inputLine.Split('\t').TrimEnd(); - BaseTokenStreamTestCase.CheckOneTerm(a, words[0], words[1]); + continue; // comment } + string[] words = inputLine.Split('\t').TrimEnd(); + BaseTokenStreamTestCase.CheckOneTerm(a, words[0], words[1]); } } @@ -67,23 +63,19 @@ public static void AssertVocabulary(Analyzer a, Stream vocOut) /// Run a vocabulary test against two data files inside a zip file. 
public static void AssertVocabulary(Analyzer a, Stream zipFile, string voc, string @out) { - using (ZipArchive zip = new ZipArchive(zipFile, ZipArchiveMode.Read, false, Encoding.UTF8)) - using (Stream v = zip.GetEntry(voc).Open()) - using (Stream o = zip.GetEntry(@out).Open()) - { - AssertVocabulary(a, v, o); - } + using ZipArchive zip = new ZipArchive(zipFile, ZipArchiveMode.Read, false, Encoding.UTF8); + using Stream v = zip.GetEntry(voc).Open(); + using Stream o = zip.GetEntry(@out).Open(); + AssertVocabulary(a, v, o); } /// /// Run a vocabulary test against a tab-separated data file inside a zip file. public static void AssertVocabulary(Analyzer a, Stream zipFile, string vocOut) { - using (ZipArchive zip = new ZipArchive(zipFile, ZipArchiveMode.Read, false, Encoding.UTF8)) - using (Stream vo = zip.GetEntry(vocOut).Open()) - { - AssertVocabulary(a, vo); - } + using ZipArchive zip = new ZipArchive(zipFile, ZipArchiveMode.Read, false, Encoding.UTF8); + using Stream vo = zip.GetEntry(vocOut).Open(); + AssertVocabulary(a, vo); } } } \ No newline at end of file diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs index e8dc0be1f8..625865f41f 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs @@ -146,38 +146,36 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va int docCount = 0; long ordCount = 0; Int64BitSet seenOrds = new Int64BitSet(valueCount); - using (IEnumerator ordIterator = ords.GetEnumerator()) + using IEnumerator ordIterator = ords.GetEnumerator(); + foreach (long? v in docToOrdCount) { - foreach (long? 
v in docToOrdCount) + if (Debugging.AssertsEnabled) Debugging.Assert(v != null); + int count = (int)v.Value; + if (Debugging.AssertsEnabled) Debugging.Assert(count >= 0); + docCount++; + ordCount += count; + + long lastOrd = -1; + for (int i = 0; i < count; i++) { - if (Debugging.AssertsEnabled) Debugging.Assert(v != null); - int count = (int)v.Value; - if (Debugging.AssertsEnabled) Debugging.Assert(count >= 0); - docCount++; - ordCount += count; - - long lastOrd = -1; - for (int i = 0; i < count; i++) - { - ordIterator.MoveNext(); - long? o = ordIterator.Current; - if (Debugging.AssertsEnabled) Debugging.Assert(o != null); - long ord = o.Value; - if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0 && ord < valueCount); - if (Debugging.AssertsEnabled) Debugging.Assert(ord > lastOrd,"ord={0},lastOrd={1}", ord, lastOrd); - seenOrds.Set(ord); - lastOrd = ord; - } + ordIterator.MoveNext(); + long? o = ordIterator.Current; + if (Debugging.AssertsEnabled) Debugging.Assert(o != null); + long ord = o.Value; + if (Debugging.AssertsEnabled) Debugging.Assert(ord >= 0 && ord < valueCount); + if (Debugging.AssertsEnabled) Debugging.Assert(ord > lastOrd, "ord={0},lastOrd={1}", ord, lastOrd); + seenOrds.Set(ord); + lastOrd = ord; } - if (Debugging.AssertsEnabled) Debugging.Assert(ordIterator.MoveNext() == false); - - if (Debugging.AssertsEnabled) Debugging.Assert(docCount == maxDoc); - if (Debugging.AssertsEnabled) Debugging.Assert(seenOrds.Cardinality() == valueCount); - CheckIterator(values.GetEnumerator(), valueCount, false); - CheckIterator(docToOrdCount.GetEnumerator(), maxDoc, false); - CheckIterator(ords.GetEnumerator(), ordCount, false); - @in.AddSortedSetField(field, values, docToOrdCount, ords); } + if (Debugging.AssertsEnabled) Debugging.Assert(ordIterator.MoveNext() == false); + + if (Debugging.AssertsEnabled) Debugging.Assert(docCount == maxDoc); + if (Debugging.AssertsEnabled) Debugging.Assert(seenOrds.Cardinality() == valueCount); + 
CheckIterator(values.GetEnumerator(), valueCount, false); + CheckIterator(docToOrdCount.GetEnumerator(), maxDoc, false); + CheckIterator(ords.GetEnumerator(), ordCount, false); + @in.AddSortedSetField(field, values, docToOrdCount, ords); } protected override void Dispose(bool disposing) diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs index d211c7bbcc..92aa1252d8 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs @@ -134,7 +134,7 @@ internal class AssertingTermsConsumer : TermsConsumer private AssertingPostingsConsumer lastPostingsConsumer = null; private long sumTotalTermFreq = 0; private long sumDocFreq = 0; - private OpenBitSet visitedDocs = new OpenBitSet(); + private readonly OpenBitSet visitedDocs = new OpenBitSet(); // LUCENENET: marked readonly internal AssertingTermsConsumer(TermsConsumer @in, FieldInfo fieldInfo) { diff --git a/src/Lucene.Net.TestFramework/Codecs/Bloom/TestBloomFilteredLucene41Postings.cs b/src/Lucene.Net.TestFramework/Codecs/Bloom/TestBloomFilteredLucene41Postings.cs index a69d6b2bbb..ea078007d9 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Bloom/TestBloomFilteredLucene41Postings.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Bloom/TestBloomFilteredLucene41Postings.cs @@ -30,7 +30,7 @@ namespace Lucene.Net.Codecs.Bloom [PostingsFormatName("TestBloomFilteredLucene41Postings")] // LUCENENET specific - using PostingsFormatName attribute to ensure the default name passed from subclasses is the same as this class name public class TestBloomFilteredLucene41Postings : PostingsFormat { - private BloomFilteringPostingsFormat @delegate; + private readonly BloomFilteringPostingsFormat @delegate; // Special class used to avoid OOM exceptions where Junit tests create many // fields. 
diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/CompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/CompressingCodec.cs index d807cb0d61..48ae592e06 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Compressing/CompressingCodec.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/CompressingCodec.cs @@ -75,7 +75,7 @@ public static CompressingCodec RandomInstance(Random random, bool withSegmentSuf /// /// Creates a compressing codec with a given . /// - public CompressingCodec(string segmentSuffix, CompressionMode compressionMode, int chunkSize) + protected CompressingCodec(string segmentSuffix, CompressionMode compressionMode, int chunkSize) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(new Lucene46Codec()) { this.storedFieldsFormat = new CompressingStoredFieldsFormat(this.Name, segmentSuffix, compressionMode, chunkSize); @@ -85,7 +85,7 @@ public CompressingCodec(string segmentSuffix, CompressionMode compressionMode, i /// /// Creates a compressing codec with an empty segment suffix. 
/// - public CompressingCodec(CompressionMode compressionMode, int chunkSize) + protected CompressingCodec(CompressionMode compressionMode, int chunkSize) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this("", compressionMode, chunkSize) { } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs index 5f0ea4440d..673388d984 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs @@ -123,9 +123,9 @@ public override FieldInfos Read(Directory directory, string segmentName, string } } - public static void Files(Directory dir, SegmentInfo info, ISet files) - { - files.Add(IndexFileNames.SegmentFileName(info.Name, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION)); - } + //public static void Files(Directory dir, SegmentInfo info, ISet files) // LUCENENET: Not used + //{ + // files.Add(IndexFileNames.SegmentFileName(info.Name, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION)); + //} } } \ No newline at end of file diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs index 60d967726d..b151b323c2 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs @@ -27,9 +27,11 @@ namespace Lucene.Net.Codecs.Lucene3x #pragma warning disable 612, 618 internal class PreFlexRWFieldsWriter : FieldsConsumer { +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly TermInfosWriter termsOut; private readonly IndexOutput freqOut; private readonly IndexOutput proxOut; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly PreFlexRWSkipListWriter skipListWriter; 
private readonly int totalNumDocs; @@ -101,11 +103,6 @@ protected override void Dispose(bool disposing) private class PreFlexTermsWriter : TermsConsumer { - internal virtual void InitializeInstanceFields() - { - postingsWriter = new PostingsWriter(this); - } - private readonly PreFlexRWFieldsWriter outerInstance; private readonly FieldInfo fieldInfo; @@ -113,13 +110,13 @@ internal virtual void InitializeInstanceFields() private readonly bool storePayloads; private readonly TermInfo termInfo = new TermInfo(); - private PostingsWriter postingsWriter; + private readonly PostingsWriter postingsWriter; // LUCENENET: marked readonly public PreFlexTermsWriter(PreFlexRWFieldsWriter outerInstance, FieldInfo fieldInfo) { this.outerInstance = outerInstance; - InitializeInstanceFields(); + postingsWriter = new PostingsWriter(this); this.fieldInfo = fieldInfo; omitTF = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY; storePayloads = fieldInfo.HasPayloads; diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs index 293d408a87..b70ab927d6 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs @@ -40,12 +40,14 @@ internal class PreFlexRWNormsConsumer : DocValuesConsumer /// Extension of norms file private const string NORMS_EXTENSION = "nrm"; - /// - /// Extension of separate norms file - [Obsolete("Only for reading existing 3.x indexes")] - private const string SEPARATE_NORMS_EXTENSION = "s"; + ///// + ///// Extension of separate norms file + //[Obsolete("Only for reading existing 3.x indexes")] + //private const string SEPARATE_NORMS_EXTENSION = "s"; // LUCENENET: IDE0051: Remove unused private member +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly IndexOutput @out; +#pragma warning restore CA2213 // Disposable fields should be 
disposed private int lastFieldNumber = -1; // only for assert public PreFlexRWNormsConsumer(Directory directory, string segment, IOContext context) diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs index 126b86e5f5..26bce3910d 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs @@ -46,17 +46,14 @@ public override FieldsProducer FieldsProducer(SegmentReadState state) // Whenever IW opens readers, eg for merging, we have to // keep terms order in UTF16: - return new Lucene3xFieldsAnonymousInnerClassHelper(this, state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.TermsIndexDivisor); + return new Lucene3xFieldsAnonymousInnerClassHelper(state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.TermsIndexDivisor); } private class Lucene3xFieldsAnonymousInnerClassHelper : Lucene3xFields { - private readonly PreFlexRWPostingsFormat outerInstance; - - public Lucene3xFieldsAnonymousInnerClassHelper(PreFlexRWPostingsFormat outerInstance, Store.Directory directory, Index.FieldInfos fieldInfos, Index.SegmentInfo segmentInfo, Store.IOContext context, int termsIndexDivisor) + public Lucene3xFieldsAnonymousInnerClassHelper(Store.Directory directory, FieldInfos fieldInfos, SegmentInfo segmentInfo, Store.IOContext context, int termsIndexDivisor) : base(directory, fieldInfos, segmentInfo, context, termsIndexDivisor) { - this.outerInstance = outerInstance; } protected override bool SortTermsByUnicode diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSkipListWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSkipListWriter.cs index 0e93f882fc..45b0ff7593 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSkipListWriter.cs +++ 
b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSkipListWriter.cs @@ -27,13 +27,13 @@ namespace Lucene.Net.Codecs.Lucene3x /// public class PreFlexRWSkipListWriter : MultiLevelSkipListWriter { - private int[] lastSkipDoc; - private int[] lastSkipPayloadLength; - private long[] lastSkipFreqPointer; - private long[] lastSkipProxPointer; + private readonly int[] lastSkipDoc; // LUCENENET: marked readonly + private readonly int[] lastSkipPayloadLength; // LUCENENET: marked readonly + private readonly long[] lastSkipFreqPointer; // LUCENENET: marked readonly + private readonly long[] lastSkipProxPointer; // LUCENENET: marked readonly - private IndexOutput freqOutput; - private IndexOutput proxOutput; + private readonly IndexOutput freqOutput; // LUCENENET: marked readonly + private readonly IndexOutput proxOutput; // LUCENENET: marked readonly private int curDoc; private bool curStorePayloads; diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs index 03f3a3d27f..608acc8918 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs @@ -31,8 +31,10 @@ internal sealed class PreFlexRWStoredFieldsWriter : StoredFieldsWriter { private readonly Directory directory; private readonly string segment; +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput fieldsStream; private IndexOutput indexStream; +#pragma warning restore CA2213 // Disposable fields should be disposed public PreFlexRWStoredFieldsWriter(Directory directory, string segment, IOContext context) { @@ -91,10 +93,9 @@ public override void Abort() { Dispose(); } -#pragma warning disable 168 - catch (Exception ignored) -#pragma warning restore 168 + catch (Exception) { + // ignored } IOUtils.DeleteFilesIgnoringExceptions(directory, 
IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION), diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs index bcc5ca310d..82ed18a3db 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs @@ -32,17 +32,14 @@ public override TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context) { - return new Lucene3xTermVectorsReaderAnonymousInnerClassHelper(this, directory, segmentInfo, fieldInfos, context); + return new Lucene3xTermVectorsReaderAnonymousInnerClassHelper(directory, segmentInfo, fieldInfos, context); } private class Lucene3xTermVectorsReaderAnonymousInnerClassHelper : Lucene3xTermVectorsReader { - private readonly PreFlexRWTermVectorsFormat outerInstance; - - public Lucene3xTermVectorsReaderAnonymousInnerClassHelper(PreFlexRWTermVectorsFormat outerInstance, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context) + public Lucene3xTermVectorsReaderAnonymousInnerClassHelper(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context) : base(directory, segmentInfo, fieldInfos, context) { - this.outerInstance = outerInstance; } protected internal override bool SortTermsByUnicode() diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs index e020deaff5..0b53109946 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs @@ -30,7 +30,9 @@ internal sealed class PreFlexRWTermVectorsWriter : 
TermVectorsWriter { private readonly Directory directory; private readonly string segment; +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput tvx = null, tvd = null, tvf = null; +#pragma warning restore CA2213 // Disposable fields should be disposed public PreFlexRWTermVectorsWriter(Directory directory, string segment, IOContext context) { @@ -192,9 +194,9 @@ public override void Abort() { Dispose(); } -#pragma warning disable 168 +#pragma warning disable 168, IDE0059 catch (Exception ignored) -#pragma warning restore 168 +#pragma warning restore 168, IDE0059 { } IOUtils.DeleteFilesIgnoringExceptions(directory, IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION)); @@ -217,10 +219,13 @@ public override void Finish(FieldInfos fis, int numDocs) /// Close all streams. 
protected override void Dispose(bool disposing) { - // make an effort to close all streams we can but remember and re-throw - // the first exception encountered in this process - IOUtils.Dispose(tvx, tvd, tvf); - tvx = tvd = tvf = null; + if (disposing) + { + // make an effort to close all streams we can but remember and re-throw + // the first exception encountered in this process + IOUtils.Dispose(tvx, tvd, tvf); + tvx = tvd = tvf = null; + } } public override IComparer Comparer => BytesRef.UTF8SortedAsUTF16Comparer; diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs index 3904ee9e81..f5bbc1f9a9 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs @@ -46,7 +46,7 @@ internal sealed class TermInfosWriter : IDisposable private FieldInfos fieldInfos; private IndexOutput output; - private TermInfo lastTi = new TermInfo(); + private readonly TermInfo lastTi = new TermInfo(); // LUCENENET: marked readonly private long size; // TODO: the default values for these two parameters should be settable from @@ -107,9 +107,9 @@ internal TermInfosWriter(Directory directory, string segment, FieldInfos fis, in { directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", (isIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION))); } -#pragma warning disable 168 +#pragma warning disable 168, IDE0059 catch (IOException ignored) -#pragma warning restore 168 +#pragma warning restore 168, IDE0059 { } } @@ -148,9 +148,9 @@ private void Initialize(Directory directory, string segment, FieldInfos fis, int { directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", (isIndex ? 
Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION))); } -#pragma warning disable 168 +#pragma warning disable 168, IDE0059 catch (IOException ignored) -#pragma warning restore 168 +#pragma warning restore 168, IDE0059 { } } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs index ed4709de34..0640c13092 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs @@ -431,7 +431,7 @@ private void AddVarDerefBytesField(FieldInfo field, IndexOutput data, IndexOutpu foreach (BytesRef v in values) { - w.Add(valueToAddress[v == null ? new BytesRef() : v]); + w.Add(valueToAddress[v ?? new BytesRef()]); } w.Finish(); } diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs index fde6cbfa9d..a30c4c4382 100644 --- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs +++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs @@ -40,7 +40,9 @@ namespace Lucene.Net.Codecs.Lucene42 #pragma warning disable 612, 618 internal class Lucene42DocValuesConsumer : DocValuesConsumer { +#pragma warning disable CA2213 // Disposable fields should be disposed internal readonly IndexOutput data, meta; +#pragma warning restore CA2213 // Disposable fields should be disposed internal readonly int maxDoc; internal readonly float acceptableOverheadRatio; @@ -343,7 +345,7 @@ public override void AddSortedField(FieldInfo field, IEnumerable value public override void AddSortedSetField(FieldInfo field, IEnumerable values, IEnumerable docToOrdCount, IEnumerable ords) { // write the ordinals as a binary field - AddBinaryField(field, new IterableAnonymousInnerClassHelper(this, docToOrdCount, ords)); + 
AddBinaryField(field, new IterableAnonymousInnerClassHelper(docToOrdCount, ords)); // write the values as FST WriteFST(field, values); @@ -351,14 +353,11 @@ public override void AddSortedSetField(FieldInfo field, IEnumerable va private class IterableAnonymousInnerClassHelper : IEnumerable { - private readonly Lucene42DocValuesConsumer outerInstance; + private readonly IEnumerable docToOrdCount; + private readonly IEnumerable ords; - private IEnumerable docToOrdCount; - private IEnumerable ords; - - public IterableAnonymousInnerClassHelper(Lucene42DocValuesConsumer outerInstance, IEnumerable docToOrdCount, IEnumerable ords) + public IterableAnonymousInnerClassHelper(IEnumerable docToOrdCount, IEnumerable ords) { - this.outerInstance = outerInstance; this.docToOrdCount = docToOrdCount; this.ords = ords; } diff --git a/src/Lucene.Net.TestFramework/Codecs/MissingOrdRemapper.cs b/src/Lucene.Net.TestFramework/Codecs/MissingOrdRemapper.cs index 14a67bca87..e7da523f15 100644 --- a/src/Lucene.Net.TestFramework/Codecs/MissingOrdRemapper.cs +++ b/src/Lucene.Net.TestFramework/Codecs/MissingOrdRemapper.cs @@ -26,7 +26,7 @@ namespace Lucene.Net.Codecs /// A utility class to write missing values for SORTED as if they were the empty string /// (to simulate pre-Lucene4.5 dv behavior for testing old codecs). /// - public class MissingOrdRemapper + public static class MissingOrdRemapper // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// /// Insert an empty byte[] to the front of this enumerable. 
@@ -37,7 +37,7 @@ public static IEnumerable InsertEmptyValue(IEnumerable itera private class IterableAnonymousInnerClassHelper : IEnumerable { - private IEnumerable iterable; + private readonly IEnumerable iterable; public IterableAnonymousInnerClassHelper(IEnumerable iterable) { @@ -56,11 +56,8 @@ IEnumerator IEnumerable.GetEnumerator() private class IteratorAnonymousInnerClassHelper : IEnumerator { - private readonly IterableAnonymousInnerClassHelper outerInstance; - public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper outerInstance) { - this.outerInstance = outerInstance; seenEmpty = false; @in = outerInstance.iterable.GetEnumerator(); } @@ -108,7 +105,7 @@ public void Dispose() private class IterableAnonymousInnerClassHelper2 : IEnumerable { - private IEnumerable iterable; + private readonly IEnumerable iterable; public IterableAnonymousInnerClassHelper2(IEnumerable iterable) { @@ -125,11 +122,8 @@ IEnumerator IEnumerable.GetEnumerator() private class IteratorAnonymousInnerClassHelper2 : IEnumerator { - private readonly IterableAnonymousInnerClassHelper2 outerInstance; - public IteratorAnonymousInnerClassHelper2(IterableAnonymousInnerClassHelper2 outerInstance) { - this.outerInstance = outerInstance; @in = outerInstance.iterable.GetEnumerator(); } @@ -171,7 +165,7 @@ public void Dispose() private class IterableAnonymousInnerClassHelper3 : IEnumerable { - private IEnumerable iterable; + private readonly IEnumerable iterable; public IterableAnonymousInnerClassHelper3(IEnumerable iterable) { @@ -188,11 +182,8 @@ IEnumerator IEnumerable.GetEnumerator() private class IteratorAnonymousInnerClassHelper3 : IEnumerator { - private readonly IterableAnonymousInnerClassHelper3 outerInstance; - public IteratorAnonymousInnerClassHelper3(IterableAnonymousInnerClassHelper3 outerInstance) { - this.outerInstance = outerInstance; @in = outerInstance.iterable.GetEnumerator(); } diff --git 
a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs index af8649d912..8c72c82f96 100644 --- a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs @@ -69,39 +69,34 @@ public MockInt32Factory(int blockSize) public override Int32IndexInput OpenInput(Directory dir, string fileName, IOContext context) { - return new FixedInt32BlockIndexInputAnonymousHelper(this, dir.OpenInput(fileName, context)); + return new FixedInt32BlockIndexInputAnonymousHelper(dir.OpenInput(fileName, context)); } private class FixedInt32BlockIndexInputAnonymousHelper : FixedInt32BlockIndexInput { - private readonly MockInt32Factory outerInstance; - - public FixedInt32BlockIndexInputAnonymousHelper(MockInt32Factory outerInstance, IndexInput input) + public FixedInt32BlockIndexInputAnonymousHelper(IndexInput input) : base(input) { - this.outerInstance = outerInstance; } protected override IBlockReader GetBlockReader(IndexInput @in, int[] buffer) { - return new BlockReaderAnonymousHelper(outerInstance, @in, buffer); + return new BlockReaderAnonymousHelper(@in, buffer); } private class BlockReaderAnonymousHelper : FixedInt32BlockIndexInput.IBlockReader { - private readonly MockInt32Factory outerInstance; private readonly IndexInput @in; private readonly int[] buffer; - public BlockReaderAnonymousHelper(MockInt32Factory outerInstance, IndexInput @in, int[] buffer) + public BlockReaderAnonymousHelper(IndexInput @in, int[] buffer) { - this.outerInstance = outerInstance; this.@in = @in; this.buffer = buffer; } - public void Seek(long pos) - { - } + //public void Seek(long pos) // LUCENENET: Not referenced; + //{ + //} public void ReadBlock() { diff --git a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs 
b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs index b4f7811cc9..417c68e5ba 100644 --- a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs @@ -39,7 +39,7 @@ namespace Lucene.Net.Codecs.MockRandom public sealed class MockRandomPostingsFormat : PostingsFormat { private readonly Random seedRandom; - private readonly string SEED_EXT = "sd"; + private const string SEED_EXT = "sd"; private class RandomAnonymousClassHelper : Random { @@ -316,21 +316,6 @@ public override FieldsConsumer FieldsConsumer(SegmentWriteState state) Console.WriteLine("MockRandomCodec: random-gap terms index (max gap=" + gap + ")"); } selector = new IndexTermSelectorAnonymousHelper(seed2, gap); - - // selector = new VariableGapTermsIndexWriter.IndexTermSelector() { - // Random rand = new Random(seed2); - - //@Override - // public bool isIndexTerm(BytesRef term, TermStats stats) - //{ - // return rand.nextInt(gap) == gap / 2; - //} - - //@Override - // public void newField(FieldInfo fieldInfo) - //{ - //} - // }; } indexWriter = new VariableGapTermsIndexWriter(state, selector); } diff --git a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs index 66fa732a69..aee0430d8c 100644 --- a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs +++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs @@ -98,8 +98,7 @@ internal class RAMPostings : FieldsProducer public override Terms GetTerms(string field) { - RAMField result; - fieldToTerms.TryGetValue(field, out result); + fieldToTerms.TryGetValue(field, out RAMField result); return result; } @@ -452,9 +451,8 @@ private class RAMDocsEnum : DocsEnum private readonly IBits liveDocs; private RAMDoc current; private int upto = -1; -#pragma warning disable 414 - private int posUpto = 0; // LUCENENET NOTE: 
Not used -#pragma warning restore 414 + //private int posUpto = 0; // LUCENENET: Never read + public RAMDocsEnum(RAMTerm ramTerm, IBits liveDocs) { @@ -478,7 +476,7 @@ public override int NextDoc() current = ramTerm.docs[upto]; if (liveDocs == null || liveDocs.Get(current.docID)) { - posUpto = 0; + //posUpto = 0; // LUCENENET: Never read return current.docID; } } diff --git a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs index ef3c24a5ed..cb3dba50fe 100644 --- a/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs +++ b/src/Lucene.Net.TestFramework/Index/AssertingAtomicReader.cs @@ -420,9 +420,9 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) // TODO: should we give this thing a random to be super-evil, // and randomly *not* unwrap? - if (reuse is AssertingDocsEnum) + if (reuse is AssertingDocsEnum assertingDocsEnum) { - reuse = ((AssertingDocsEnum)reuse).m_input; + reuse = assertingDocsEnum.m_input; } DocsEnum docs = base.Docs(liveDocs, reuse, flags); return docs == null ? null : new AssertingDocsEnum(docs); @@ -434,9 +434,9 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos // TODO: should we give this thing a random to be super-evil, // and randomly *not* unwrap? - if (reuse is AssertingDocsAndPositionsEnum) + if (reuse is AssertingDocsAndPositionsEnum assertingDocsAndPositionsEnum) { - reuse = ((AssertingDocsAndPositionsEnum)reuse).m_input; + reuse = assertingDocsAndPositionsEnum.m_input; } DocsAndPositionsEnum docs = base.DocsAndPositions(liveDocs, reuse, flags); return docs == null ? 
null : new AssertingDocsAndPositionsEnum(docs); diff --git a/src/Lucene.Net.TestFramework/Index/BaseCompressingDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseCompressingDocValuesFormatTestCase.cs index 25c399ccb2..e5fac6c3d8 100644 --- a/src/Lucene.Net.TestFramework/Index/BaseCompressingDocValuesFormatTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BaseCompressingDocValuesFormatTestCase.cs @@ -62,103 +62,94 @@ internal static long DirSize(Directory d) public virtual void TestUniqueValuesCompression() { IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (Directory dir = new RAMDirectory()) - using (IndexWriter iwriter = new IndexWriter(dir, iwc)) - { - - int uniqueValueCount = TestUtil.NextInt32(Random, 1, 256); - IList values = new List(); + using Directory dir = new RAMDirectory(); + using IndexWriter iwriter = new IndexWriter(dir, iwc); + int uniqueValueCount = TestUtil.NextInt32(Random, 1, 256); + IList values = new List(); - Document doc = new Document(); - NumericDocValuesField dvf = new NumericDocValuesField("dv", 0); - doc.Add(dvf); - for (int i = 0; i < 300; ++i) + Document doc = new Document(); + NumericDocValuesField dvf = new NumericDocValuesField("dv", 0); + doc.Add(dvf); + for (int i = 0; i < 300; ++i) + { + long value; + if (values.Count < uniqueValueCount) { - long value; - if (values.Count < uniqueValueCount) - { - value = Random.NextInt64(); - values.Add(value); - } - else - { - value = RandomPicks.RandomFrom(Random, values); - } - dvf.SetInt64Value(value); - iwriter.AddDocument(doc); + value = Random.NextInt64(); + values.Add(value); } - iwriter.ForceMerge(1); - long size1 = DirSize(dir); - for (int i = 0; i < 20; ++i) + else { - dvf.SetInt64Value(RandomPicks.RandomFrom(Random, values)); - iwriter.AddDocument(doc); + value = RandomPicks.RandomFrom(Random, values); } - iwriter.ForceMerge(1); - long size2 = DirSize(dir); - // make sure the new longs did not cost 8 bytes 
each - Assert.IsTrue(size2 < size1 + 8 * 20); + dvf.SetInt64Value(value); + iwriter.AddDocument(doc); } + iwriter.ForceMerge(1); + long size1 = DirSize(dir); + for (int i = 0; i < 20; ++i) + { + dvf.SetInt64Value(RandomPicks.RandomFrom(Random, values)); + iwriter.AddDocument(doc); + } + iwriter.ForceMerge(1); + long size2 = DirSize(dir); + // make sure the new longs did not cost 8 bytes each + Assert.IsTrue(size2 < size1 + 8 * 20); } [Test] public virtual void TestDateCompression() { IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (Directory dir = new RAMDirectory()) - using (IndexWriter iwriter = new IndexWriter(dir, iwc)) - { - - const long @base = 13; // prime - long day = 1000L * 60 * 60 * 24; + using Directory dir = new RAMDirectory(); + using IndexWriter iwriter = new IndexWriter(dir, iwc); + const long @base = 13; // prime + long day = 1000L * 60 * 60 * 24; - Document doc = new Document(); - NumericDocValuesField dvf = new NumericDocValuesField("dv", 0); - doc.Add(dvf); - for (int i = 0; i < 300; ++i) - { - dvf.SetInt64Value(@base + Random.Next(1000) * day); - iwriter.AddDocument(doc); - } - iwriter.ForceMerge(1); - long size1 = DirSize(dir); - for (int i = 0; i < 50; ++i) - { - dvf.SetInt64Value(@base + Random.Next(1000) * day); - iwriter.AddDocument(doc); - } - iwriter.ForceMerge(1); - long size2 = DirSize(dir); - // make sure the new longs costed less than if they had only been packed - Assert.IsTrue(size2 < size1 + (PackedInt32s.BitsRequired(day) * 50) / 8); + Document doc = new Document(); + NumericDocValuesField dvf = new NumericDocValuesField("dv", 0); + doc.Add(dvf); + for (int i = 0; i < 300; ++i) + { + dvf.SetInt64Value(@base + Random.Next(1000) * day); + iwriter.AddDocument(doc); + } + iwriter.ForceMerge(1); + long size1 = DirSize(dir); + for (int i = 0; i < 50; ++i) + { + dvf.SetInt64Value(@base + Random.Next(1000) * day); + iwriter.AddDocument(doc); } + iwriter.ForceMerge(1); + long size2 = 
DirSize(dir); + // make sure the new longs costed less than if they had only been packed + Assert.IsTrue(size2 < size1 + (PackedInt32s.BitsRequired(day) * 50) / 8); } [Test] public virtual void TestSingleBigValueCompression() { IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (Directory dir = new RAMDirectory()) - using (IndexWriter iwriter = new IndexWriter(dir, iwc)) + using Directory dir = new RAMDirectory(); + using IndexWriter iwriter = new IndexWriter(dir, iwc); + Document doc = new Document(); + NumericDocValuesField dvf = new NumericDocValuesField("dv", 0); + doc.Add(dvf); + for (int i = 0; i < 20000; ++i) { - - Document doc = new Document(); - NumericDocValuesField dvf = new NumericDocValuesField("dv", 0); - doc.Add(dvf); - for (int i = 0; i < 20000; ++i) - { - dvf.SetInt64Value(i & 1023); - iwriter.AddDocument(doc); - } - iwriter.ForceMerge(1); - long size1 = DirSize(dir); - dvf.SetInt64Value(long.MaxValue); + dvf.SetInt64Value(i & 1023); iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - long size2 = DirSize(dir); - // make sure the new value did not grow the bpv for every other value - Assert.IsTrue(size2 < size1 + (20000 * (63 - 10)) / 8); } + iwriter.ForceMerge(1); + long size1 = DirSize(dir); + dvf.SetInt64Value(long.MaxValue); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + long size2 = DirSize(dir); + // make sure the new value did not grow the bpv for every other value + Assert.IsTrue(size2 < size1 + (20000 * (63 - 10)) / 8); } } } \ No newline at end of file diff --git a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs index 8582dac2d4..1b003e6299 100644 --- a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs @@ -88,41 +88,36 @@ public virtual void TestOneNumber() { string longTerm = 
"longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; string text = "this is the text to be indexed. " + longTerm; - using (Directory directory = NewDirectory()) - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using Directory directory = NewDirectory(); + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { - Document doc = new Document(); - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new NumericDocValuesField("dv", 5)); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - NumericDocValues dv = ((AtomicReader)((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader))).GetNumericDocValues("dv"); - Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) - } - - } // ireader.Dispose(); - } // directory.Dispose(); + { + Document doc = new Document(); + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new NumericDocValuesField("dv", 5)); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = 
DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + NumericDocValues dv = ((AtomicReader)((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader))).GetNumericDocValues("dv"); + Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) + } } [Test] @@ -131,40 +126,36 @@ public virtual void TestOneSingle() // LUCENENET specific - renamed from TestOne string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; string text = "this is the text to be indexed. 
" + longTerm; - using (Directory directory = NewDirectory()) - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using Directory directory = NewDirectory(); + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { - Document doc = new Document(); - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new SingleDocValuesField("dv", 5.7f)); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv"); - Assert.AreEqual((long)J2N.BitConversion.SingleToRawInt32Bits(5.7f), dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - cast required because types don't match (xUnit checks this) - } - } // ireader.Dispose(); - } // directory.Dispose(); + { + Document doc = new Document(); + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new SingleDocValuesField("dv", 5.7f)); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new 
Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv"); + Assert.AreEqual((long)J2N.BitConversion.SingleToRawInt32Bits(5.7f), dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - cast required because types don't match (xUnit checks this) + } } [Test] @@ -172,44 +163,39 @@ public virtual void TestTwoNumbers() { string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; string text = "this is the text to be indexed. 
" + longTerm; - using (Directory directory = NewDirectory()) - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using Directory directory = NewDirectory(); + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { - Document doc = new Document(); - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new NumericDocValuesField("dv1", 5)); - doc.Add(new NumericDocValuesField("dv2", 17)); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1"); - Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) - dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv2"); - Assert.AreEqual(17L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 17L required because types don't match (xUnit checks this) - } - - } // ireader.Dispose(); - } // directory.Dispose(); + { + Document doc = new Document(); + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new NumericDocValuesField("dv1", 5)); + doc.Add(new NumericDocValuesField("dv2", 17)); + 
iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1"); + Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) + dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv2"); + Assert.AreEqual(17L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 17L required because types don't match (xUnit checks this) + } } [Test] @@ -217,48 +203,43 @@ public virtual void TestTwoBinaryValues() { string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; string text = "this is the text to be indexed. 
" + longTerm; - using (Directory directory = NewDirectory()) - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using Directory directory = NewDirectory(); + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { - Document doc = new Document(); - - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new BinaryDocValuesField("dv1", new BytesRef(longTerm))); - doc.Add(new BinaryDocValuesField("dv2", new BytesRef(text))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - BytesRef scratch = new BytesRef(); // LUCENENET: Moved this outside of the loop for performance - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv1"); - dv.Get(hits.ScoreDocs[i].Doc, scratch); - Assert.AreEqual(new BytesRef(longTerm), scratch); - dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv2"); - dv.Get(hits.ScoreDocs[i].Doc, scratch); - Assert.AreEqual(new BytesRef(text), scratch); - } - - } // ireader.Dispose(); - } // directory.Dispose(); + { + Document doc = new Document(); + + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new BinaryDocValuesField("dv1", new 
BytesRef(longTerm))); + doc.Add(new BinaryDocValuesField("dv2", new BytesRef(text))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + BytesRef scratch = new BytesRef(); // LUCENENET: Moved this outside of the loop for performance + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv1"); + dv.Get(hits.ScoreDocs[i].Doc, scratch); + Assert.AreEqual(new BytesRef(longTerm), scratch); + dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv2"); + dv.Get(hits.ScoreDocs[i].Doc, scratch); + Assert.AreEqual(new BytesRef(text), scratch); + } } [Test] @@ -266,47 +247,42 @@ public virtual void TestTwoFieldsMixed() { string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; string text = "this is the text to be indexed. 
" + longTerm; - using (Directory directory = NewDirectory()) - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using Directory directory = NewDirectory(); + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { - Document doc = new Document(); - - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new NumericDocValuesField("dv1", 5)); - doc.Add(new BinaryDocValuesField("dv2", new BytesRef("hello world"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - BytesRef scratch = new BytesRef(); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1"); - Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) - BinaryDocValues dv2 = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv2"); - dv2.Get(hits.ScoreDocs[i].Doc, scratch); - Assert.AreEqual(new BytesRef("hello world"), scratch); - } - - } // ireader.Dispose(); - } // directory.Dispose(); + { + Document doc = new Document(); + + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new NumericDocValuesField("dv1", 5)); + doc.Add(new 
BinaryDocValuesField("dv2", new BytesRef("hello world"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + BytesRef scratch = new BytesRef(); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + NumericDocValues dv = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetNumericDocValues("dv1"); + Assert.AreEqual(5L, dv.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) + BinaryDocValues dv2 = ((AtomicReader)((AtomicReader)ireader.Leaves[0].Reader)).GetBinaryDocValues("dv2"); + dv2.Get(hits.ScoreDocs[i].Doc, scratch); + Assert.AreEqual(new BytesRef("hello world"), scratch); + } } [Test] @@ -314,52 +290,47 @@ public virtual void TestThreeFieldsMixed() { string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; string text = "this is the text to be indexed. 
" + longTerm; - using (Directory directory = NewDirectory()) - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using Directory directory = NewDirectory(); + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { - Document doc = new Document(); - - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new SortedDocValuesField("dv1", new BytesRef("hello hello"))); - doc.Add(new NumericDocValuesField("dv2", 5)); - doc.Add(new BinaryDocValuesField("dv3", new BytesRef("hello world"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - BytesRef scratch = new BytesRef(); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv1"); - int ord = dv.GetOrd(0); - dv.LookupOrd(ord, scratch); - Assert.AreEqual(new BytesRef("hello hello"), scratch); - NumericDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv2"); - Assert.AreEqual(5L, dv2.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) - BinaryDocValues dv3 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv3"); - dv3.Get(hits.ScoreDocs[i].Doc, scratch); - Assert.AreEqual(new BytesRef("hello 
world"), scratch); - } - - } // ireader.Dispose(); - } // directory.Dispose(); + { + Document doc = new Document(); + + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new SortedDocValuesField("dv1", new BytesRef("hello hello"))); + doc.Add(new NumericDocValuesField("dv2", 5)); + doc.Add(new BinaryDocValuesField("dv3", new BytesRef("hello world"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + BytesRef scratch = new BytesRef(); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv1"); + int ord = dv.GetOrd(0); + dv.LookupOrd(ord, scratch); + Assert.AreEqual(new BytesRef("hello hello"), scratch); + NumericDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv2"); + Assert.AreEqual(5L, dv2.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) + BinaryDocValues dv3 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv3"); + dv3.Get(hits.ScoreDocs[i].Doc, scratch); + Assert.AreEqual(new BytesRef("hello world"), scratch); + } } [Test] @@ -367,52 +338,47 @@ public virtual void TestThreeFieldsMixed2() { string longTerm = 
"longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; string text = "this is the text to be indexed. " + longTerm; - using (Directory directory = NewDirectory()) - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using Directory directory = NewDirectory(); + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { - Document doc = new Document(); - - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new BinaryDocValuesField("dv1", new BytesRef("hello world"))); - doc.Add(new SortedDocValuesField("dv2", new BytesRef("hello hello"))); - doc.Add(new NumericDocValuesField("dv3", 5)); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - BytesRef scratch = new BytesRef(); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv2"); - int ord = dv.GetOrd(0); - dv.LookupOrd(ord, scratch); - Assert.AreEqual(new BytesRef("hello hello"), scratch); - NumericDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv3"); - Assert.AreEqual(5L, dv2.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't 
match (xUnit checks this) - BinaryDocValues dv3 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv1"); - dv3.Get(hits.ScoreDocs[i].Doc, scratch); - Assert.AreEqual(new BytesRef("hello world"), scratch); - } - - } // ireader.Dispose(); - } // directory.Dispose(); + { + Document doc = new Document(); + + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new BinaryDocValuesField("dv1", new BytesRef("hello world"))); + doc.Add(new SortedDocValuesField("dv2", new BytesRef("hello hello"))); + doc.Add(new NumericDocValuesField("dv3", 5)); + iwriter.AddDocument(doc); + }// iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + BytesRef scratch = new BytesRef(); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv2"); + int ord = dv.GetOrd(0); + dv.LookupOrd(ord, scratch); + Assert.AreEqual(new BytesRef("hello hello"), scratch); + NumericDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv3"); + Assert.AreEqual(5L, dv2.Get(hits.ScoreDocs[i].Doc)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) + BinaryDocValues dv3 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv1"); + dv3.Get(hits.ScoreDocs[i].Doc, scratch); + Assert.AreEqual(new BytesRef("hello world"), scratch); + } } [Test] @@ 
-420,31 +386,26 @@ public virtual void TestTwoDocumentsNumeric() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new NumericDocValuesField("dv", 1)); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new NumericDocValuesField("dv", 2)); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); + Document doc = new Document(); + doc.Add(new NumericDocValuesField("dv", 1)); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new NumericDocValuesField("dv", 2)); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); - Assert.AreEqual(1L, dv.Get(0)); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.AreEqual(2L, dv.Get(1)); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); + 
Assert.AreEqual(1L, dv.Get(0)); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.AreEqual(2L, dv.Get(1)); // LUCENENET specific - 2L required because types don't match (xUnit checks this) } [Test] @@ -452,46 +413,41 @@ public virtual void TestTwoDocumentsMerged() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(NewField("id", "0", StringField.TYPE_STORED)); - doc.Add(new NumericDocValuesField("dv", -10)); - iwriter.AddDocument(doc); - iwriter.Commit(); - doc = new Document(); - doc.Add(NewField("id", "1", StringField.TYPE_STORED)); - doc.Add(new NumericDocValuesField("dv", 99)); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(NewField("id", "0", StringField.TYPE_STORED)); + doc.Add(new NumericDocValuesField("dv", -10)); + iwriter.AddDocument(doc); + iwriter.Commit(); + doc = new Document(); + doc.Add(NewField("id", "1", StringField.TYPE_STORED)); + doc.Add(new NumericDocValuesField("dv", 99)); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + NumericDocValues dv = 
((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); + for (int i = 0; i < 2; i++) + { + Document doc2 = ((AtomicReader)ireader.Leaves[0].Reader).Document(i); + long expected; + if (doc2.Get("id").Equals("0", StringComparison.Ordinal)) + { + expected = -10; + } + else { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); - for (int i = 0; i < 2; i++) - { - Document doc2 = ((AtomicReader)ireader.Leaves[0].Reader).Document(i); - long expected; - if (doc2.Get("id").Equals("0", StringComparison.Ordinal)) - { - expected = -10; - } - else - { - expected = 99; - } - Assert.AreEqual(expected, dv.Get(i)); - } - - } // ireader.Dispose(); - } // directory.Dispose(); + expected = 99; + } + Assert.AreEqual(expected, dv.Get(i)); + } } [Test] @@ -499,31 +455,26 @@ public virtual void TestBigNumericRange() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new NumericDocValuesField("dv", long.MinValue)); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new NumericDocValuesField("dv", long.MaxValue)); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); + Document doc = new Document(); + doc.Add(new NumericDocValuesField("dv", long.MinValue)); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new NumericDocValuesField("dv", long.MaxValue)); + 
iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); - Assert.AreEqual(long.MinValue, dv.Get(0)); - Assert.AreEqual(long.MaxValue, dv.Get(1)); - - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); + Assert.AreEqual(long.MinValue, dv.Get(0)); + Assert.AreEqual(long.MaxValue, dv.Get(1)); } [Test] @@ -531,31 +482,26 @@ public virtual void TestBigNumericRange2() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new NumericDocValuesField("dv", -8841491950446638677L)); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new NumericDocValuesField("dv", 9062230939892376225L)); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 
1); - NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); - Assert.AreEqual(-8841491950446638677L, dv.Get(0)); - Assert.AreEqual(9062230939892376225L, dv.Get(1)); + Document doc = new Document(); + doc.Add(new NumericDocValuesField("dv", -8841491950446638677L)); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new NumericDocValuesField("dv", 9062230939892376225L)); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv"); + Assert.AreEqual(-8841491950446638677L, dv.Get(0)); + Assert.AreEqual(9062230939892376225L, dv.Get(1)); } [Test] @@ -565,40 +511,35 @@ public virtual void TestBytes() string text = "this is the text to be indexed. 
" + longTerm; Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new BinaryDocValuesField("dv", new BytesRef("hello world"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - BytesRef scratch = new BytesRef(); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); - dv.Get(hits.ScoreDocs[i].Doc, scratch); - Assert.AreEqual(new BytesRef("hello world"), scratch); - } - - } // ireader.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new BinaryDocValuesField("dv", new BytesRef("hello world"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = 
DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + BytesRef scratch = new BytesRef(); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); + dv.Get(hits.ScoreDocs[i].Doc, scratch); + Assert.AreEqual(new BytesRef("hello world"), scratch); + } } [Test] @@ -606,48 +547,43 @@ public virtual void TestBytesTwoDocumentsMerged() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(NewField("id", "0", StringField.TYPE_STORED)); - doc.Add(new BinaryDocValuesField("dv", new BytesRef("hello world 1"))); - iwriter.AddDocument(doc); - iwriter.Commit(); - doc = new Document(); - doc.Add(NewField("id", "1", StringField.TYPE_STORED)); - doc.Add(new BinaryDocValuesField("dv", new BytesRef("hello 2"))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using 
(RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(NewField("id", "0", StringField.TYPE_STORED)); + doc.Add(new BinaryDocValuesField("dv", new BytesRef("hello world 1"))); + iwriter.AddDocument(doc); + iwriter.Commit(); + doc = new Document(); + doc.Add(NewField("id", "1", StringField.TYPE_STORED)); + doc.Add(new BinaryDocValuesField("dv", new BytesRef("hello 2"))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); + BytesRef scratch = new BytesRef(); + for (int i = 0; i < 2; i++) + { + Document doc2 = ((AtomicReader)ireader.Leaves[0].Reader).Document(i); + string expected; + if (doc2.Get("id").Equals("0", StringComparison.Ordinal)) + { + expected = "hello world 1"; + } + else { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); - BytesRef scratch = new BytesRef(); - for (int i = 0; i < 2; i++) - { - Document doc2 = ((AtomicReader)ireader.Leaves[0].Reader).Document(i); - string expected; - if (doc2.Get("id").Equals("0", StringComparison.Ordinal)) - { - expected = "hello world 1"; - } - else - { - expected = "hello 2"; - } - dv.Get(i, scratch); - Assert.AreEqual(expected, scratch.Utf8ToString()); - } - - } // ireader.Dispose(); - } // directory.Dispose(); + expected = "hello 2"; + } + dv.Get(i, scratch); + Assert.AreEqual(expected, scratch.Utf8ToString()); + } } [Test] @@ -657,41 +593,36 @@ public virtual void TestSortedBytes() string text = "this is the text to be indexed. 
" + longTerm; Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = new IndexSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - BytesRef scratch = new BytesRef(); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - dv.LookupOrd(dv.GetOrd(hits.ScoreDocs[i].Doc), scratch); - Assert.AreEqual(new BytesRef("hello world"), scratch); - } - - } // ireader.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader 
= DirectoryReader.Open(directory); // read-only=true + IndexSearcher isearcher = new IndexSearcher(ireader); + + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + BytesRef scratch = new BytesRef(); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + dv.LookupOrd(dv.GetOrd(hits.ScoreDocs[i].Doc), scratch); + Assert.AreEqual(new BytesRef("hello world"), scratch); + } } [Test] @@ -699,34 +630,29 @@ public virtual void TestSortedBytesTwoDocuments() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 1"))); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 2"))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); + Document doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 1"))); + iwriter.AddDocument(doc); + doc = new 
Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 2"))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - BytesRef scratch = new BytesRef(); - dv.LookupOrd(dv.GetOrd(0), scratch); - Assert.AreEqual("hello world 1", scratch.Utf8ToString()); - dv.LookupOrd(dv.GetOrd(1), scratch); - Assert.AreEqual("hello world 2", scratch.Utf8ToString()); - - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + BytesRef scratch = new BytesRef(); + dv.LookupOrd(dv.GetOrd(0), scratch); + Assert.AreEqual("hello world 1", scratch.Utf8ToString()); + dv.LookupOrd(dv.GetOrd(1), scratch); + Assert.AreEqual("hello world 2", scratch.Utf8ToString()); } [Test] @@ -734,41 +660,36 @@ public virtual void TestSortedBytesThreeDocuments() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 1"))); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 2"))); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 
1"))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - Assert.AreEqual(2, dv.ValueCount); - BytesRef scratch = new BytesRef(); - Assert.AreEqual(0, dv.GetOrd(0)); - dv.LookupOrd(0, scratch); - Assert.AreEqual("hello world 1", scratch.Utf8ToString()); - Assert.AreEqual(1, dv.GetOrd(1)); - dv.LookupOrd(1, scratch); - Assert.AreEqual("hello world 2", scratch.Utf8ToString()); - Assert.AreEqual(0, dv.GetOrd(2)); - - } // ireader.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 1"))); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 2"))); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 1"))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + Assert.AreEqual(2, dv.ValueCount); + BytesRef scratch = new BytesRef(); + Assert.AreEqual(0, dv.GetOrd(0)); + dv.LookupOrd(0, scratch); + Assert.AreEqual("hello world 1", scratch.Utf8ToString()); + Assert.AreEqual(1, dv.GetOrd(1)); + 
dv.LookupOrd(1, scratch); + Assert.AreEqual("hello world 2", scratch.Utf8ToString()); + Assert.AreEqual(0, dv.GetOrd(2)); } [Test] @@ -776,103 +697,96 @@ public virtual void TestSortedBytesTwoDocumentsMerged() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(NewField("id", "0", StringField.TYPE_STORED)); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 1"))); + iwriter.AddDocument(doc); + iwriter.Commit(); + doc = new Document(); + doc.Add(NewField("id", "1", StringField.TYPE_STORED)); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 2"))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + Assert.AreEqual(2, dv.ValueCount); // 2 ords + BytesRef scratch = new BytesRef(); + dv.LookupOrd(0, scratch); + Assert.AreEqual(new BytesRef("hello world 1"), scratch); + dv.LookupOrd(1, scratch); + Assert.AreEqual(new BytesRef("hello world 2"), scratch); + for (int i = 0; i < 2; i++) + { + Document doc2 = ((AtomicReader)ireader.Leaves[0].Reader).Document(i); + string expected; + if (doc2.Get("id").Equals("0", StringComparison.Ordinal)) + { + expected = "hello world 1"; + } + else + { + expected = "hello world 2"; + } + dv.LookupOrd(dv.GetOrd(i), scratch); + Assert.AreEqual(expected, scratch.Utf8ToString()); + } + } + + [Test] + public virtual void TestSortedMergeAwayAllValues() + { + using Directory 
directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { + Document doc = new Document(); - doc.Add(NewField("id", "0", StringField.TYPE_STORED)); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 1"))); + doc.Add(new StringField("id", "0", Field.Store.NO)); iwriter.AddDocument(doc); - iwriter.Commit(); doc = new Document(); - doc.Add(NewField("id", "1", StringField.TYPE_STORED)); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 2"))); + doc.Add(new StringField("id", "1", Field.Store.NO)); + doc.Add(new SortedDocValuesField("field", new BytesRef("hello"))); iwriter.AddDocument(doc); + iwriter.Commit(); + iwriter.DeleteDocuments(new Term("id", "1")); iwriter.ForceMerge(1); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - Assert.AreEqual(2, dv.ValueCount); // 2 ords - BytesRef scratch = new BytesRef(); - dv.LookupOrd(0, scratch); - Assert.AreEqual(new BytesRef("hello world 1"), scratch); - dv.LookupOrd(1, scratch); - Assert.AreEqual(new BytesRef("hello world 2"), scratch); - for (int i = 0; i < 2; i++) - { - Document doc2 = ((AtomicReader)ireader.Leaves[0].Reader).Document(i); - string expected; - if (doc2.Get("id").Equals("0", StringComparison.Ordinal)) - { - expected = "hello world 1"; - } 
- else - { - expected = "hello world 2"; - } - dv.LookupOrd(dv.GetOrd(i), scratch); - Assert.AreEqual(expected, scratch.Utf8ToString()); - } - } // ireader.Dispose(); - } // directory.Dispose(); - } + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - [Test] - public virtual void TestSortedMergeAwayAllValues() - { - using (Directory directory = NewDirectory()) - { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + SortedDocValues dv = GetOnlySegmentReader(ireader).GetSortedDocValues("field"); + if (DefaultCodecSupportsDocsWithField) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - - Document doc = new Document(); - doc.Add(new StringField("id", "0", Field.Store.NO)); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new StringField("id", "1", Field.Store.NO)); - doc.Add(new SortedDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); - iwriter.Commit(); - iwriter.DeleteDocuments(new Term("id", "1")); - iwriter.ForceMerge(1); - - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); - - SortedDocValues dv = GetOnlySegmentReader(ireader).GetSortedDocValues("field"); - if (DefaultCodecSupportsDocsWithField) - { - Assert.AreEqual(-1, dv.GetOrd(0)); - Assert.AreEqual(0, dv.ValueCount); - } - else - { - Assert.AreEqual(0, dv.GetOrd(0)); - Assert.AreEqual(1, dv.ValueCount); - BytesRef @ref = new BytesRef(); - dv.LookupOrd(0, @ref); - Assert.AreEqual(new BytesRef(), @ref); - } + Assert.AreEqual(-1, dv.GetOrd(0)); + Assert.AreEqual(0, dv.ValueCount); } - finally + else { - ireader?.Dispose(); + Assert.AreEqual(0, dv.GetOrd(0)); + Assert.AreEqual(1, dv.ValueCount); + BytesRef @ref = new BytesRef(); + dv.LookupOrd(0, @ref); + Assert.AreEqual(new BytesRef(), @ref); } - } // directory.Dispose(); + } + finally + { + 
ireader?.Dispose(); + } } [Test] @@ -880,28 +794,23 @@ public virtual void TestBytesWithNewline() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new BinaryDocValuesField("dv", new BytesRef("hello\nworld\r1"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); - BytesRef scratch = new BytesRef(); - dv.Get(0, scratch); - Assert.AreEqual(new BytesRef("hello\nworld\r1"), scratch); + Document doc = new Document(); + doc.Add(new BinaryDocValuesField("dv", new BytesRef("hello\nworld\r1"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); + BytesRef scratch = new BytesRef(); + dv.Get(0, scratch); + Assert.AreEqual(new BytesRef("hello\nworld\r1"), scratch); } [Test] @@ -909,120 +818,114 @@ public virtual void TestMissingSortedBytes() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = 
NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 2"))); - iwriter.AddDocument(doc); - // 2nd doc missing the DV field - iwriter.AddDocument(new Document()); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - BytesRef scratch = new BytesRef(); - dv.LookupOrd(dv.GetOrd(0), scratch); - Assert.AreEqual(new BytesRef("hello world 2"), scratch); - if (DefaultCodecSupportsDocsWithField) - { - Assert.AreEqual(-1, dv.GetOrd(1)); - } - dv.Get(1, scratch); - Assert.AreEqual(new BytesRef(""), scratch); - } // ireader.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("hello world 2"))); + iwriter.AddDocument(doc); + // 2nd doc missing the DV field + iwriter.AddDocument(new Document()); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + BytesRef scratch = new BytesRef(); + dv.LookupOrd(dv.GetOrd(0), scratch); + Assert.AreEqual(new BytesRef("hello world 2"), scratch); + if 
(DefaultCodecSupportsDocsWithField) + { + Assert.AreEqual(-1, dv.GetOrd(1)); + } + dv.Get(1, scratch); + Assert.AreEqual(new BytesRef(""), scratch); } [Test] public virtual void TestSortedTermsEnum() { - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - Document doc = new Document(); - doc.Add(new SortedDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); - - doc = new Document(); - doc.Add(new SortedDocValuesField("field", new BytesRef("world"))); - iwriter.AddDocument(doc); - - doc = new Document(); - doc.Add(new SortedDocValuesField("field", new BytesRef("beer"))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); - - SortedDocValues dv = GetOnlySegmentReader(ireader).GetSortedDocValues("field"); - Assert.AreEqual(3, dv.ValueCount); - - TermsEnum termsEnum = dv.GetTermsEnum(); - - // next() - Assert.IsTrue(termsEnum.MoveNext()); - Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.IsTrue(termsEnum.MoveNext()); - Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) 
- Assert.IsTrue(termsEnum.MoveNext()); - Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - - // seekCeil() - Assert.AreEqual(SeekStatus.NOT_FOUND, termsEnum.SeekCeil(new BytesRef("ha!"))); - Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.AreEqual(SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef("beer"))); - Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SeekStatus.END, termsEnum.SeekCeil(new BytesRef("zzz"))); - - // seekExact() - Assert.IsTrue(termsEnum.SeekExact(new BytesRef("beer"))); - Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.IsTrue(termsEnum.SeekExact(new BytesRef("hello"))); - Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.IsTrue(termsEnum.SeekExact(new BytesRef("world"))); - Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - Assert.IsFalse(termsEnum.SeekExact(new BytesRef("bogus"))); - - // seek(ord) - termsEnum.SeekExact(0); - Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - termsEnum.SeekExact(1); - Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L 
required because types don't match (xUnit checks this) - termsEnum.SeekExact(2); - Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + Document doc = new Document(); + doc.Add(new SortedDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); + + doc = new Document(); + doc.Add(new SortedDocValuesField("field", new BytesRef("world"))); + iwriter.AddDocument(doc); + + doc = new Document(); + doc.Add(new SortedDocValuesField("field", new BytesRef("beer"))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); + + SortedDocValues dv = GetOnlySegmentReader(ireader).GetSortedDocValues("field"); + Assert.AreEqual(3, dv.ValueCount); + + TermsEnum termsEnum = dv.GetTermsEnum(); + + // next() + Assert.IsTrue(termsEnum.MoveNext()); + Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.IsTrue(termsEnum.MoveNext()); + Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.IsTrue(termsEnum.MoveNext()); + Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) + + // seekCeil() + Assert.AreEqual(SeekStatus.NOT_FOUND, termsEnum.SeekCeil(new BytesRef("ha!"))); + Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.AreEqual(SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef("beer"))); + 
Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SeekStatus.END, termsEnum.SeekCeil(new BytesRef("zzz"))); + + // seekExact() + Assert.IsTrue(termsEnum.SeekExact(new BytesRef("beer"))); + Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.IsTrue(termsEnum.SeekExact(new BytesRef("hello"))); + Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.IsTrue(termsEnum.SeekExact(new BytesRef("world"))); + Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) + Assert.IsFalse(termsEnum.SeekExact(new BytesRef("bogus"))); + + // seek(ord) + termsEnum.SeekExact(0); + Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + termsEnum.SeekExact(1); + Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + termsEnum.SeekExact(2); + Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) + } + finally + { + ireader?.Dispose(); + } } [Test] @@ -1030,34 +933,29 @@ public virtual void TestEmptySortedBytes() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = 
NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef(""))); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef(""))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - BytesRef scratch = new BytesRef(); - Assert.AreEqual(0, dv.GetOrd(0)); - Assert.AreEqual(0, dv.GetOrd(1)); - dv.LookupOrd(dv.GetOrd(0), scratch); - Assert.AreEqual("", scratch.Utf8ToString()); + Document doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef(""))); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef(""))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + SortedDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + BytesRef scratch = new BytesRef(); + Assert.AreEqual(0, dv.GetOrd(0)); + Assert.AreEqual(0, dv.GetOrd(1)); + dv.LookupOrd(dv.GetOrd(0), scratch); + Assert.AreEqual("", scratch.Utf8ToString()); } [Test] @@ -1065,34 +963,29 @@ public 
virtual void TestEmptyBytes() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new BinaryDocValuesField("dv", new BytesRef(""))); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new BinaryDocValuesField("dv", new BytesRef(""))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - } // iwriter.Dispose(); + Document doc = new Document(); + doc.Add(new BinaryDocValuesField("dv", new BytesRef(""))); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new BinaryDocValuesField("dv", new BytesRef(""))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + } // iwriter.Dispose(); - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); - BytesRef scratch = new BytesRef(); - dv.Get(0, scratch); - Assert.AreEqual("", scratch.Utf8ToString()); - dv.Get(1, scratch); - Assert.AreEqual("", scratch.Utf8ToString()); - - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); + BytesRef scratch = new BytesRef(); + dv.Get(0, scratch); + 
Assert.AreEqual("", scratch.Utf8ToString()); + dv.Get(1, scratch); + Assert.AreEqual("", scratch.Utf8ToString()); } [Test] @@ -1100,31 +993,26 @@ public virtual void TestVeryLargeButLegalBytes() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + var bytes = new byte[32766]; + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - var bytes = new byte[32766]; - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - BytesRef b = new BytesRef(bytes); - Random.NextBytes(bytes); - doc.Add(new BinaryDocValuesField("dv", b)); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); + Document doc = new Document(); + BytesRef b = new BytesRef(bytes); + Random.NextBytes(bytes); + doc.Add(new BinaryDocValuesField("dv", b)); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); - BytesRef scratch = new BytesRef(); - dv.Get(0, scratch); - Assert.AreEqual(new BytesRef(bytes), scratch); - - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); + BytesRef scratch = new BytesRef(); + dv.Get(0, scratch); + 
Assert.AreEqual(new BytesRef(bytes), scratch); } [Test] @@ -1132,30 +1020,26 @@ public virtual void TestVeryLargeButLegalSortedBytes() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + var bytes = new byte[32766]; + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - var bytes = new byte[32766]; - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - BytesRef b = new BytesRef(bytes); - Random.NextBytes(bytes); - doc.Add(new SortedDocValuesField("dv", b)); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); + Document doc = new Document(); + BytesRef b = new BytesRef(bytes); + Random.NextBytes(bytes); + doc.Add(new SortedDocValuesField("dv", b)); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - BytesRef scratch = new BytesRef(); - dv.Get(0, scratch); - Assert.AreEqual(new BytesRef(bytes), scratch); - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + BytesRef scratch = new BytesRef(); + dv.Get(0, scratch); + Assert.AreEqual(new BytesRef(bytes), scratch); } [Test] @@ 
-1163,30 +1047,25 @@ public virtual void TestCodecUsesOwnBytes() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new BinaryDocValuesField("dv", new BytesRef("boo!"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); + Document doc = new Document(); + doc.Add(new BinaryDocValuesField("dv", new BytesRef("boo!"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); - var mybytes = new byte[20]; - BytesRef scratch = new BytesRef(mybytes); - dv.Get(0, scratch); - Assert.AreEqual("boo!", scratch.Utf8ToString()); - Assert.IsFalse(scratch.Bytes == mybytes); - - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); + var mybytes = new byte[20]; + BytesRef scratch = new BytesRef(mybytes); + dv.Get(0, scratch); + Assert.AreEqual("boo!", scratch.Utf8ToString()); + Assert.IsFalse(scratch.Bytes == mybytes); } [Test] @@ -1194,30 +1073,25 @@ public virtual void 
TestCodecUsesOwnSortedBytes() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("boo!"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - var mybytes = new byte[20]; - BytesRef scratch = new BytesRef(mybytes); - dv.Get(0, scratch); - Assert.AreEqual("boo!", scratch.Utf8ToString()); - Assert.IsFalse(scratch.Bytes == mybytes); + Document doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("boo!"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); - } // ireader.Dispose(); - } // directory.Dispose(); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + var mybytes = new byte[20]; + BytesRef scratch = new BytesRef(mybytes); + dv.Get(0, scratch); + Assert.AreEqual("boo!", scratch.Utf8ToString()); + Assert.IsFalse(scratch.Bytes == mybytes); } [Test] @@ -1225,37 +1099,32 @@ public virtual void TestCodecUsesOwnBytesEachTime() { Analyzer analyzer 
= new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new BinaryDocValuesField("dv", new BytesRef("foo!"))); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new BinaryDocValuesField("dv", new BytesRef("bar!"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); - BytesRef scratch = new BytesRef(); - dv.Get(0, scratch); - Assert.AreEqual("foo!", scratch.Utf8ToString()); + Document doc = new Document(); + doc.Add(new BinaryDocValuesField("dv", new BytesRef("foo!"))); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new BinaryDocValuesField("dv", new BytesRef("bar!"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); - BytesRef scratch2 = new BytesRef(); - dv.Get(1, scratch2); - Assert.AreEqual("bar!", scratch2.Utf8ToString()); - // check scratch is still valid - Assert.AreEqual("foo!", scratch.Utf8ToString()); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv"); + BytesRef scratch = new BytesRef(); + dv.Get(0, 
scratch); + Assert.AreEqual("foo!", scratch.Utf8ToString()); - } // ireader.Dispose(); - } // directory.Dispose(); + BytesRef scratch2 = new BytesRef(); + dv.Get(1, scratch2); + Assert.AreEqual("bar!", scratch2.Utf8ToString()); + // check scratch is still valid + Assert.AreEqual("foo!", scratch.Utf8ToString()); } [Test] @@ -1263,37 +1132,32 @@ public virtual void TestCodecUsesOwnSortedBytesEachTime() { Analyzer analyzer = new MockAnalyzer(Random); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("foo!"))); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new SortedDocValuesField("dv", new BytesRef("bar!"))); - iwriter.AddDocument(doc); - } // iwriter.Dispose(); - - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); - BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); - BytesRef scratch = new BytesRef(); - dv.Get(0, scratch); - Assert.AreEqual("foo!", scratch.Utf8ToString()); - - BytesRef scratch2 = new BytesRef(); - dv.Get(1, scratch2); - Assert.AreEqual("bar!", scratch2.Utf8ToString()); - // check scratch is still valid - Assert.AreEqual("foo!", scratch.Utf8ToString()); - - } // ireader.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("foo!"))); + 
iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new SortedDocValuesField("dv", new BytesRef("bar!"))); + iwriter.AddDocument(doc); + } // iwriter.Dispose(); + + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); // read-only=true + if (Debugging.AssertsEnabled) Debugging.Assert(ireader.Leaves.Count == 1); + BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv"); + BytesRef scratch = new BytesRef(); + dv.Get(0, scratch); + Assert.AreEqual("foo!", scratch.Utf8ToString()); + + BytesRef scratch2 = new BytesRef(); + dv.Get(1, scratch2); + Assert.AreEqual("bar!", scratch2.Utf8ToString()); + // check scratch is still valid + Assert.AreEqual("foo!", scratch.Utf8ToString()); } /// @@ -1302,150 +1166,139 @@ public virtual void TestCodecUsesOwnSortedBytesEachTime() [Test] public virtual void TestDocValuesSimple() { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + conf.SetMergePolicy(NewLogMergePolicy()); + using (IndexWriter writer = new IndexWriter(dir, conf)) { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - conf.SetMergePolicy(NewLogMergePolicy()); - using (IndexWriter writer = new IndexWriter(dir, conf)) + for (int i = 0; i < 5; i++) { - for (int i = 0; i < 5; i++) - { - Document doc = new Document(); - doc.Add(new NumericDocValuesField("docId", i)); - doc.Add(new TextField("docId", "" + i, Field.Store.NO)); - writer.AddDocument(doc); - } - writer.Commit(); - writer.ForceMerge(1, true); + Document doc = new Document(); + doc.Add(new NumericDocValuesField("docId", i)); + doc.Add(new TextField("docId", "" + i, Field.Store.NO)); + writer.AddDocument(doc); + } + writer.Commit(); + writer.ForceMerge(1, true); - } // writer.Dispose(); + } // 
writer.Dispose(); - using (DirectoryReader reader = DirectoryReader.Open(dir, 1)) - { - Assert.AreEqual(1, reader.Leaves.Count); + using DirectoryReader reader = DirectoryReader.Open(dir, 1); + Assert.AreEqual(1, reader.Leaves.Count); - IndexSearcher searcher = new IndexSearcher(reader); + IndexSearcher searcher = new IndexSearcher(reader); - BooleanQuery query = new BooleanQuery(); - query.Add(new TermQuery(new Term("docId", "0")), Occur.SHOULD); - query.Add(new TermQuery(new Term("docId", "1")), Occur.SHOULD); - query.Add(new TermQuery(new Term("docId", "2")), Occur.SHOULD); - query.Add(new TermQuery(new Term("docId", "3")), Occur.SHOULD); - query.Add(new TermQuery(new Term("docId", "4")), Occur.SHOULD); + BooleanQuery query = new BooleanQuery(); + query.Add(new TermQuery(new Term("docId", "0")), Occur.SHOULD); + query.Add(new TermQuery(new Term("docId", "1")), Occur.SHOULD); + query.Add(new TermQuery(new Term("docId", "2")), Occur.SHOULD); + query.Add(new TermQuery(new Term("docId", "3")), Occur.SHOULD); + query.Add(new TermQuery(new Term("docId", "4")), Occur.SHOULD); - TopDocs search = searcher.Search(query, 10); - Assert.AreEqual(5, search.TotalHits); - ScoreDoc[] scoreDocs = search.ScoreDocs; - NumericDocValues docValues = GetOnlySegmentReader(reader).GetNumericDocValues("docId"); - for (int i = 0; i < scoreDocs.Length; i++) - { - Assert.AreEqual(i, scoreDocs[i].Doc); - Assert.AreEqual((long)i, docValues.Get(scoreDocs[i].Doc)); // LUCENENET specific - cast required because types don't match (xUnit checks this) - } - } // reader.Dispose(); - } // dir.Dispose(); + TopDocs search = searcher.Search(query, 10); + Assert.AreEqual(5, search.TotalHits); + ScoreDoc[] scoreDocs = search.ScoreDocs; + NumericDocValues docValues = GetOnlySegmentReader(reader).GetNumericDocValues("docId"); + for (int i = 0; i < scoreDocs.Length; i++) + { + Assert.AreEqual(i, scoreDocs[i].Doc); + Assert.AreEqual((long)i, docValues.Get(scoreDocs[i].Doc)); // LUCENENET specific - cast 
required because types don't match (xUnit checks this) + } } [Test] public virtual void TestRandomSortedBytes() { - using (Directory dir = NewDirectory()) - { - IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - - if (!DefaultCodecSupportsDocsWithField) - { - // if the codec doesnt support missing, we expect missing to be mapped to byte[] - // by the impersonator, but we have to give it a chance to merge them to this - cfg.SetMergePolicy(NewLogMergePolicy()); - } - using (RandomIndexWriter w = new RandomIndexWriter(Random, dir, cfg)) - { - int numDocs = AtLeast(100); - BytesRefHash hash = new BytesRefHash(); - IDictionary docToString = new Dictionary(); - int maxLength = TestUtil.NextInt32(Random, 1, 50); - for (int i = 0; i < numDocs; i++) - { - Document doc = new Document(); - doc.Add(NewTextField("id", "" + i, Field.Store.YES)); - string @string = TestUtil.RandomRealisticUnicodeString(Random, 1, maxLength); - BytesRef br = new BytesRef(@string); - doc.Add(new SortedDocValuesField("field", br)); - hash.Add(br); - docToString["" + i] = @string; - w.AddDocument(doc); - } - if (Rarely()) - { - w.Commit(); - } - int numDocsNoValue = AtLeast(10); - for (int i = 0; i < numDocsNoValue; i++) - { - Document doc = new Document(); - doc.Add(NewTextField("id", "noValue", Field.Store.YES)); - w.AddDocument(doc); - } - if (!DefaultCodecSupportsDocsWithField) - { - BytesRef bytesRef = new BytesRef(); - hash.Add(bytesRef); // add empty value for the gaps - } - if (Rarely()) - { - w.Commit(); - } - if (!DefaultCodecSupportsDocsWithField) - { - // if the codec doesnt support missing, we expect missing to be mapped to byte[] - // by the impersonator, but we have to give it a chance to merge them to this - w.ForceMerge(1); - } - for (int i = 0; i < numDocs; i++) - { - Document doc = new Document(); - string id = "" + i + numDocs; - doc.Add(NewTextField("id", id, Field.Store.YES)); - string @string = 
TestUtil.RandomRealisticUnicodeString(Random, 1, maxLength); - BytesRef br = new BytesRef(@string); - hash.Add(br); - docToString[id] = @string; - doc.Add(new SortedDocValuesField("field", br)); - w.AddDocument(doc); - } - w.Commit(); - using (IndexReader reader = w.GetReader()) - { - SortedDocValues docValues = MultiDocValues.GetSortedValues(reader, "field"); - int[] sort = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer); - BytesRef expected = new BytesRef(); - BytesRef actual = new BytesRef(); - Assert.AreEqual(hash.Count, docValues.ValueCount); - for (int i = 0; i < hash.Count; i++) - { - hash.Get(sort[i], expected); - docValues.LookupOrd(i, actual); - Assert.AreEqual(expected.Utf8ToString(), actual.Utf8ToString()); - int ord = docValues.LookupTerm(expected); - Assert.AreEqual(i, ord); - } - AtomicReader slowR = SlowCompositeReaderWrapper.Wrap(reader); + using Directory dir = NewDirectory(); + IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - foreach (KeyValuePair entry in docToString) - { - // pk lookup - DocsEnum termDocsEnum = slowR.GetTermDocsEnum(new Term("id", entry.Key)); - int docId = termDocsEnum.NextDoc(); - expected = new BytesRef(entry.Value); - docValues.Get(docId, actual); - Assert.AreEqual(expected, actual); - } + if (!DefaultCodecSupportsDocsWithField) + { + // if the codec doesnt support missing, we expect missing to be mapped to byte[] + // by the impersonator, but we have to give it a chance to merge them to this + cfg.SetMergePolicy(NewLogMergePolicy()); + } + using RandomIndexWriter w = new RandomIndexWriter(Random, dir, cfg); + int numDocs = AtLeast(100); + BytesRefHash hash = new BytesRefHash(); + IDictionary docToString = new Dictionary(); + int maxLength = TestUtil.NextInt32(Random, 1, 50); + for (int i = 0; i < numDocs; i++) + { + Document doc = new Document(); + doc.Add(NewTextField("id", "" + i, Field.Store.YES)); + string @string = TestUtil.RandomRealisticUnicodeString(Random, 1, 
maxLength); + BytesRef br = new BytesRef(@string); + doc.Add(new SortedDocValuesField("field", br)); + hash.Add(br); + docToString["" + i] = @string; + w.AddDocument(doc); + } + if (Rarely()) + { + w.Commit(); + } + int numDocsNoValue = AtLeast(10); + for (int i = 0; i < numDocsNoValue; i++) + { + Document doc = new Document(); + doc.Add(NewTextField("id", "noValue", Field.Store.YES)); + w.AddDocument(doc); + } + if (!DefaultCodecSupportsDocsWithField) + { + BytesRef bytesRef = new BytesRef(); + hash.Add(bytesRef); // add empty value for the gaps + } + if (Rarely()) + { + w.Commit(); + } + if (!DefaultCodecSupportsDocsWithField) + { + // if the codec doesnt support missing, we expect missing to be mapped to byte[] + // by the impersonator, but we have to give it a chance to merge them to this + w.ForceMerge(1); + } + for (int i = 0; i < numDocs; i++) + { + Document doc = new Document(); + string id = "" + i + numDocs; + doc.Add(NewTextField("id", id, Field.Store.YES)); + string @string = TestUtil.RandomRealisticUnicodeString(Random, 1, maxLength); + BytesRef br = new BytesRef(@string); + hash.Add(br); + docToString[id] = @string; + doc.Add(new SortedDocValuesField("field", br)); + w.AddDocument(doc); + } + w.Commit(); + using IndexReader reader = w.GetReader(); + SortedDocValues docValues = MultiDocValues.GetSortedValues(reader, "field"); + int[] sort = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer); + BytesRef expected = new BytesRef(); + BytesRef actual = new BytesRef(); + Assert.AreEqual(hash.Count, docValues.ValueCount); + for (int i = 0; i < hash.Count; i++) + { + hash.Get(sort[i], expected); + docValues.LookupOrd(i, actual); + Assert.AreEqual(expected.Utf8ToString(), actual.Utf8ToString()); + int ord = docValues.LookupTerm(expected); + Assert.AreEqual(i, ord); + } + AtomicReader slowR = SlowCompositeReaderWrapper.Wrap(reader); - } // reader.Dispose(); - } // w.Dispose(); - } // dir.Dispose(); + foreach (KeyValuePair entry in docToString) + { + // pk lookup + 
DocsEnum termDocsEnum = slowR.GetTermDocsEnum(new Term("id", entry.Key)); + int docId = termDocsEnum.NextDoc(); + expected = new BytesRef(entry.Value); + docValues.Get(docId, actual); + Assert.AreEqual(expected, actual); + } } internal abstract class Int64Producer @@ -1455,19 +1308,16 @@ internal abstract class Int64Producer private void DoTestNumericsVsStoredFields(long minValue, long maxValue) { - DoTestNumericsVsStoredFields(new Int64ProducerAnonymousInnerClassHelper(this, minValue, maxValue)); + DoTestNumericsVsStoredFields(new Int64ProducerAnonymousInnerClassHelper(minValue, maxValue)); } private class Int64ProducerAnonymousInnerClassHelper : Int64Producer { - private readonly BaseDocValuesFormatTestCase outerInstance; - - private long minValue; - private long maxValue; + private readonly long minValue; + private readonly long maxValue; - public Int64ProducerAnonymousInnerClassHelper(BaseDocValuesFormatTestCase outerInstance, long minValue, long maxValue) + public Int64ProducerAnonymousInnerClassHelper(long minValue, long maxValue) { - this.outerInstance = outerInstance; this.minValue = minValue; this.maxValue = maxValue; } @@ -1478,85 +1328,78 @@ internal override long Next() } } - private void DoTestNumericsVsStoredFields(Int64Producer longs) + private static void DoTestNumericsVsStoredFields(Int64Producer longs) // LUCENENET: CA1822: Mark members as static { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) + Document doc = new Document(); + Field idField = new StringField("id", "", Field.Store.NO); + Field storedField = NewStringField("stored", "", Field.Store.YES); + Field 
dvField = new NumericDocValuesField("dv", 0); + doc.Add(idField); + doc.Add(storedField); + doc.Add(dvField); + + // index some docs + int numDocs = AtLeast(300); + // numDocs should be always > 256 so that in case of a codec that optimizes + // for numbers of values <= 256, all storage layouts are tested + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 256); + for (int i = 0; i < numDocs; i++) { - Document doc = new Document(); - Field idField = new StringField("id", "", Field.Store.NO); - Field storedField = NewStringField("stored", "", Field.Store.YES); - Field dvField = new NumericDocValuesField("dv", 0); - doc.Add(idField); - doc.Add(storedField); - doc.Add(dvField); - - // index some docs - int numDocs = AtLeast(300); - // numDocs should be always > 256 so that in case of a codec that optimizes - // for numbers of values <= 256, all storage layouts are tested - if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 256); - for (int i = 0; i < numDocs; i++) + idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); + long value = longs.Next(); + storedField.SetStringValue(Convert.ToString(value, CultureInfo.InvariantCulture)); + dvField.SetInt64Value(value); + writer.AddDocument(doc); + if (Random.Next(31) == 0) { - idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); - long value = longs.Next(); - storedField.SetStringValue(Convert.ToString(value, CultureInfo.InvariantCulture)); - dvField.SetInt64Value(value); - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } + writer.Commit(); } + } - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) - { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); - } + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) + { + int id = Random.Next(numDocs); + 
writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } - // merge some segments and ensure that at least one of them has more than - // 256 values - writer.ForceMerge(numDocs / 256); + // merge some segments and ensure that at least one of them has more than + // 256 values + writer.ForceMerge(numDocs / 256); - } // writer.Dispose(); + } // writer.Dispose(); - // compare - using (DirectoryReader ir = DirectoryReader.Open(dir)) + // compare + using DirectoryReader ir = DirectoryReader.Open(dir); + foreach (AtomicReaderContext context in ir.Leaves) + { + AtomicReader r = context.AtomicReader; + NumericDocValues docValues = r.GetNumericDocValues("dv"); + for (int i = 0; i < r.MaxDoc; i++) { - foreach (AtomicReaderContext context in ir.Leaves) - { - AtomicReader r = context.AtomicReader; - NumericDocValues docValues = r.GetNumericDocValues("dv"); - for (int i = 0; i < r.MaxDoc; i++) - { - long storedValue = Convert.ToInt64(r.Document(i).Get("stored"), CultureInfo.InvariantCulture); - Assert.AreEqual(storedValue, docValues.Get(i)); - } - } - } // ir.Dispose(); - } // dir.Dispose(); - } + long storedValue = Convert.ToInt64(r.Document(i).Get("stored"), CultureInfo.InvariantCulture); + Assert.AreEqual(storedValue, docValues.Get(i)); + } + } + } private void DoTestMissingVsFieldCache(long minValue, long maxValue) { - DoTestMissingVsFieldCache(new Int64ProducerAnonymousInnerClassHelper2(this, minValue, maxValue)); + DoTestMissingVsFieldCache(new Int64ProducerAnonymousInnerClassHelper2(minValue, maxValue)); } private class Int64ProducerAnonymousInnerClassHelper2 : Int64Producer { - private readonly BaseDocValuesFormatTestCase outerInstance; + private readonly long minValue; + private readonly long maxValue; - private long minValue; - private long maxValue; - - public Int64ProducerAnonymousInnerClassHelper2(BaseDocValuesFormatTestCase outerInstance, long minValue, long maxValue) + public Int64ProducerAnonymousInnerClassHelper2(long 
minValue, long maxValue) { - this.outerInstance = outerInstance; this.minValue = minValue; this.maxValue = maxValue; } @@ -1567,71 +1410,67 @@ internal override long Next() } } - private void DoTestMissingVsFieldCache(Int64Producer longs) + private static void DoTestMissingVsFieldCache(Int64Producer longs) // LUCENENET: CA1822: Mark members as static { AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField); - using (Directory dir = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) - { - Field idField = new StringField("id", "", Field.Store.NO); - Field indexedField = NewStringField("indexed", "", Field.Store.NO); - Field dvField = new NumericDocValuesField("dv", 0); - - // index some docs - int numDocs = AtLeast(300); - // numDocs should be always > 256 so that in case of a codec that optimizes - // for numbers of values <= 256, all storage layouts are tested - if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 256); - for (int i = 0; i < numDocs; i++) + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) + { + Field idField = new StringField("id", "", Field.Store.NO); + Field indexedField = NewStringField("indexed", "", Field.Store.NO); + Field dvField = new NumericDocValuesField("dv", 0); + + // index some docs + int numDocs = AtLeast(300); + // numDocs should be always > 256 so that in case of a codec that optimizes + // for numbers of values <= 256, all storage layouts are tested + if (Debugging.AssertsEnabled) Debugging.Assert(numDocs > 256); + for (int i = 0; i < numDocs; i++) + { + idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); + long value = longs.Next(); + 
indexedField.SetStringValue(Convert.ToString(value, CultureInfo.InvariantCulture)); + dvField.SetInt64Value(value); + Document doc = new Document(); + doc.Add(idField); + // 1/4 of the time we neglect to add the fields + if (Random.Next(4) > 0) { - idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); - long value = longs.Next(); - indexedField.SetStringValue(Convert.ToString(value, CultureInfo.InvariantCulture)); - dvField.SetInt64Value(value); - Document doc = new Document(); - doc.Add(idField); - // 1/4 of the time we neglect to add the fields - if (Random.Next(4) > 0) - { - doc.Add(indexedField); - doc.Add(dvField); - } - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } + doc.Add(indexedField); + doc.Add(dvField); } - - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) + writer.AddDocument(doc); + if (Random.Next(31) == 0) { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + writer.Commit(); } + } - // merge some segments and ensure that at least one of them has more than - // 256 values - writer.ForceMerge(numDocs / 256); + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) + { + int id = Random.Next(numDocs); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } - } // writer.Dispose(); + // merge some segments and ensure that at least one of them has more than + // 256 values + writer.ForceMerge(numDocs / 256); - // compare - using (DirectoryReader ir = DirectoryReader.Open(dir)) - { - foreach (var context in ir.Leaves) - { - AtomicReader r = context.AtomicReader; - IBits expected = FieldCache.DEFAULT.GetDocsWithField(r, "indexed"); - IBits actual = FieldCache.DEFAULT.GetDocsWithField(r, "dv"); - AssertEquals(expected, actual); - } - } // ir.Dispose(); - } // 
dir.Dispose(); - } + } // writer.Dispose(); + + // compare + using DirectoryReader ir = DirectoryReader.Open(dir); + foreach (var context in ir.Leaves) + { + AtomicReader r = context.AtomicReader; + IBits expected = FieldCache.DEFAULT.GetDocsWithField(r, "indexed"); + IBits actual = FieldCache.DEFAULT.GetDocsWithField(r, "dv"); + AssertEquals(expected, actual); + } + } [Test] public virtual void TestBooleanNumericsVsStoredFields() @@ -1725,71 +1564,67 @@ public virtual void TestInt64MissingVsFieldCache() // LUCENENET specific - renam private void DoTestBinaryVsStoredFields(int minLength, int maxLength) { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) - { - Document doc = new Document(); - Field idField = new StringField("id", "", Field.Store.NO); - Field storedField = new StoredField("stored", new byte[0]); - Field dvField = new BinaryDocValuesField("dv", new BytesRef()); - doc.Add(idField); - doc.Add(storedField); - doc.Add(dvField); + Document doc = new Document(); + Field idField = new StringField("id", "", Field.Store.NO); + Field storedField = new StoredField("stored", Arrays.Empty()); + Field dvField = new BinaryDocValuesField("dv", new BytesRef()); + doc.Add(idField); + doc.Add(storedField); + doc.Add(dvField); - // index some docs - int numDocs = AtLeast(300); - for (int i = 0; i < numDocs; i++) + // index some docs + int numDocs = AtLeast(300); + for (int i = 0; i < numDocs; i++) + { + idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); + int length; + if (minLength == maxLength) { - idField.SetStringValue(Convert.ToString(i, 
CultureInfo.InvariantCulture)); - int length; - if (minLength == maxLength) - { - length = minLength; // fixed length - } - else - { - length = TestUtil.NextInt32(Random, minLength, maxLength); - } - var buffer = new byte[length]; - Random.NextBytes(buffer); - storedField.SetBytesValue(new BytesRef(buffer)); - dvField.SetBytesValue(new BytesRef(buffer)); - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } + length = minLength; // fixed length } - - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) + else { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + length = TestUtil.NextInt32(Random, minLength, maxLength); } - } // writer.Dispose(); + var buffer = new byte[length]; + Random.NextBytes(buffer); + storedField.SetBytesValue(new BytesRef(buffer)); + dvField.SetBytesValue(new BytesRef(buffer)); + writer.AddDocument(doc); + if (Random.Next(31) == 0) + { + writer.Commit(); + } + } - // compare - using (DirectoryReader ir = DirectoryReader.Open(dir)) + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) { - BytesRef scratch = new BytesRef(); // LUCENENET: Moved outside of the loop for performance - foreach (AtomicReaderContext context in ir.Leaves) - { - AtomicReader r = context.AtomicReader; - BinaryDocValues docValues = r.GetBinaryDocValues("dv"); - for (int i = 0; i < r.MaxDoc; i++) - { - BytesRef binaryValue = r.Document(i).GetBinaryValue("stored"); - - docValues.Get(i, scratch); - Assert.AreEqual(binaryValue, scratch); - } - } - } // ir.Dispose(); - } // dir.Dispose(); + int id = Random.Next(numDocs); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } + } // writer.Dispose(); + + // compare + using DirectoryReader ir = DirectoryReader.Open(dir); + BytesRef scratch = new BytesRef(); // LUCENENET: 
Moved outside of the loop for performance + foreach (AtomicReaderContext context in ir.Leaves) + { + AtomicReader r = context.AtomicReader; + BinaryDocValues docValues = r.GetBinaryDocValues("dv"); + for (int i = 0; i < r.MaxDoc; i++) + { + BytesRef binaryValue = r.Document(i).GetBinaryValue("stored"); + + docValues.Get(i, scratch); + Assert.AreEqual(binaryValue, scratch); + } + } } [Test] @@ -1815,133 +1650,125 @@ public virtual void TestBinaryVariableLengthVsStoredFields() private void DoTestSortedVsStoredFields(int minLength, int maxLength) { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) - { - Document doc = new Document(); - Field idField = new StringField("id", "", Field.Store.NO); - Field storedField = new StoredField("stored", new byte[0]); - Field dvField = new SortedDocValuesField("dv", new BytesRef()); - doc.Add(idField); - doc.Add(storedField); - doc.Add(dvField); + Document doc = new Document(); + Field idField = new StringField("id", "", Field.Store.NO); + Field storedField = new StoredField("stored", Arrays.Empty()); + Field dvField = new SortedDocValuesField("dv", new BytesRef()); + doc.Add(idField); + doc.Add(storedField); + doc.Add(dvField); - // index some docs - int numDocs = AtLeast(300); - for (int i = 0; i < numDocs; i++) + // index some docs + int numDocs = AtLeast(300); + for (int i = 0; i < numDocs; i++) + { + idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); + int length; + if (minLength == maxLength) { - idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); - int length; - if (minLength == maxLength) - { - length 
= minLength; // fixed length - } - else - { - length = TestUtil.NextInt32(Random, minLength, maxLength); - } - var buffer = new byte[length]; - Random.NextBytes(buffer); - storedField.SetBytesValue(new BytesRef(buffer)); - dvField.SetBytesValue(new BytesRef(buffer)); - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } + length = minLength; // fixed length } - - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) + else { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + length = TestUtil.NextInt32(Random, minLength, maxLength); + } + var buffer = new byte[length]; + Random.NextBytes(buffer); + storedField.SetBytesValue(new BytesRef(buffer)); + dvField.SetBytesValue(new BytesRef(buffer)); + writer.AddDocument(doc); + if (Random.Next(31) == 0) + { + writer.Commit(); } - } // writer.Dispose(); + } - // compare - using (DirectoryReader ir = DirectoryReader.Open(dir)) + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) { - BytesRef scratch = new BytesRef(); // LUCENENET: Moved outside of the loop for performance - foreach (AtomicReaderContext context in ir.Leaves) - { - AtomicReader r = context.AtomicReader; - BinaryDocValues docValues = r.GetSortedDocValues("dv"); - for (int i = 0; i < r.MaxDoc; i++) - { - BytesRef binaryValue = r.Document(i).GetBinaryValue("stored"); - - docValues.Get(i, scratch); - Assert.AreEqual(binaryValue, scratch); - } - } - } // ir.Dispose(); - } // dir.Dispose(); + int id = Random.Next(numDocs); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } + } // writer.Dispose(); + + // compare + using DirectoryReader ir = DirectoryReader.Open(dir); + BytesRef scratch = new BytesRef(); // LUCENENET: Moved outside of the loop for performance + foreach (AtomicReaderContext context in 
ir.Leaves) + { + AtomicReader r = context.AtomicReader; + BinaryDocValues docValues = r.GetSortedDocValues("dv"); + for (int i = 0; i < r.MaxDoc; i++) + { + BytesRef binaryValue = r.Document(i).GetBinaryValue("stored"); + + docValues.Get(i, scratch); + Assert.AreEqual(binaryValue, scratch); + } + } } private void DoTestSortedVsFieldCache(int minLength, int maxLength) { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) - { - Document doc = new Document(); - Field idField = new StringField("id", "", Field.Store.NO); - Field indexedField = new StringField("indexed", "", Field.Store.NO); - Field dvField = new SortedDocValuesField("dv", new BytesRef()); - doc.Add(idField); - doc.Add(indexedField); - doc.Add(dvField); + Document doc = new Document(); + Field idField = new StringField("id", "", Field.Store.NO); + Field indexedField = new StringField("indexed", "", Field.Store.NO); + Field dvField = new SortedDocValuesField("dv", new BytesRef()); + doc.Add(idField); + doc.Add(indexedField); + doc.Add(dvField); - // index some docs - int numDocs = AtLeast(300); - for (int i = 0; i < numDocs; i++) + // index some docs + int numDocs = AtLeast(300); + for (int i = 0; i < numDocs; i++) + { + idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); + int length; + if (minLength == maxLength) { - idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); - int length; - if (minLength == maxLength) - { - length = minLength; // fixed length - } - else - { - length = TestUtil.NextInt32(Random, minLength, maxLength); - } - string value = TestUtil.RandomSimpleString(Random, 
length); - indexedField.SetStringValue(value); - dvField.SetBytesValue(new BytesRef(value)); - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } + length = minLength; // fixed length } - - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) + else { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + length = TestUtil.NextInt32(Random, minLength, maxLength); } - } // writer.Dispose(); - - // compare - using (DirectoryReader ir = DirectoryReader.Open(dir)) - { - foreach (AtomicReaderContext context in ir.Leaves) + string value = TestUtil.RandomSimpleString(Random, length); + indexedField.SetStringValue(value); + dvField.SetBytesValue(new BytesRef(value)); + writer.AddDocument(doc); + if (Random.Next(31) == 0) { - AtomicReader r = context.AtomicReader; - SortedDocValues expected = FieldCache.DEFAULT.GetTermsIndex(r, "indexed"); - SortedDocValues actual = r.GetSortedDocValues("dv"); - AssertEquals(r.MaxDoc, expected, actual); + writer.Commit(); } - } // ir.Dispose(); - } // dir.Dispose(); + } + + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) + { + int id = Random.Next(numDocs); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } + } // writer.Dispose(); + + // compare + using DirectoryReader ir = DirectoryReader.Open(dir); + foreach (AtomicReaderContext context in ir.Leaves) + { + AtomicReader r = context.AtomicReader; + SortedDocValues expected = FieldCache.DEFAULT.GetTermsIndex(r, "indexed"); + SortedDocValues actual = r.GetSortedDocValues("dv"); + AssertEquals(r.MaxDoc, expected, actual); + } } [Test] @@ -1990,682 +1817,654 @@ public virtual void TestSortedVariableLengthVsStoredFields() public virtual void TestSortedSetOneValue() { AssumeTrue("Codec does not support SORTED_SET", 
DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + DirectoryReader ireader = null; + try { - DirectoryReader ireader = null; - try - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif - Random, directory)) - { + Random, directory)) + { - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - dv.SetDocument(0); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + dv.SetDocument(0); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("hello"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTwoFields() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + DirectoryReader 
ireader = null; + try { - DirectoryReader ireader = null; - try - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { + { - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - doc.Add(new SortedSetDocValuesField("field2", new BytesRef("world"))); - iwriter.AddDocument(doc); + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + doc.Add(new SortedSetDocValuesField("field2", new BytesRef("world"))); + iwriter.AddDocument(doc); - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - dv.SetDocument(0); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + dv.SetDocument(0); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("hello"), bytes); - dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field2"); + dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field2"); - dv.SetDocument(0); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + dv.SetDocument(0); + 
Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("world"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("world"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTwoDocumentsMerged() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); - iwriter.Commit(); - doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); + iwriter.Commit(); - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); + doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); - 
SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - Assert.AreEqual(2L, dv.ValueCount); // LUCENENET specific - 2L required because types don't match (xUnit checks this) + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - dv.SetDocument(0); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + Assert.AreEqual(2L, dv.ValueCount); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); + dv.SetDocument(0); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - dv.SetDocument(1); - Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("hello"), bytes); - dv.LookupOrd(1, bytes); - Assert.AreEqual(new BytesRef("world"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + dv.SetDocument(1); + Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + + dv.LookupOrd(1, bytes); + Assert.AreEqual(new BytesRef("world"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTwoValues() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = 
NewDirectory(); + DirectoryReader ireader = null; + try { - DirectoryReader ireader = null; - try - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, directory)) - { - - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); - iwriter.AddDocument(doc); - - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); - - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - - dv.SetDocument(0); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); - - dv.LookupOrd(1, bytes); - Assert.AreEqual(new BytesRef("world"), bytes); - } - finally { - ireader?.Dispose(); - } - } // directory.Dispose(); + + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); + iwriter.AddDocument(doc); + + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); + + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + + dv.SetDocument(0); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + 
Assert.AreEqual(new BytesRef("hello"), bytes); + + dv.LookupOrd(1, bytes); + Assert.AreEqual(new BytesRef("world"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTwoValuesUnordered() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + DirectoryReader ireader = null; + try { - DirectoryReader ireader = null; - try - { - using (RandomIndexWriter iwriter = new RandomIndexWriter( + using (RandomIndexWriter iwriter = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION - this, + this, #endif - Random, directory)) - { + Random, directory)) + { - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - dv.SetDocument(0); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + dv.SetDocument(0); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match 
(xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("hello"), bytes); - dv.LookupOrd(1, bytes); - Assert.AreEqual(new BytesRef("world"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + dv.LookupOrd(1, bytes); + Assert.AreEqual(new BytesRef("world"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetThreeValuesTwoDocs() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); - iwriter.AddDocument(doc); - iwriter.Commit(); - - doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("beer"))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); - - SortedSetDocValues dv = 
GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - Assert.AreEqual(3L, dv.ValueCount); // LUCENENET specific - 3L required because types don't match (xUnit checks this) - - dv.SetDocument(0); - Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.AreEqual(2L, dv.NextOrd()); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - - dv.SetDocument(1); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("beer"), bytes); - - dv.LookupOrd(1, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); - - dv.LookupOrd(2, bytes); - Assert.AreEqual(new BytesRef("world"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); + iwriter.AddDocument(doc); + iwriter.Commit(); + + doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("beer"))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); + + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + Assert.AreEqual(3L, dv.ValueCount); // LUCENENET specific - 3L required because types don't match (xUnit checks this) + + dv.SetDocument(0); + Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because 
types don't match (xUnit checks this) + Assert.AreEqual(2L, dv.NextOrd()); // LUCENENET specific - 2L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + + dv.SetDocument(1); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(1L, dv.NextOrd()); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("beer"), bytes); + + dv.LookupOrd(1, bytes); + Assert.AreEqual(new BytesRef("hello"), bytes); + + dv.LookupOrd(2, bytes); + Assert.AreEqual(new BytesRef("world"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTwoDocumentsLastMissing() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); - 
doc = new Document(); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); + doc = new Document(); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - Assert.AreEqual(1L, dv.ValueCount); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + Assert.AreEqual(1L, dv.ValueCount); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - dv.SetDocument(0); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + dv.SetDocument(0); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("hello"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTwoDocumentsLastMissingMerge() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = 
NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); - iwriter.Commit(); + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); + iwriter.Commit(); - doc = new Document(); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); + doc = new Document(); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - Assert.AreEqual(1L, dv.ValueCount); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + Assert.AreEqual(1L, dv.ValueCount); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - dv.SetDocument(0); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + dv.SetDocument(0); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new 
BytesRef("hello"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTwoDocumentsFirstMissing() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - Document doc = new Document(); - iwriter.AddDocument(doc); + Document doc = new Document(); + iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); - iwriter.ForceMerge(1); - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); + iwriter.ForceMerge(1); + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - Assert.AreEqual(1L, dv.ValueCount); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + Assert.AreEqual(1L, dv.ValueCount); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - dv.SetDocument(1); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because 
types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + dv.SetDocument(1); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("hello"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTwoDocumentsFirstMissingMerge() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - Document doc = new Document(); - iwriter.AddDocument(doc); - iwriter.Commit(); + Document doc = new Document(); + iwriter.AddDocument(doc); + iwriter.Commit(); - doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); - iwriter.ForceMerge(1); + doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); + iwriter.ForceMerge(1); - ireader = 
iwriter.GetReader(); - } // iwriter.Dispose(); + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - Assert.AreEqual(1L, dv.ValueCount); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + Assert.AreEqual(1L, dv.ValueCount); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - dv.SetDocument(1); - Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); + dv.SetDocument(1); + Assert.AreEqual(0L, dv.NextOrd()); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dv.NextOrd()); - BytesRef bytes = new BytesRef(); - dv.LookupOrd(0, bytes); - Assert.AreEqual(new BytesRef("hello"), bytes); - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + BytesRef bytes = new BytesRef(); + dv.LookupOrd(0, bytes); + Assert.AreEqual(new BytesRef("hello"), bytes); + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetMergeAwayAllValues() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new 
RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - Document doc = new Document(); - doc.Add(new StringField("id", "0", Field.Store.NO)); - iwriter.AddDocument(doc); - doc = new Document(); - doc.Add(new StringField("id", "1", Field.Store.NO)); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - iwriter.AddDocument(doc); - iwriter.Commit(); - iwriter.DeleteDocuments(new Term("id", "1")); - iwriter.ForceMerge(1); - - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); - - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - Assert.AreEqual(0L, dv.ValueCount); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + Document doc = new Document(); + doc.Add(new StringField("id", "0", Field.Store.NO)); + iwriter.AddDocument(doc); + doc = new Document(); + doc.Add(new StringField("id", "1", Field.Store.NO)); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + iwriter.AddDocument(doc); + iwriter.Commit(); + iwriter.DeleteDocuments(new Term("id", "1")); + iwriter.ForceMerge(1); + + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); + + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + Assert.AreEqual(0L, dv.ValueCount); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + } + finally + { + ireader?.Dispose(); + } } [Test] public virtual void TestSortedSetTermsEnum() { AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory directory = NewDirectory()) + using Directory directory = NewDirectory(); + Analyzer analyzer = new MockAnalyzer(Random); + IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwconfig.SetMergePolicy(NewLogMergePolicy()); + 
DirectoryReader ireader = null; + try { - Analyzer analyzer = new MockAnalyzer(Random); - IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - iwconfig.SetMergePolicy(NewLogMergePolicy()); - DirectoryReader ireader = null; - try + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) { - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, iwconfig)) - { - Document doc = new Document(); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("world"))); - doc.Add(new SortedSetDocValuesField("field", new BytesRef("beer"))); - iwriter.AddDocument(doc); - - ireader = iwriter.GetReader(); - } // iwriter.Dispose(); - - SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); - Assert.AreEqual(3L, dv.ValueCount); // LUCENENET specific - 3L required because types don't match (xUnit checks this) - - TermsEnum termsEnum = dv.GetTermsEnum(); - - // next() - Assert.IsTrue(termsEnum.MoveNext()); - Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.IsTrue(termsEnum.MoveNext()); - Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.IsTrue(termsEnum.MoveNext()); - Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - - // seekCeil() - Assert.AreEqual(SeekStatus.NOT_FOUND, termsEnum.SeekCeil(new BytesRef("ha!"))); - Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - 
Assert.AreEqual(SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef("beer"))); - Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(SeekStatus.END, termsEnum.SeekCeil(new BytesRef("zzz"))); - - // seekExact() - Assert.IsTrue(termsEnum.SeekExact(new BytesRef("beer"))); - Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.IsTrue(termsEnum.SeekExact(new BytesRef("hello"))); - Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - Assert.IsTrue(termsEnum.SeekExact(new BytesRef("world"))); - Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - Assert.IsFalse(termsEnum.SeekExact(new BytesRef("bogus"))); - - // seek(ord) - termsEnum.SeekExact(0); - Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - termsEnum.SeekExact(1); - Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) - termsEnum.SeekExact(2); - Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); - Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) - } - finally - { - ireader?.Dispose(); - } - } // directory.Dispose(); + Document doc = new Document(); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("hello"))); + doc.Add(new SortedSetDocValuesField("field", new 
BytesRef("world"))); + doc.Add(new SortedSetDocValuesField("field", new BytesRef("beer"))); + iwriter.AddDocument(doc); + + ireader = iwriter.GetReader(); + } // iwriter.Dispose(); + + SortedSetDocValues dv = GetOnlySegmentReader(ireader).GetSortedSetDocValues("field"); + Assert.AreEqual(3L, dv.ValueCount); // LUCENENET specific - 3L required because types don't match (xUnit checks this) + + TermsEnum termsEnum = dv.GetTermsEnum(); + + // next() + Assert.IsTrue(termsEnum.MoveNext()); + Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.IsTrue(termsEnum.MoveNext()); + Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.IsTrue(termsEnum.MoveNext()); + Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) + + // seekCeil() + Assert.AreEqual(SeekStatus.NOT_FOUND, termsEnum.SeekCeil(new BytesRef("ha!"))); + Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.AreEqual(SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef("beer"))); + Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(SeekStatus.END, termsEnum.SeekCeil(new BytesRef("zzz"))); + + // seekExact() + Assert.IsTrue(termsEnum.SeekExact(new BytesRef("beer"))); + Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + 
Assert.IsTrue(termsEnum.SeekExact(new BytesRef("hello"))); + Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + Assert.IsTrue(termsEnum.SeekExact(new BytesRef("world"))); + Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) + Assert.IsFalse(termsEnum.SeekExact(new BytesRef("bogus"))); + + // seek(ord) + termsEnum.SeekExact(0); + Assert.AreEqual("beer", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(0L, termsEnum.Ord); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + termsEnum.SeekExact(1); + Assert.AreEqual("hello", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(1L, termsEnum.Ord); // LUCENENET specific - 1L required because types don't match (xUnit checks this) + termsEnum.SeekExact(2); + Assert.AreEqual("world", termsEnum.Term.Utf8ToString()); + Assert.AreEqual(2L, termsEnum.Ord); // LUCENENET specific - 2L required because types don't match (xUnit checks this) + } + finally + { + ireader?.Dispose(); + } } private void DoTestSortedSetVsStoredFields(int minLength, int maxLength, int maxValuesPerDoc) { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) { - IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) - { - // index some docs - int numDocs = AtLeast(300); - for (int i = 0; i < numDocs; i++) + // index some docs + int numDocs = AtLeast(300); + for (int i = 0; i < numDocs; i++) + { + Document doc = new Document(); + Field idField = new 
StringField("id", Convert.ToString(i, CultureInfo.InvariantCulture), Field.Store.NO); + doc.Add(idField); + int length; + if (minLength == maxLength) { - Document doc = new Document(); - Field idField = new StringField("id", Convert.ToString(i, CultureInfo.InvariantCulture), Field.Store.NO); - doc.Add(idField); - int length; - if (minLength == maxLength) - { - length = minLength; // fixed length - } - else - { - length = TestUtil.NextInt32(Random, minLength, maxLength); - } - int numValues = TestUtil.NextInt32(Random, 0, maxValuesPerDoc); - - // create a random set of strings - // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java - JCG.SortedSet values = new JCG.SortedSet(StringComparer.Ordinal); - for (int v = 0; v < numValues; v++) - { - values.Add(TestUtil.RandomSimpleString(Random, length)); - } + length = minLength; // fixed length + } + else + { + length = TestUtil.NextInt32(Random, minLength, maxLength); + } + int numValues = TestUtil.NextInt32(Random, 0, maxValuesPerDoc); - // add ordered to the stored field - foreach (string v in values) - { - doc.Add(new StoredField("stored", v)); - } + // create a random set of strings + // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java + JCG.SortedSet values = new JCG.SortedSet(StringComparer.Ordinal); + for (int v = 0; v < numValues; v++) + { + values.Add(TestUtil.RandomSimpleString(Random, length)); + } - // add in any order to the dv field - IList unordered = new List(values); - unordered.Shuffle(Random); - foreach (string v in unordered) - { - doc.Add(new SortedSetDocValuesField("dv", new BytesRef(v))); - } + // add ordered to the stored field + foreach (string v in values) + { + doc.Add(new StoredField("stored", v)); + } - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } + // add in any order to the dv field + IList unordered = new List(values); + unordered.Shuffle(Random); + foreach (string v in unordered) + { + 
doc.Add(new SortedSetDocValuesField("dv", new BytesRef(v))); } - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) + writer.AddDocument(doc); + if (Random.Next(31) == 0) { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + writer.Commit(); } - } // writer.Dispose(); + } - // compare - using (DirectoryReader ir = DirectoryReader.Open(dir)) + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) { - foreach (AtomicReaderContext context in ir.Leaves) + int id = Random.Next(numDocs); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } + } // writer.Dispose(); + + // compare + using DirectoryReader ir = DirectoryReader.Open(dir); + foreach (AtomicReaderContext context in ir.Leaves) + { + AtomicReader r = context.AtomicReader; + SortedSetDocValues docValues = r.GetSortedSetDocValues("dv"); + BytesRef scratch = new BytesRef(); + for (int i = 0; i < r.MaxDoc; i++) + { + string[] stringValues = r.Document(i).GetValues("stored"); + if (docValues != null) { - AtomicReader r = context.AtomicReader; - SortedSetDocValues docValues = r.GetSortedSetDocValues("dv"); - BytesRef scratch = new BytesRef(); - for (int i = 0; i < r.MaxDoc; i++) - { - string[] stringValues = r.Document(i).GetValues("stored"); - if (docValues != null) - { - docValues.SetDocument(i); - } - for (int j = 0; j < stringValues.Length; j++) - { - if (Debugging.AssertsEnabled) Debugging.Assert(docValues != null); - long ord = docValues.NextOrd(); - if (Debugging.AssertsEnabled) Debugging.Assert(ord != SortedSetDocValues.NO_MORE_ORDS); - docValues.LookupOrd(ord, scratch); - Assert.AreEqual(stringValues[j], scratch.Utf8ToString()); - } - if (Debugging.AssertsEnabled) Debugging.Assert(docValues == null || docValues.NextOrd() == SortedSetDocValues.NO_MORE_ORDS); - } + 
docValues.SetDocument(i); + } + for (int j = 0; j < stringValues.Length; j++) + { + if (Debugging.AssertsEnabled) Debugging.Assert(docValues != null); + long ord = docValues.NextOrd(); + if (Debugging.AssertsEnabled) Debugging.Assert(ord != SortedSetDocValues.NO_MORE_ORDS); + docValues.LookupOrd(ord, scratch); + Assert.AreEqual(stringValues[j], scratch.Utf8ToString()); } - } // ir.Dispose(); - } // dir.Dispose(); + if (Debugging.AssertsEnabled) Debugging.Assert(docValues == null || docValues.NextOrd() == SortedSetDocValues.NO_MORE_ORDS); + } + } } [Test] @@ -2714,7 +2513,7 @@ public virtual void TestSortedSetVariableLengthSingleValuedVsStoredFields() } } - private void AssertEquals(IBits expected, IBits actual) + private static void AssertEquals(IBits expected, IBits actual) // LUCENENET: CA1822: Mark members as static { Assert.AreEqual(expected.Length, actual.Length); for (int i = 0; i < expected.Length; i++) @@ -2723,12 +2522,12 @@ private void AssertEquals(IBits expected, IBits actual) } } - private void AssertEquals(int maxDoc, SortedDocValues expected, SortedDocValues actual) + private static void AssertEquals(int maxDoc, SortedDocValues expected, SortedDocValues actual) // LUCENENET: CA1822: Mark members as static { AssertEquals(maxDoc, new SingletonSortedSetDocValues(expected), new SingletonSortedSetDocValues(actual)); } - private void AssertEquals(int maxDoc, SortedSetDocValues expected, SortedSetDocValues actual) + private static void AssertEquals(int maxDoc, SortedSetDocValues expected, SortedSetDocValues actual) // LUCENENET: CA1822: Mark members as static { // can be null for the segment if no docs actually had any SortedDocValues // in this case FC.getDocTermsOrds returns EMPTY @@ -2765,7 +2564,7 @@ private void AssertEquals(int maxDoc, SortedSetDocValues expected, SortedSetDocV AssertEquals(expected.ValueCount, expected.GetTermsEnum(), actual.GetTermsEnum()); } - private void AssertEquals(long numOrds, TermsEnum expected, TermsEnum actual) + private 
static void AssertEquals(long numOrds, TermsEnum expected, TermsEnum actual) // LUCENENET: CA1822: Mark members as static { // sequential next() through all terms while (expected.MoveNext()) @@ -2839,92 +2638,87 @@ private void AssertEquals(long numOrds, TermsEnum expected, TermsEnum actual) private void DoTestSortedSetVsUninvertedField(int minLength, int maxLength) { - using (Directory dir = NewDirectory()) - { - IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) - { - - // index some docs - int numDocs = AtLeast(300); - for (int i = 0; i < numDocs; i++) - { - Document doc = new Document(); - Field idField = new StringField("id", Convert.ToString(i, CultureInfo.InvariantCulture), Field.Store.NO); - doc.Add(idField); - int length; - if (minLength == maxLength) - { - length = minLength; // fixed length - } - else - { - length = TestUtil.NextInt32(Random, minLength, maxLength); - } - int numValues = Random.Next(17); - // create a random list of strings - IList values = new List(); - for (int v = 0; v < numValues; v++) - { - values.Add(TestUtil.RandomSimpleString(Random, length)); - } + using Directory dir = NewDirectory(); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + using RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf); - // add in any order to the indexed field - IList unordered = new List(values); - unordered.Shuffle(Random); - foreach (string v in unordered) - { - doc.Add(NewStringField("indexed", v, Field.Store.NO)); - } + // index some docs + int numDocs = AtLeast(300); + for (int i = 0; i < numDocs; i++) + { + Document doc = new Document(); + Field idField = new StringField("id", Convert.ToString(i, CultureInfo.InvariantCulture), Field.Store.NO); + doc.Add(idField); + int length; + if (minLength == maxLength) + { + length = minLength; // fixed length + } + else + { 
+ length = TestUtil.NextInt32(Random, minLength, maxLength); + } + int numValues = Random.Next(17); + // create a random list of strings + IList values = new List(); + for (int v = 0; v < numValues; v++) + { + values.Add(TestUtil.RandomSimpleString(Random, length)); + } - // add in any order to the dv field - IList unordered2 = new List(values); - unordered2.Shuffle(Random); - foreach (string v in unordered2) - { - doc.Add(new SortedSetDocValuesField("dv", new BytesRef(v))); - } + // add in any order to the indexed field + IList unordered = new List(values); + unordered.Shuffle(Random); + foreach (string v in unordered) + { + doc.Add(NewStringField("indexed", v, Field.Store.NO)); + } - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } - } + // add in any order to the dv field + IList unordered2 = new List(values); + unordered2.Shuffle(Random); + foreach (string v in unordered2) + { + doc.Add(new SortedSetDocValuesField("dv", new BytesRef(v))); + } - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) - { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); - } + writer.AddDocument(doc); + if (Random.Next(31) == 0) + { + writer.Commit(); + } + } - // compare per-segment - using (DirectoryReader ir = writer.GetReader()) - { - foreach (AtomicReaderContext context in ir.Leaves) - { - AtomicReader r = context.AtomicReader; - SortedSetDocValues expected = FieldCache.DEFAULT.GetDocTermOrds(r, "indexed"); - SortedSetDocValues actual = r.GetSortedSetDocValues("dv"); - AssertEquals(r.MaxDoc, expected, actual); - } - } // ir.Dispose(); + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) + { + int id = Random.Next(numDocs); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } - writer.ForceMerge(1); + // compare 
per-segment + using (DirectoryReader ir = writer.GetReader()) + { + foreach (AtomicReaderContext context in ir.Leaves) + { + AtomicReader r = context.AtomicReader; + SortedSetDocValues expected = FieldCache.DEFAULT.GetDocTermOrds(r, "indexed"); + SortedSetDocValues actual = r.GetSortedSetDocValues("dv"); + AssertEquals(r.MaxDoc, expected, actual); + } + } // ir.Dispose(); - // now compare again after the merge - using (DirectoryReader ir = writer.GetReader()) - { - AtomicReader ar = GetOnlySegmentReader(ir); - SortedSetDocValues expected_ = FieldCache.DEFAULT.GetDocTermOrds(ar, "indexed"); - SortedSetDocValues actual_ = ar.GetSortedSetDocValues("dv"); - AssertEquals(ir.MaxDoc, expected_, actual_); - } // ir.Dispose(); + writer.ForceMerge(1); - } // writer.Dispose(); - } // dir.Dispose(); + // now compare again after the merge + using (DirectoryReader ir = writer.GetReader()) + { + AtomicReader ar = GetOnlySegmentReader(ir); + SortedSetDocValues expected_ = FieldCache.DEFAULT.GetDocTermOrds(ar, "indexed"); + SortedSetDocValues actual_ = ar.GetSortedSetDocValues("dv"); + AssertEquals(ir.MaxDoc, expected_, actual_); + } // ir.Dispose(); } [Test] @@ -2958,21 +2752,18 @@ public virtual void TestGCDCompression() { long min = -(((long)Random.Next(1 << 30)) << 32); long mul = Random.Next() & 0xFFFFFFFFL; - Int64Producer longs = new Int64ProducerAnonymousInnerClassHelper3(this, min, mul); + Int64Producer longs = new Int64ProducerAnonymousInnerClassHelper3(min, mul); DoTestNumericsVsStoredFields(longs); } } private class Int64ProducerAnonymousInnerClassHelper3 : Int64Producer { - private readonly BaseDocValuesFormatTestCase outerInstance; + private readonly long min; + private readonly long mul; - private long min; - private long mul; - - public Int64ProducerAnonymousInnerClassHelper3(BaseDocValuesFormatTestCase outerInstance, long min, long mul) + public Int64ProducerAnonymousInnerClassHelper3(long min, long mul) { - this.outerInstance = outerInstance; this.min = min; 
this.mul = mul; } @@ -2997,20 +2788,13 @@ public virtual void TestZeroOrMin() int numIterations = AtLeast(1); for (int i = 0; i < numIterations; i++) { - Int64Producer longs = new Int64ProducerAnonymousInnerClassHelper4(this); + Int64Producer longs = new Int64ProducerAnonymousInnerClassHelper4(); DoTestNumericsVsStoredFields(longs); } } private class Int64ProducerAnonymousInnerClassHelper4 : Int64Producer { - private readonly BaseDocValuesFormatTestCase outerInstance; - - public Int64ProducerAnonymousInnerClassHelper4(BaseDocValuesFormatTestCase outerInstance) - { - this.outerInstance = outerInstance; - } - internal override long Next() { return Random.NextBoolean() ? 0 : long.MinValue; @@ -3021,230 +2805,206 @@ internal override long Next() public virtual void TestTwoNumbersOneMissing() { AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new StringField("id", "0", Field.Store.YES)); - doc.Add(new NumericDocValuesField("dv1", 0)); - iw.AddDocument(doc); - doc = new Document(); - doc.Add(new StringField("id", "1", Field.Store.YES)); - iw.AddDocument(doc); - iw.ForceMerge(1); - } // iw.Dispose(); - - using (IndexReader ir = DirectoryReader.Open(directory)) - { - Assert.AreEqual(1, ir.Leaves.Count); - AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; - NumericDocValues dv = ar.GetNumericDocValues("dv1"); - Assert.AreEqual(0L, dv.Get(0)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(0L, dv.Get(1)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - IBits docsWithField = ar.GetDocsWithField("dv1"); - Assert.IsTrue(docsWithField.Get(0)); - 
Assert.IsFalse(docsWithField.Get(1)); - } // ir.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new StringField("id", "0", Field.Store.YES)); + doc.Add(new NumericDocValuesField("dv1", 0)); + iw.AddDocument(doc); + doc = new Document(); + doc.Add(new StringField("id", "1", Field.Store.YES)); + iw.AddDocument(doc); + iw.ForceMerge(1); + } // iw.Dispose(); + + using IndexReader ir = DirectoryReader.Open(directory); + Assert.AreEqual(1, ir.Leaves.Count); + AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; + NumericDocValues dv = ar.GetNumericDocValues("dv1"); + Assert.AreEqual(0L, dv.Get(0)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(0L, dv.Get(1)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + IBits docsWithField = ar.GetDocsWithField("dv1"); + Assert.IsTrue(docsWithField.Get(0)); + Assert.IsFalse(docsWithField.Get(1)); } [Test] public virtual void TestTwoNumbersOneMissingWithMerging() { AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new StringField("id", "0", Field.Store.YES)); - doc.Add(new NumericDocValuesField("dv1", 0)); - iw.AddDocument(doc); - iw.Commit(); - doc = new Document(); - doc.Add(new StringField("id", "1", Field.Store.YES)); - iw.AddDocument(doc); - iw.ForceMerge(1); - } // iw.Dispose(); - - using (IndexReader ir = 
DirectoryReader.Open(directory)) - { - Assert.AreEqual(1, ir.Leaves.Count); - AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; - NumericDocValues dv = ar.GetNumericDocValues("dv1"); - Assert.AreEqual(0L, dv.Get(0)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(0L, dv.Get(1)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - IBits docsWithField = ar.GetDocsWithField("dv1"); - Assert.IsTrue(docsWithField.Get(0)); - Assert.IsFalse(docsWithField.Get(1)); - } // ir.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new StringField("id", "0", Field.Store.YES)); + doc.Add(new NumericDocValuesField("dv1", 0)); + iw.AddDocument(doc); + iw.Commit(); + doc = new Document(); + doc.Add(new StringField("id", "1", Field.Store.YES)); + iw.AddDocument(doc); + iw.ForceMerge(1); + } // iw.Dispose(); + + using IndexReader ir = DirectoryReader.Open(directory); + Assert.AreEqual(1, ir.Leaves.Count); + AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; + NumericDocValues dv = ar.GetNumericDocValues("dv1"); + Assert.AreEqual(0L, dv.Get(0)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(0L, dv.Get(1)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + IBits docsWithField = ar.GetDocsWithField("dv1"); + Assert.IsTrue(docsWithField.Get(0)); + Assert.IsFalse(docsWithField.Get(1)); } [Test] public virtual void TestThreeNumbersOneMissingWithMerging() { AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = 
NewIndexWriterConfig(TEST_VERSION_CURRENT, null); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new StringField("id", "0", Field.Store.YES)); - doc.Add(new NumericDocValuesField("dv1", 0)); - iw.AddDocument(doc); - doc = new Document(); - doc.Add(new StringField("id", "1", Field.Store.YES)); - iw.AddDocument(doc); - iw.Commit(); - doc = new Document(); - doc.Add(new StringField("id", "2", Field.Store.YES)); - doc.Add(new NumericDocValuesField("dv1", 5)); - iw.AddDocument(doc); - iw.ForceMerge(1); - } // iw.Dispose(); - - using (IndexReader ir = DirectoryReader.Open(directory)) - { - Assert.AreEqual(1, ir.Leaves.Count); - AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; - NumericDocValues dv = ar.GetNumericDocValues("dv1"); - Assert.AreEqual(0L, dv.Get(0)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(0L, dv.Get(1)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) - Assert.AreEqual(5L, dv.Get(2)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) - IBits docsWithField = ar.GetDocsWithField("dv1"); - Assert.IsTrue(docsWithField.Get(0)); - Assert.IsFalse(docsWithField.Get(1)); - Assert.IsTrue(docsWithField.Get(2)); - } // ir.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new StringField("id", "0", Field.Store.YES)); + doc.Add(new NumericDocValuesField("dv1", 0)); + iw.AddDocument(doc); + doc = new Document(); + doc.Add(new StringField("id", "1", Field.Store.YES)); + iw.AddDocument(doc); + iw.Commit(); + doc = new Document(); + doc.Add(new 
StringField("id", "2", Field.Store.YES)); + doc.Add(new NumericDocValuesField("dv1", 5)); + iw.AddDocument(doc); + iw.ForceMerge(1); + } // iw.Dispose(); + + using IndexReader ir = DirectoryReader.Open(directory); + Assert.AreEqual(1, ir.Leaves.Count); + AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; + NumericDocValues dv = ar.GetNumericDocValues("dv1"); + Assert.AreEqual(0L, dv.Get(0)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(0L, dv.Get(1)); // LUCENENET specific - 0L required because types don't match (xUnit checks this) + Assert.AreEqual(5L, dv.Get(2)); // LUCENENET specific - 5L required because types don't match (xUnit checks this) + IBits docsWithField = ar.GetDocsWithField("dv1"); + Assert.IsTrue(docsWithField.Get(0)); + Assert.IsFalse(docsWithField.Get(1)); + Assert.IsTrue(docsWithField.Get(2)); } [Test] public virtual void TestTwoBytesOneMissing() { AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new StringField("id", "0", Field.Store.YES)); - doc.Add(new BinaryDocValuesField("dv1", new BytesRef())); - iw.AddDocument(doc); - doc = new Document(); - doc.Add(new StringField("id", "1", Field.Store.YES)); - iw.AddDocument(doc); - iw.ForceMerge(1); - } // iw.Dispose(); - - using (IndexReader ir = DirectoryReader.Open(directory)) - { - Assert.AreEqual(1, ir.Leaves.Count); - AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; - BinaryDocValues dv = ar.GetBinaryDocValues("dv1"); - BytesRef @ref = new BytesRef(); - dv.Get(0, @ref); - Assert.AreEqual(new BytesRef(), @ref); - dv.Get(1, @ref); - Assert.AreEqual(new BytesRef(), @ref); - IBits docsWithField = 
ar.GetDocsWithField("dv1"); - Assert.IsTrue(docsWithField.Get(0)); - Assert.IsFalse(docsWithField.Get(1)); - } // ir.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new StringField("id", "0", Field.Store.YES)); + doc.Add(new BinaryDocValuesField("dv1", new BytesRef())); + iw.AddDocument(doc); + doc = new Document(); + doc.Add(new StringField("id", "1", Field.Store.YES)); + iw.AddDocument(doc); + iw.ForceMerge(1); + } // iw.Dispose(); + + using IndexReader ir = DirectoryReader.Open(directory); + Assert.AreEqual(1, ir.Leaves.Count); + AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; + BinaryDocValues dv = ar.GetBinaryDocValues("dv1"); + BytesRef @ref = new BytesRef(); + dv.Get(0, @ref); + Assert.AreEqual(new BytesRef(), @ref); + dv.Get(1, @ref); + Assert.AreEqual(new BytesRef(), @ref); + IBits docsWithField = ar.GetDocsWithField("dv1"); + Assert.IsTrue(docsWithField.Get(0)); + Assert.IsFalse(docsWithField.Get(1)); } [Test] public virtual void TestTwoBytesOneMissingWithMerging() { AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) - { - Document doc = new Document(); - doc.Add(new StringField("id", "0", Field.Store.YES)); - doc.Add(new BinaryDocValuesField("dv1", new BytesRef())); - iw.AddDocument(doc); - iw.Commit(); - doc = new Document(); - doc.Add(new StringField("id", "1", Field.Store.YES)); - iw.AddDocument(doc); - iw.ForceMerge(1); - } // iw.Dispose(); - - using (IndexReader ir = 
DirectoryReader.Open(directory)) - { - Assert.AreEqual(1, ir.Leaves.Count); - AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; - BinaryDocValues dv = ar.GetBinaryDocValues("dv1"); - BytesRef @ref = new BytesRef(); - dv.Get(0, @ref); - Assert.AreEqual(new BytesRef(), @ref); - dv.Get(1, @ref); - Assert.AreEqual(new BytesRef(), @ref); - IBits docsWithField = ar.GetDocsWithField("dv1"); - Assert.IsTrue(docsWithField.Get(0)); - Assert.IsFalse(docsWithField.Get(1)); - } // ir.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new StringField("id", "0", Field.Store.YES)); + doc.Add(new BinaryDocValuesField("dv1", new BytesRef())); + iw.AddDocument(doc); + iw.Commit(); + doc = new Document(); + doc.Add(new StringField("id", "1", Field.Store.YES)); + iw.AddDocument(doc); + iw.ForceMerge(1); + } // iw.Dispose(); + + using IndexReader ir = DirectoryReader.Open(directory); + Assert.AreEqual(1, ir.Leaves.Count); + AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; + BinaryDocValues dv = ar.GetBinaryDocValues("dv1"); + BytesRef @ref = new BytesRef(); + dv.Get(0, @ref); + Assert.AreEqual(new BytesRef(), @ref); + dv.Get(1, @ref); + Assert.AreEqual(new BytesRef(), @ref); + IBits docsWithField = ar.GetDocsWithField("dv1"); + Assert.IsTrue(docsWithField.Get(0)); + Assert.IsFalse(docsWithField.Get(1)); } [Test] public virtual void TestThreeBytesOneMissingWithMerging() { AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField); - using (Directory directory = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); - conf.SetMergePolicy(NewLogMergePolicy()); - using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, 
conf)) - { - Document doc = new Document(); - doc.Add(new StringField("id", "0", Field.Store.YES)); - doc.Add(new BinaryDocValuesField("dv1", new BytesRef())); - iw.AddDocument(doc); - doc = new Document(); - doc.Add(new StringField("id", "1", Field.Store.YES)); - iw.AddDocument(doc); - iw.Commit(); - doc = new Document(); - doc.Add(new StringField("id", "2", Field.Store.YES)); - doc.Add(new BinaryDocValuesField("dv1", new BytesRef("boo"))); - iw.AddDocument(doc); - iw.ForceMerge(1); - } // iw.Dispose(); - - using (IndexReader ir = DirectoryReader.Open(directory)) - { - Assert.AreEqual(1, ir.Leaves.Count); - AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; - BinaryDocValues dv = ar.GetBinaryDocValues("dv1"); - BytesRef @ref = new BytesRef(); - dv.Get(0, @ref); - Assert.AreEqual(new BytesRef(), @ref); - dv.Get(1, @ref); - Assert.AreEqual(new BytesRef(), @ref); - dv.Get(2, @ref); - Assert.AreEqual(new BytesRef("boo"), @ref); - IBits docsWithField = ar.GetDocsWithField("dv1"); - Assert.IsTrue(docsWithField.Get(0)); - Assert.IsFalse(docsWithField.Get(1)); - Assert.IsTrue(docsWithField.Get(2)); - } // ir.Dispose(); - } // directory.Dispose(); + using Directory directory = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); + conf.SetMergePolicy(NewLogMergePolicy()); + using (RandomIndexWriter iw = new RandomIndexWriter(Random, directory, conf)) + { + Document doc = new Document(); + doc.Add(new StringField("id", "0", Field.Store.YES)); + doc.Add(new BinaryDocValuesField("dv1", new BytesRef())); + iw.AddDocument(doc); + doc = new Document(); + doc.Add(new StringField("id", "1", Field.Store.YES)); + iw.AddDocument(doc); + iw.Commit(); + doc = new Document(); + doc.Add(new StringField("id", "2", Field.Store.YES)); + doc.Add(new BinaryDocValuesField("dv1", new BytesRef("boo"))); + iw.AddDocument(doc); + iw.ForceMerge(1); + } // iw.Dispose(); + + using IndexReader ir = DirectoryReader.Open(directory); + Assert.AreEqual(1, 
ir.Leaves.Count); + AtomicReader ar = (AtomicReader)ir.Leaves[0].Reader; + BinaryDocValues dv = ar.GetBinaryDocValues("dv1"); + BytesRef @ref = new BytesRef(); + dv.Get(0, @ref); + Assert.AreEqual(new BytesRef(), @ref); + dv.Get(1, @ref); + Assert.AreEqual(new BytesRef(), @ref); + dv.Get(2, @ref); + Assert.AreEqual(new BytesRef("boo"), @ref); + IBits docsWithField = ar.GetDocsWithField("dv1"); + Assert.IsTrue(docsWithField.Get(0)); + Assert.IsFalse(docsWithField.Get(1)); + Assert.IsTrue(docsWithField.Get(2)); } // LUCENE-4853 @@ -3399,94 +3159,91 @@ public virtual void TestHugeBinaryValueLimit() Analyzer analyzer = new MockAnalyzer(Random); // FSDirectory because SimpleText will consume gobbs of // space when storing big binary values: - using (Directory d = NewFSDirectory(CreateTempDir("hugeBinaryValues"))) - { - bool doFixed = Random.NextBoolean(); - int numDocs; - int fixedLength = 0; - if (doFixed) - { - // Sometimes make all values fixed length since some - // codecs have different code paths for this: - numDocs = TestUtil.NextInt32(Random, 10, 20); + using Directory d = NewFSDirectory(CreateTempDir("hugeBinaryValues")); + bool doFixed = Random.NextBoolean(); + int numDocs; + int fixedLength = 0; + if (doFixed) + { + // Sometimes make all values fixed length since some + // codecs have different code paths for this: + numDocs = TestUtil.NextInt32(Random, 10, 20); #pragma warning disable 612, 618 - fixedLength = Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH; + fixedLength = Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH; #pragma warning restore 612, 618 - } - else - { - numDocs = TestUtil.NextInt32(Random, 100, 200); - } - var docBytes = new List(); - DirectoryReader r = null; - try + } + else + { + numDocs = TestUtil.NextInt32(Random, 100, 200); + } + var docBytes = new List(); + DirectoryReader r = null; + try + { + using (IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer))) { - using (IndexWriter w = new IndexWriter(d, 
NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer))) + long totalBytes = 0; + for (int docID = 0; docID < numDocs; docID++) { - long totalBytes = 0; - for (int docID = 0; docID < numDocs; docID++) - { - // we don't use RandomIndexWriter because it might add - // more docvalues than we expect !!!! + // we don't use RandomIndexWriter because it might add + // more docvalues than we expect !!!! - // Must be > 64KB in size to ensure more than 2 pages in - // PagedBytes would be needed: - int numBytes; - if (doFixed) - { - numBytes = fixedLength; - } - else if (docID == 0 || Random.Next(5) == 3) - { + // Must be > 64KB in size to ensure more than 2 pages in + // PagedBytes would be needed: + int numBytes; + if (doFixed) + { + numBytes = fixedLength; + } + else if (docID == 0 || Random.Next(5) == 3) + { #pragma warning disable 612, 618 - numBytes = Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH; - } - else - { - numBytes = TestUtil.NextInt32(Random, 1, Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH); + numBytes = Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH; + } + else + { + numBytes = TestUtil.NextInt32(Random, 1, Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH); #pragma warning restore 612, 618 - } - totalBytes += numBytes; - if (totalBytes > 5 * 1024 * 1024) - { - break; - } - var bytes = new byte[numBytes]; - Random.NextBytes(bytes); - docBytes.Add(bytes); - Document doc = new Document(); - BytesRef b = new BytesRef(bytes); - b.Length = bytes.Length; - doc.Add(new BinaryDocValuesField("field", b)); - doc.Add(new StringField("id", "" + docID, Field.Store.YES)); - w.AddDocument(doc); } - - r = w.GetReader(); - } // w.Dispose(); - - using (AtomicReader ar = SlowCompositeReaderWrapper.Wrap(r)) - { - BytesRef bytes = new BytesRef(); // LUCENENET: Moved outside of the loop for performance - BinaryDocValues s = FieldCache.DEFAULT.GetTerms(ar, "field", false); - for (int docID = 0; docID < docBytes.Count; docID++) + totalBytes += numBytes; + if (totalBytes > 5 * 1024 
* 1024) { - Document doc = ar.Document(docID); - - s.Get(docID, bytes); - var expected = docBytes[Convert.ToInt32(doc.Get("id"), CultureInfo.InvariantCulture)]; - Assert.AreEqual(expected.Length, bytes.Length); - Assert.AreEqual(new BytesRef(expected), bytes); + break; } + var bytes = new byte[numBytes]; + Random.NextBytes(bytes); + docBytes.Add(bytes); + Document doc = new Document(); + BytesRef b = new BytesRef(bytes); + b.Length = bytes.Length; + doc.Add(new BinaryDocValuesField("field", b)); + doc.Add(new StringField("id", "" + docID, Field.Store.YES)); + w.AddDocument(doc); + } - } // ar.Dispose(); - } - finally + r = w.GetReader(); + } // w.Dispose(); + + using (AtomicReader ar = SlowCompositeReaderWrapper.Wrap(r)) { - r?.Dispose(); // LUCENENET specific - small chance w.Dispose() will throw, this is just here to cover that case. It is safe to call r.Dispose() more than once. - } + BytesRef bytes = new BytesRef(); // LUCENENET: Moved outside of the loop for performance + BinaryDocValues s = FieldCache.DEFAULT.GetTerms(ar, "field", false); + for (int docID = 0; docID < docBytes.Count; docID++) + { + Document doc = ar.Document(docID); + + s.Get(docID, bytes); + var expected = docBytes[Convert.ToInt32(doc.Get("id"), CultureInfo.InvariantCulture)]; + Assert.AreEqual(expected.Length, bytes.Length); + Assert.AreEqual(new BytesRef(expected), bytes); + } - } // d.Dispose(); + } // ar.Dispose(); + } + finally + { + r?.Dispose(); // LUCENENET specific - small chance w.Dispose() will throw, this is just here to cover that case. It is safe to call r.Dispose() more than once. 
+ } } /// @@ -3495,88 +3252,78 @@ public virtual void TestHugeBinaryValueLimit() [Test] public virtual void TestThreads() { - using (Directory dir = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) - { - Document doc = new Document(); - Field idField = new StringField("id", "", Field.Store.NO); - Field storedBinField = new StoredField("storedBin", Arrays.Empty()); - Field dvBinField = new BinaryDocValuesField("dvBin", new BytesRef()); - Field dvSortedField = new SortedDocValuesField("dvSorted", new BytesRef()); - Field storedNumericField = new StoredField("storedNum", ""); - Field dvNumericField = new NumericDocValuesField("dvNum", 0); - doc.Add(idField); - doc.Add(storedBinField); - doc.Add(dvBinField); - doc.Add(dvSortedField); - doc.Add(storedNumericField); - doc.Add(dvNumericField); - - // index some docs - int numDocs = AtLeast(300); - for (int i = 0; i < numDocs; i++) - { - idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); - int length = TestUtil.NextInt32(Random, 0, 8); - var buffer = new byte[length]; - Random.NextBytes(buffer); - storedBinField.SetBytesValue(buffer); - dvBinField.SetBytesValue(buffer); - dvSortedField.SetBytesValue(buffer); - long numericValue = Random.NextInt64(); - storedNumericField.SetStringValue(Convert.ToString(numericValue, CultureInfo.InvariantCulture)); - dvNumericField.SetInt64Value(numericValue); - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } - } - - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) - { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + 
using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) + { + Document doc = new Document(); + Field idField = new StringField("id", "", Field.Store.NO); + Field storedBinField = new StoredField("storedBin", Arrays.Empty()); + Field dvBinField = new BinaryDocValuesField("dvBin", new BytesRef()); + Field dvSortedField = new SortedDocValuesField("dvSorted", new BytesRef()); + Field storedNumericField = new StoredField("storedNum", ""); + Field dvNumericField = new NumericDocValuesField("dvNum", 0); + doc.Add(idField); + doc.Add(storedBinField); + doc.Add(dvBinField); + doc.Add(dvSortedField); + doc.Add(storedNumericField); + doc.Add(dvNumericField); + + // index some docs + int numDocs = AtLeast(300); + for (int i = 0; i < numDocs; i++) + { + idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); + int length = TestUtil.NextInt32(Random, 0, 8); + var buffer = new byte[length]; + Random.NextBytes(buffer); + storedBinField.SetBytesValue(buffer); + dvBinField.SetBytesValue(buffer); + dvSortedField.SetBytesValue(buffer); + long numericValue = Random.NextInt64(); + storedNumericField.SetStringValue(Convert.ToString(numericValue, CultureInfo.InvariantCulture)); + dvNumericField.SetInt64Value(numericValue); + writer.AddDocument(doc); + if (Random.Next(31) == 0) + { + writer.Commit(); } - } // writer.Dispose(); + } - // compare - using (DirectoryReader ir = DirectoryReader.Open(dir)) + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) { - int numThreads = TestUtil.NextInt32(Random, 2, 7); - ThreadJob[] threads = new ThreadJob[numThreads]; - using (CountdownEvent startingGun = new CountdownEvent(1)) - { + int id = Random.Next(numDocs); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } + } // writer.Dispose(); - for (int i = 0; i < threads.Length; i++) - { - threads[i] = new ThreadAnonymousInnerClassHelper(this, ir, startingGun); - 
threads[i].Start(); - } - startingGun.Signal(); - foreach (ThreadJob t in threads) - { - t.Join(); - } - } - } // ir.Dispose(); - } // dir.Dispose(); + // compare + using DirectoryReader ir = DirectoryReader.Open(dir); + int numThreads = TestUtil.NextInt32(Random, 2, 7); + ThreadJob[] threads = new ThreadJob[numThreads]; + using CountdownEvent startingGun = new CountdownEvent(1); + for (int i = 0; i < threads.Length; i++) + { + threads[i] = new ThreadAnonymousInnerClassHelper(ir, startingGun); + threads[i].Start(); + } + startingGun.Signal(); + foreach (ThreadJob t in threads) + { + t.Join(); + } } private class ThreadAnonymousInnerClassHelper : ThreadJob { - private readonly BaseDocValuesFormatTestCase outerInstance; + private readonly DirectoryReader ir; + private readonly CountdownEvent startingGun; - private DirectoryReader ir; - private CountdownEvent startingGun; - - public ThreadAnonymousInnerClassHelper(BaseDocValuesFormatTestCase outerInstance, DirectoryReader ir, CountdownEvent startingGun) + public ThreadAnonymousInnerClassHelper(DirectoryReader ir, CountdownEvent startingGun) { - this.outerInstance = outerInstance; this.ir = ir; this.startingGun = startingGun; } @@ -3622,107 +3369,97 @@ public virtual void TestThreads2() { AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField); AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet); - using (Directory dir = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) - { - Field idField = new StringField("id", "", Field.Store.NO); - Field storedBinField = new StoredField("storedBin", Arrays.Empty()); - Field dvBinField = new BinaryDocValuesField("dvBin", new BytesRef()); - Field dvSortedField = new SortedDocValuesField("dvSorted", new BytesRef()); - Field storedNumericField = new StoredField("storedNum", ""); - 
Field dvNumericField = new NumericDocValuesField("dvNum", 0); - - // index some docs - int numDocs = AtLeast(300); - for (int i = 0; i < numDocs; i++) + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, conf)) + { + Field idField = new StringField("id", "", Field.Store.NO); + Field storedBinField = new StoredField("storedBin", Arrays.Empty()); + Field dvBinField = new BinaryDocValuesField("dvBin", new BytesRef()); + Field dvSortedField = new SortedDocValuesField("dvSorted", new BytesRef()); + Field storedNumericField = new StoredField("storedNum", ""); + Field dvNumericField = new NumericDocValuesField("dvNum", 0); + + // index some docs + int numDocs = AtLeast(300); + for (int i = 0; i < numDocs; i++) + { + idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); + int length = TestUtil.NextInt32(Random, 0, 8); + var buffer = new byte[length]; + Random.NextBytes(buffer); + storedBinField.SetBytesValue(buffer); + dvBinField.SetBytesValue(buffer); + dvSortedField.SetBytesValue(buffer); + long numericValue = Random.NextInt64(); + storedNumericField.SetStringValue(Convert.ToString(numericValue, CultureInfo.InvariantCulture)); + dvNumericField.SetInt64Value(numericValue); + Document doc = new Document(); + doc.Add(idField); + if (Random.Next(4) > 0) { - idField.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture)); - int length = TestUtil.NextInt32(Random, 0, 8); - var buffer = new byte[length]; - Random.NextBytes(buffer); - storedBinField.SetBytesValue(buffer); - dvBinField.SetBytesValue(buffer); - dvSortedField.SetBytesValue(buffer); - long numericValue = Random.NextInt64(); - storedNumericField.SetStringValue(Convert.ToString(numericValue, CultureInfo.InvariantCulture)); - dvNumericField.SetInt64Value(numericValue); - Document doc = new Document(); - doc.Add(idField); - if 
(Random.Next(4) > 0) - { - doc.Add(storedBinField); - doc.Add(dvBinField); - doc.Add(dvSortedField); - } - if (Random.Next(4) > 0) - { - doc.Add(storedNumericField); - doc.Add(dvNumericField); - } - int numSortedSetFields = Random.Next(3); - - // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java - JCG.SortedSet values = new JCG.SortedSet(StringComparer.Ordinal); - for (int j = 0; j < numSortedSetFields; j++) - { - values.Add(TestUtil.RandomSimpleString(Random)); - } - foreach (string v in values) - { - doc.Add(new SortedSetDocValuesField("dvSortedSet", new BytesRef(v))); - doc.Add(new StoredField("storedSortedSet", v)); - } - writer.AddDocument(doc); - if (Random.Next(31) == 0) - { - writer.Commit(); - } + doc.Add(storedBinField); + doc.Add(dvBinField); + doc.Add(dvSortedField); } + if (Random.Next(4) > 0) + { + doc.Add(storedNumericField); + doc.Add(dvNumericField); + } + int numSortedSetFields = Random.Next(3); - // delete some docs - int numDeletions = Random.Next(numDocs / 10); - for (int i = 0; i < numDeletions; i++) + // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java + JCG.SortedSet values = new JCG.SortedSet(StringComparer.Ordinal); + for (int j = 0; j < numSortedSetFields; j++) + { + values.Add(TestUtil.RandomSimpleString(Random)); + } + foreach (string v in values) { - int id = Random.Next(numDocs); - writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + doc.Add(new SortedSetDocValuesField("dvSortedSet", new BytesRef(v))); + doc.Add(new StoredField("storedSortedSet", v)); } - } // writer.Dispose(); + writer.AddDocument(doc); + if (Random.Next(31) == 0) + { + writer.Commit(); + } + } - // compare - using (DirectoryReader ir = DirectoryReader.Open(dir)) + // delete some docs + int numDeletions = Random.Next(numDocs / 10); + for (int i = 0; i < numDeletions; i++) { - int numThreads = TestUtil.NextInt32(Random, 2, 7); - ThreadJob[] threads = new 
ThreadJob[numThreads]; - using (CountdownEvent startingGun = new CountdownEvent(1)) - { + int id = Random.Next(numDocs); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); + } + } // writer.Dispose(); - for (int i = 0; i < threads.Length; i++) - { - threads[i] = new ThreadAnonymousInnerClassHelper2(this, ir, startingGun); - threads[i].Start(); - } - startingGun.Signal(); - foreach (ThreadJob t in threads) - { - t.Join(); - } - } - } // ir.Dispose(); - } // dir.Dispose(); + // compare + using DirectoryReader ir = DirectoryReader.Open(dir); + int numThreads = TestUtil.NextInt32(Random, 2, 7); + ThreadJob[] threads = new ThreadJob[numThreads]; + using CountdownEvent startingGun = new CountdownEvent(1); + for (int i = 0; i < threads.Length; i++) + { + threads[i] = new ThreadAnonymousInnerClassHelper2(ir, startingGun); + threads[i].Start(); + } + startingGun.Signal(); + foreach (ThreadJob t in threads) + { + t.Join(); + } } private class ThreadAnonymousInnerClassHelper2 : ThreadJob { - private readonly BaseDocValuesFormatTestCase outerInstance; + private readonly DirectoryReader ir; + private readonly CountdownEvent startingGun; - private DirectoryReader ir; - private CountdownEvent startingGun; - - public ThreadAnonymousInnerClassHelper2(BaseDocValuesFormatTestCase outerInstance, DirectoryReader ir, CountdownEvent startingGun) + public ThreadAnonymousInnerClassHelper2(DirectoryReader ir, CountdownEvent startingGun) { - this.outerInstance = outerInstance; this.ir = ir; this.startingGun = startingGun; } @@ -3825,49 +3562,45 @@ public virtual void TestEmptyBinaryValueOnPageSizes() { break; } - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + IndexReader r = null; + try { - IndexReader r = null; - try - { - using (RandomIndexWriter w = new RandomIndexWriter( + using (RandomIndexWriter w = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, dir)) + { + BytesRef 
bytes = new BytesRef(); + bytes.Bytes = new byte[1 << i]; + bytes.Length = 1 << i; + for (int j = 0; j < 4; j++) { - BytesRef bytes = new BytesRef(); - bytes.Bytes = new byte[1 << i]; - bytes.Length = 1 << i; - for (int j = 0; j < 4; j++) - { - Document doc_ = new Document(); - doc_.Add(new BinaryDocValuesField("field", bytes)); - w.AddDocument(doc_); - } - Document doc = new Document(); - doc.Add(new StoredField("id", "5")); - doc.Add(new BinaryDocValuesField("field", new BytesRef())); - w.AddDocument(doc); - r = w.GetReader(); - } // w.Dispose(); + Document doc_ = new Document(); + doc_.Add(new BinaryDocValuesField("field", bytes)); + w.AddDocument(doc_); + } + Document doc = new Document(); + doc.Add(new StoredField("id", "5")); + doc.Add(new BinaryDocValuesField("field", new BytesRef())); + w.AddDocument(doc); + r = w.GetReader(); + } // w.Dispose(); - using (AtomicReader ar = SlowCompositeReaderWrapper.Wrap(r)) - { - BinaryDocValues values = ar.GetBinaryDocValues("field"); - BytesRef result = new BytesRef(); - for (int j = 0; j < 5; j++) - { - values.Get(0, result); - Assert.IsTrue(result.Length == 0 || result.Length == 1 << i); - } - } // ar.Dispose(); - } - finally + using AtomicReader ar = SlowCompositeReaderWrapper.Wrap(r); + BinaryDocValues values = ar.GetBinaryDocValues("field"); + BytesRef result = new BytesRef(); + for (int j = 0; j < 5; j++) { - r?.Dispose(); // LUCENENET specific - small chance w.Dispose() will throw, this is just here to cover that case. It is safe to call r.Dispose() more than once. + values.Get(0, result); + Assert.IsTrue(result.Length == 0 || result.Length == 1 << i); } - } // dir.Dispose(); + } + finally + { + r?.Dispose(); // LUCENENET specific - small chance w.Dispose() will throw, this is just here to cover that case. It is safe to call r.Dispose() more than once. 
+ } } } diff --git a/src/Lucene.Net.TestFramework/Index/BaseIndexFileFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseIndexFileFormatTestCase.cs index a04c13471a..83523b02f7 100644 --- a/src/Lucene.Net.TestFramework/Index/BaseIndexFileFormatTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BaseIndexFileFormatTestCase.cs @@ -6,6 +6,7 @@ using Lucene.Net.TestFramework; using Lucene.Net.Util; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using JCG = J2N.Collections.Generic; #if TESTFRAMEWORK_MSTEST @@ -102,6 +103,7 @@ private IDictionary BytesUsedByExtension(Directory d) /// Return the list of extensions that should be excluded from byte counts when /// comparing indices that store the same content. /// + [SuppressMessage("Style", "IDE0025:Use expression body for properties", Justification = "Multiple lines")] protected virtual ICollection ExcludedExtensionsFromByteCounts { get @@ -123,46 +125,38 @@ protected virtual ICollection ExcludedExtensionsFromByteCounts [Test] public virtual void TestMergeStability() { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio + MergePolicy mp = NewTieredMergePolicy(); + mp.NoCFSRatio = 0; + var cfg = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(false).SetMergePolicy(mp); + using (var w = new RandomIndexWriter(Random, dir, cfg)) { - // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio - MergePolicy mp = NewTieredMergePolicy(); - mp.NoCFSRatio = 0; - var cfg = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(false).SetMergePolicy(mp); - using (var w = new RandomIndexWriter(Random, dir, cfg)) + var numDocs = AtLeast(500); + for (var i = 0; i < numDocs; ++i) { - var numDocs = AtLeast(500); - for (var i = 0; i < numDocs; ++i) - { - var d = new 
Document(); - AddRandomFields(d); - w.AddDocument(d); - } - w.ForceMerge(1); - w.Commit(); + var d = new Document(); + AddRandomFields(d); + w.AddDocument(d); } - using (IndexReader reader = DirectoryReader.Open(dir)) - { - using (Directory dir2 = NewDirectory()) - { - mp = NewTieredMergePolicy(); - mp.NoCFSRatio = 0; - cfg = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(false).SetMergePolicy(mp); - - using (var w = new RandomIndexWriter(Random, dir2, cfg)) - { - w.AddIndexes(reader); - w.Commit(); - } + w.ForceMerge(1); + w.Commit(); + } + using IndexReader reader = DirectoryReader.Open(dir); + using Directory dir2 = NewDirectory(); + mp = NewTieredMergePolicy(); + mp.NoCFSRatio = 0; + cfg = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(false).SetMergePolicy(mp); - // LUCENENET: We need to explicitly call Equals() and use HashMap in order to ensure our - // equality check is done correctly. Calling Assert.AreEqual doesn't guarantee this is done. - Assert.True(BytesUsedByExtension(dir).Equals(BytesUsedByExtension(dir2))); + using (var w = new RandomIndexWriter(Random, dir2, cfg)) + { + w.AddIndexes(reader); + w.Commit(); + } - } // dir2.Dispose(); - } // reader.Dispose(); - } // dir.Dispose(); - + // LUCENENET: We need to explicitly call Equals() and use HashMap in order to ensure our + // equality check is done correctly. Calling Assert.AreEqual doesn't guarantee this is done. 
+ Assert.True(BytesUsedByExtension(dir).Equals(BytesUsedByExtension(dir2))); } } } \ No newline at end of file diff --git a/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs index aae8abdb02..fe3d36b6aa 100644 --- a/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs @@ -54,43 +54,36 @@ public BaseMergePolicyTestCase(BeforeAfterClass beforeAfter) [Test] public virtual void TestForceMergeNotNeeded() { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + AtomicBoolean mayMerge = new AtomicBoolean(true); + MergeScheduler mergeScheduler = new SerialMergeSchedulerAnonymousInnerClassHelper(mayMerge); + using IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMergePolicy(NewMergePolicy())); + writer.Config.MergePolicy.NoCFSRatio = Random.NextBoolean() ? 0 : 1; + int numSegments = TestUtil.NextInt32(Random, 2, 20); + for (int i = 0; i < numSegments; ++i) { - AtomicBoolean mayMerge = new AtomicBoolean(true); - MergeScheduler mergeScheduler = new SerialMergeSchedulerAnonymousInnerClassHelper(this, mayMerge); - using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMergePolicy(NewMergePolicy()))) + int numDocs = TestUtil.NextInt32(Random, 1, 5); + for (int j = 0; j < numDocs; ++j) { - writer.Config.MergePolicy.NoCFSRatio = Random.NextBoolean() ? 
0 : 1; - int numSegments = TestUtil.NextInt32(Random, 2, 20); - for (int i = 0; i < numSegments; ++i) - { - int numDocs = TestUtil.NextInt32(Random, 1, 5); - for (int j = 0; j < numDocs; ++j) - { - writer.AddDocument(new Document()); - } - writer.GetReader().Dispose(); - } - for (int i = 5; i >= 0; --i) - { - int segmentCount = writer.SegmentCount; - int maxNumSegments = i == 0 ? 1 : TestUtil.NextInt32(Random, 1, 10); - mayMerge.Value = (segmentCount > maxNumSegments); - writer.ForceMerge(maxNumSegments); - } - } // writer.Dispose(); - } // dir.Dispose(); + writer.AddDocument(new Document()); + } + writer.GetReader().Dispose(); + } + for (int i = 5; i >= 0; --i) + { + int segmentCount = writer.SegmentCount; + int maxNumSegments = i == 0 ? 1 : TestUtil.NextInt32(Random, 1, 10); + mayMerge.Value = (segmentCount > maxNumSegments); + writer.ForceMerge(maxNumSegments); + } } private class SerialMergeSchedulerAnonymousInnerClassHelper : SerialMergeScheduler { - private readonly BaseMergePolicyTestCase outerInstance; - - private AtomicBoolean mayMerge; + private readonly AtomicBoolean mayMerge; - public SerialMergeSchedulerAnonymousInnerClassHelper(BaseMergePolicyTestCase outerInstance, AtomicBoolean mayMerge) + public SerialMergeSchedulerAnonymousInnerClassHelper(AtomicBoolean mayMerge) { - this.outerInstance = outerInstance; this.mayMerge = mayMerge; } diff --git a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs index 829ff244b2..ad48543df4 100644 --- a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs +++ b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs @@ -124,7 +124,7 @@ private class SeedPostings : DocsAndPositionsEnum private readonly bool fixedPayloads; private readonly IBits liveDocs; private readonly BytesRef payload; - private readonly IndexOptions options; + //private readonly IndexOptions options; // LUCENENET: Never read private readonly bool 
doPositions; private int docID; @@ -161,7 +161,7 @@ public SeedPostings(long seed, int minDocFreq, int maxDocFreq, IBits liveDocs, I fixedPayloads = random.NextBoolean(); var payloadBytes = new byte[payloadSize]; payload = new BytesRef(payloadBytes); - this.options = options; + //this.options = options; // LUCENENET: Never read // LUCENENET specific - to avoid boxing, changed from CompareTo() to IndexOptionsComparer.Compare() doPositions = IndexOptionsComparer.Default.Compare(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, options) <= 0; } @@ -178,7 +178,9 @@ public override int NextDoc() } } +#pragma warning disable IDE1006 // Naming Styles private int _nextDoc() +#pragma warning restore IDE1006 // Naming Styles { // Must consume random: while (posUpto < freq) @@ -1109,10 +1111,10 @@ private void VerifyEnum(ThreadState threadState, private class TestThread : ThreadJob { private Fields fieldsSource; - private ISet public class MockIndexOutputWrapper : IndexOutput { - private MockDirectoryWrapper dir; + private readonly MockDirectoryWrapper dir; private readonly IndexOutput @delegate; private bool first = true; internal readonly string name; diff --git a/src/Lucene.Net.TestFramework/Support/ApiScanTestBase.cs b/src/Lucene.Net.TestFramework/Support/ApiScanTestBase.cs index 3d7f537eb5..854ddae09d 100644 --- a/src/Lucene.Net.TestFramework/Support/ApiScanTestBase.cs +++ b/src/Lucene.Net.TestFramework/Support/ApiScanTestBase.cs @@ -86,10 +86,10 @@ internal ApiScanTestBase(BeforeAfterClass beforeAfter) /// private static readonly Regex ContainsNonNetNumeric = new Regex("(? - /// Constants should not contain the word INT that is not followed by 16, 32, or 64, LONG, SHORT, or FLOAT - /// - private static readonly Regex ConstContainsNonNetNumeric = new Regex("(? + ///// Constants should not contain the word INT that is not followed by 16, 32, or 64, LONG, SHORT, or FLOAT + ///// + //private static readonly Regex ConstContainsNonNetNumeric = new Regex("(? 
/// Matches IL code pattern for a method body with only a return statement for a local variable. diff --git a/src/Lucene.Net.TestFramework/Support/Attributes/DeadlockAttribute.cs b/src/Lucene.Net.TestFramework/Support/Attributes/DeadlockAttribute.cs index 04e0952aa7..0893326a3d 100644 --- a/src/Lucene.Net.TestFramework/Support/Attributes/DeadlockAttribute.cs +++ b/src/Lucene.Net.TestFramework/Support/Attributes/DeadlockAttribute.cs @@ -27,7 +27,7 @@ namespace Lucene.Net.Attributes /// Indicates a test has contention between concurrent processes and may deadlock. /// [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = false, Inherited = false)] - internal class DeadlockAttribute : TimeoutAttribute, IApplyToTest + internal sealed class DeadlockAttribute : TimeoutAttribute, IApplyToTest { public DeadlockAttribute() : base(600000) { } diff --git a/src/Lucene.Net.TestFramework/Support/Attributes/LuceneNetSpecificAttribute.cs b/src/Lucene.Net.TestFramework/Support/Attributes/LuceneNetSpecificAttribute.cs index 387deda6fb..dde5f1cdbc 100644 --- a/src/Lucene.Net.TestFramework/Support/Attributes/LuceneNetSpecificAttribute.cs +++ b/src/Lucene.Net.TestFramework/Support/Attributes/LuceneNetSpecificAttribute.cs @@ -26,7 +26,7 @@ namespace Lucene.Net.Attributes /// In other words, apply this attribute to the test if it /// did not exist in Java Lucene. 
/// - internal class LuceneNetSpecificAttribute : CategoryAttribute + internal sealed class LuceneNetSpecificAttribute : CategoryAttribute { public LuceneNetSpecificAttribute() : base("LUCENENET") diff --git a/src/Lucene.Net.TestFramework/Support/Attributes/NoOpAttribute.cs b/src/Lucene.Net.TestFramework/Support/Attributes/NoOpAttribute.cs index 244f1a3c1f..01210d41c2 100644 --- a/src/Lucene.Net.TestFramework/Support/Attributes/NoOpAttribute.cs +++ b/src/Lucene.Net.TestFramework/Support/Attributes/NoOpAttribute.cs @@ -22,7 +22,7 @@ namespace Lucene.Net.Attributes /// /// An attribute to use as a placeholder when a test framework doesn't support such a behavior. /// - internal class NoOpAttribute : Attribute + internal sealed class NoOpAttribute : Attribute { } } diff --git a/src/Lucene.Net.TestFramework/Support/ExceptionSerializationTestBase.cs b/src/Lucene.Net.TestFramework/Support/ExceptionSerializationTestBase.cs index 65134a1f5a..f70e070aa5 100644 --- a/src/Lucene.Net.TestFramework/Support/ExceptionSerializationTestBase.cs +++ b/src/Lucene.Net.TestFramework/Support/ExceptionSerializationTestBase.cs @@ -45,16 +45,12 @@ protected static bool TypeCanSerialize(T exception) try { var binaryFormatter = new BinaryFormatter(); - using (var serializationStream = new MemoryStream()) - { - binaryFormatter.Serialize(serializationStream, exception); - serializationStream.Seek(0, SeekOrigin.Begin); - clone = (T)binaryFormatter.Deserialize(serializationStream); - } + using var serializationStream = new MemoryStream(); + binaryFormatter.Serialize(serializationStream, exception); + serializationStream.Seek(0, SeekOrigin.Begin); + clone = (T)binaryFormatter.Deserialize(serializationStream); } -#pragma warning disable 168 - catch (SerializationException ex) -#pragma warning restore 168 + catch (SerializationException) { return false; } diff --git a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs 
b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs index 98f0f95b39..a83f718e6c 100644 --- a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs +++ b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/LuceneTestCase.cs @@ -1,8 +1,7 @@ -using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; +using System.Runtime.CompilerServices; using Assert = Lucene.Net.TestFramework.Assert; using JCG = J2N.Collections.Generic; @@ -29,6 +28,7 @@ namespace Lucene.Net.Util /// LUCENENET specific extensions to to make it easier to port tests /// from Java with fewer changes. /// + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "These methods are for making porting tests from Java simpler")] public abstract partial class LuceneTestCase { // LUCENENET NOTE: This was not added because it causes naming collisions with @@ -40,225 +40,268 @@ public abstract partial class LuceneTestCase // return Random; //} + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertTrue(bool condition) { Assert.IsTrue(condition); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertTrue(string message, bool condition) { Assert.IsTrue(condition, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertFalse(bool condition) { Assert.IsFalse(condition); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertFalse(string message, bool condition) { Assert.IsFalse(condition, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(T expected, T actual) { Assert.AreEqual(expected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, T expected, T actual) { Assert.AreEqual(expected, actual, message); } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string expected, string actual) { Assert.AreEqual(expected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, string expected, string actual) { Assert.AreEqual(expected, actual, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(bool expected, bool actual) { Assert.AreEqual(expected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, bool expected, bool actual) { Assert.AreEqual(expected, actual, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(long expected, long actual) { Assert.AreEqual(expected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, long expected, long actual) { Assert.AreEqual(expected, actual, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(int expected, int actual) { Assert.AreEqual(expected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, int expected, int actual) { Assert.AreEqual(expected, actual, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(byte expected, byte actual) { Assert.AreEqual(expected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, byte expected, byte actual) { Assert.AreEqual(expected, actual, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(double d1, double d2, double delta) { Assert.AreEqual(d1, d2, delta); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string msg, float d1, float d2, float delta) { Assert.AreEqual(d1, d2, delta, msg); } 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(float d1, float d2, float delta) { Assert.AreEqual(d1, d2, delta); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string msg, double d1, double d2, double delta) { Assert.AreEqual(d1, d2, delta, msg); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(ISet expected, ISet actual, bool aggressive = true) { Assert.AreEqual(expected, actual, aggressive); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, ISet expected, ISet actual, bool aggressive = true) { Assert.AreEqual(expected, actual, aggressive, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(IList expected, IList actual, bool aggressive = true) { Assert.AreEqual(expected, actual, aggressive); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, IList expected, IList actual, bool aggressive = true) { Assert.AreEqual(expected, actual, aggressive, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(T[] expected, T[] actual) { Assert.AreEqual(expected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, T[] expected, T[] actual) { Assert.AreEqual(expected, actual, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(IDictionary expected, IDictionary actual, bool aggressive = true) { Assert.AreEqual(expected, actual, aggressive); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertEquals(string message, IDictionary expected, IDictionary actual, bool aggressive = true) { Assert.AreEqual(expected, actual, aggressive, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void 
assertNotSame(object unexpected, object actual) { Assert.AreNotSame(unexpected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertNotSame(string message, object unexpected, object actual) { Assert.AreNotSame(unexpected, actual, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertNotNull(object o) { Assert.NotNull(o); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertNotNull(string msg, object o) { Assert.NotNull(o, msg); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertNull(object o) { Assert.Null(o); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertNull(string msg, object o) { Assert.Null(o, msg); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertArrayEquals(T[] a1, T[] a2) { Assert.AreEqual(a1, a2); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertArrayEquals(string message, T[] a1, T[] a2) { Assert.AreEqual(a1, a2, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertArrayEquals(Func getMessage, T[] a1, T[] a2) { Assert.AreEqual(a1, a2, getMessage()); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertSame(object expected, object actual) { Assert.AreSame(expected, actual); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void assertSame(string message, object expected, object actual) { Assert.AreSame(expected, actual, message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void fail() { Assert.Fail(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void fail(string message) { Assert.Fail(message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ISet AsSet(params T[] args) { return new JCG.HashSet(args); } [ExceptionToNetNumericConvention] // LUCENENET: This is 
for making test porting easier, keeping as-is - internal int randomInt(int max) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static int randomInt(int max) { return randomIntBetween(0, max); } [ExceptionToNetNumericConvention] // LUCENENET: This is for making test porting easier, keeping as-is - internal int randomIntBetween(int min, int max) + internal static int randomIntBetween(int min, int max) { // LUCENENET specific - added guard clause instead of assert if (max < min) @@ -286,7 +329,8 @@ private static int toIntExact(long value) return (int)value; } } - + + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal double randomGaussian() { return RandomGaussian(); diff --git a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/RandomHelpers.cs b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/RandomHelpers.cs index 5f338bc723..790f8ded8b 100644 --- a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/RandomHelpers.cs +++ b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/RandomHelpers.cs @@ -1,5 +1,6 @@ using Lucene.Net.Support; using System; +using System.Diagnostics.CodeAnalysis; namespace Lucene.Net { @@ -24,6 +25,7 @@ namespace Lucene.Net /// LUCENENET specific extensions to to make it easier to port tests /// from Java with fewer changes. 
/// + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "These methods are intended to make porting tests from Java easier")] internal static class RandomHelpers { [ExceptionToNetNumericConvention] // LUCENENET: This is for making test porting easier, keeping as-is diff --git a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/SystemTypesHelpers.cs b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/SystemTypesHelpers.cs index 5a1d897dcb..4e8776ad15 100644 --- a/src/Lucene.Net.TestFramework/Support/JavaCompatibility/SystemTypesHelpers.cs +++ b/src/Lucene.Net.TestFramework/Support/JavaCompatibility/SystemTypesHelpers.cs @@ -2,6 +2,7 @@ using System; using System.Collections; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Linq; @@ -32,6 +33,7 @@ namespace Lucene.Net /// LUCENENET specific extensions to various .NET types to make it easier to port tests /// from Java with fewer changes. /// + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "These are intended to make porting tests from Java simpler")] internal static class SystemTypesHelpers { public static char[] toCharArray(this string str) diff --git a/src/Lucene.Net.TestFramework/Support/Util/LuceneTestFrameworkInitializer.cs b/src/Lucene.Net.TestFramework/Support/Util/LuceneTestFrameworkInitializer.cs index b3e3dbab54..b0f23a79f2 100644 --- a/src/Lucene.Net.TestFramework/Support/Util/LuceneTestFrameworkInitializer.cs +++ b/src/Lucene.Net.TestFramework/Support/Util/LuceneTestFrameworkInitializer.cs @@ -160,7 +160,7 @@ protected void OneTimeSetUpBeforeTests() catch (Exception ex) { // Write the stack trace so we have something to go on if an error occurs here. 
- throw new Exception($"An exception occurred during OneTimeSetUpBeforeTests:\n{ex.ToString()}", ex); + throw new Exception($"An exception occurred during OneTimeSetUpBeforeTests:\n{ex}", ex); } } @@ -178,7 +178,7 @@ protected void OneTimeTearDownAfterTests() catch (Exception ex) { // Write the stack trace so we have something to go on if an error occurs here. - throw new Exception($"An exception occurred during OneTimeTearDownAfterTests:\n{ex.ToString()}", ex); + throw new Exception($"An exception occurred during OneTimeTearDownAfterTests:\n{ex}", ex); } // Cleanup our LineDocsFile and reset LuceneTestCase back to its original state. diff --git a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs index 6fc769d3b7..07462342f1 100644 --- a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs +++ b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs @@ -50,9 +50,9 @@ public static string RandomRegexp(Random r) new RegExp(regexp, RegExpSyntax.NONE); return regexp; } -#pragma warning disable 168 +#pragma warning disable 168, IDE0059 catch (Exception e) -#pragma warning restore 168 +#pragma warning restore 168, IDE0059 { } } diff --git a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs index 94898c208b..507fb7416c 100644 --- a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs +++ b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs @@ -217,7 +217,7 @@ public virtual void DoTest(bool testPruning) // isn't accepted. 
if prefixLength is non-null it must be // length 1 int array; prefixLength[0] is set to the length // of the term prefix that matches - private T Run(FST fst, Int32sRef term, int[] prefixLength) + private static T Run(FST fst, Int32sRef term, int[] prefixLength) // LUCENENET: CA1822: Mark members as static { if (Debugging.AssertsEnabled) Debugging.Assert(prefixLength == null || prefixLength.Length == 1); FST.Arc arc = fst.GetFirstArc(new FST.Arc()); @@ -247,7 +247,7 @@ private T Run(FST fst, Int32sRef term, int[] prefixLength) } else { - return default(T); + return default; } } output = fst.Outputs.Add(output, arc.Output); @@ -557,7 +557,7 @@ private void VerifyUnPruned(int inputMode, FST fst) while (true) { Int32sRef term = ToInt32sRef(GetRandomString(random), inputMode); - int pos = pairs.BinarySearch(new InputOutput(term, default(T))); + int pos = pairs.BinarySearch(new InputOutput(term, default)); if (pos < 0) { pos = -(pos + 1); @@ -689,7 +689,7 @@ private void VerifyUnPruned(int inputMode, FST fst) Int32sRef term = ToInt32sRef(GetRandomString(random), inputMode); if (!termsMap.ContainsKey(term) && term.CompareTo(pairs[upto].Input) > 0) { - int pos = pairs.BinarySearch(new InputOutput(term, default(T))); + int pos = pairs.BinarySearch(new InputOutput(term, default)); if (Debugging.AssertsEnabled) Debugging.Assert(pos < 0); upto = -(pos + 1); diff --git a/src/Lucene.Net.TestFramework/Util/LineFileDocs.cs b/src/Lucene.Net.TestFramework/Util/LineFileDocs.cs index 42884d955f..3c5a8e4af2 100644 --- a/src/Lucene.Net.TestFramework/Util/LineFileDocs.cs +++ b/src/Lucene.Net.TestFramework/Util/LineFileDocs.cs @@ -118,7 +118,7 @@ protected virtual void Dispose(bool disposing) } } - private long RandomSeekPos(Random random, long size) + private static long RandomSeekPos(Random random, long size) // LUCENENET: CA1822: Mark members as static { if (random == null || size <= 3L) { @@ -133,15 +133,13 @@ private long RandomSeekPos(Random random, long size) // so tests can be 
run without the overhead of seeking within a MemoryStream private Stream PrepareGZipStream(Stream input) { - using (var gzs = new GZipStream(input, CompressionMode.Decompress, leaveOpen: false)) - { - FileInfo tempFile = LuceneTestCase.CreateTempFile(TEMP_FILE_PREFIX, TEMP_FILE_SUFFIX); - tempFilePath = tempFile.FullName; - Stream result = new FileStream(tempFilePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read); - gzs.CopyTo(result); - // Use the decompressed stream now - return new BufferedStream(result); - } + using var gzs = new GZipStream(input, CompressionMode.Decompress, leaveOpen: false); + FileInfo tempFile = LuceneTestCase.CreateTempFile(TEMP_FILE_PREFIX, TEMP_FILE_SUFFIX); + tempFilePath = tempFile.FullName; + Stream result = new FileStream(tempFilePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read); // Leave open + gzs.CopyTo(result); + // Use the decompressed stream now + return new BufferedStream(result); } private void Open(Random random) @@ -367,11 +365,9 @@ internal static string MaybeCreateTempFile(bool removeAfterClass = true) ? 
LuceneTestCase.CreateTempFile(TEMP_FILE_PREFIX, TEMP_FILE_SUFFIX) : FileSupport.CreateTempFile(TEMP_FILE_PREFIX, TEMP_FILE_SUFFIX); result = file.FullName; - using (var gzs = new GZipStream(temp, CompressionMode.Decompress, leaveOpen: false)) - using (Stream output = new FileStream(result, FileMode.Open, FileAccess.Write, FileShare.Read)) - { - gzs.CopyTo(output); - } + using var gzs = new GZipStream(temp, CompressionMode.Decompress, leaveOpen: false); + using Stream output = new FileStream(result, FileMode.Open, FileAccess.Write, FileShare.Read); + gzs.CopyTo(output); } return result; } diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs index 46a69cd1a2..6a458a4253 100644 --- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs +++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs @@ -292,9 +292,8 @@ void IApplyToContext.ApplyToContext(TestExecutionContext context) { // Cover the case where this attribute is applied to the whole test fixture var currentTest = context.CurrentTest; - if (!TestNightly && currentTest is NUnit.Framework.Internal.TestFixture) + if (!TestNightly && currentTest is NUnit.Framework.Internal.TestFixture fixture) { - var fixture = (NUnit.Framework.Internal.TestFixture)currentTest; foreach (var testInterface in fixture.Tests) { var test = (NUnit.Framework.Internal.Test)testInterface; @@ -337,9 +336,8 @@ void IApplyToContext.ApplyToContext(TestExecutionContext context) { // Cover the case where this attribute is applied to the whole test fixture var currentTest = context.CurrentTest; - if (!TestWeekly && currentTest is NUnit.Framework.Internal.TestFixture) + if (!TestWeekly && currentTest is NUnit.Framework.Internal.TestFixture fixture) { - var fixture = (NUnit.Framework.Internal.TestFixture)currentTest; foreach (var testInterface in fixture.Tests) { var test = (NUnit.Framework.Internal.Test)testInterface; @@ -380,9 +378,8 @@ void 
IApplyToContext.ApplyToContext(TestExecutionContext context) { // Cover the case where this attribute is applied to the whole test fixture var currentTest = context.CurrentTest; - if (!TestAwaitsFix && currentTest is NUnit.Framework.Internal.TestFixture) + if (!TestAwaitsFix && currentTest is NUnit.Framework.Internal.TestFixture fixture) { - var fixture = (NUnit.Framework.Internal.TestFixture)currentTest; foreach (var testInterface in fixture.Tests) { var test = (NUnit.Framework.Internal.Test)testInterface; @@ -427,9 +424,8 @@ void IApplyToContext.ApplyToContext(TestExecutionContext context) { // Cover the case where this attribute is applied to the whole test fixture var currentTest = context.CurrentTest; - if (!TestSlow && currentTest is NUnit.Framework.Internal.TestFixture) + if (!TestSlow && currentTest is NUnit.Framework.Internal.TestFixture fixture) { - var fixture = (NUnit.Framework.Internal.TestFixture)currentTest; foreach (var testInterface in fixture.Tests) { var test = (NUnit.Framework.Internal.Test)testInterface; @@ -507,7 +503,7 @@ public SuppressCodecsAttribute(params string[] value) /// up after the suite is completed. /// /// - /// + /// [AttributeUsage(AttributeTargets.Class, AllowMultiple = false, Inherited = true)] [SuppressMessage("Design", "CA1034:Nested types should not be visible", Justification = "API looks better with this nested.")] public sealed class SuppressTempFileChecksAttribute : System.Attribute @@ -1129,7 +1125,7 @@ public virtual void BeforeClass() catch (Exception ex) { // Write the stack trace so we have something to go on if an error occurs here. - throw new Exception($"An exception occurred during BeforeClass:\n{ex.ToString()}", ex); + throw new Exception($"An exception occurred during BeforeClass:\n{ex}", ex); } } @@ -1182,7 +1178,7 @@ public virtual void AfterClass() catch (Exception ex) { // Write the stack trace so we have something to go on if an error occurs here. 
- throw new Exception($"An exception occurred during AfterClass:\n{ex.ToString()}", ex); + throw new Exception($"An exception occurred during AfterClass:\n{ex}", ex); } } @@ -1210,6 +1206,7 @@ public virtual void AfterClass() /// // tight loop with many invocations. /// /// + [SuppressMessage("Style", "IDE0025:Use expression body for properties", Justification = "Multiple lines")] public static Random Random { get @@ -1279,9 +1276,11 @@ public static Type GetTestClass() } + /// /// Return the name of the currently executing test case. /// + [SuppressMessage("Style", "IDE0025:Use expression body for properties", Justification = "Multiple lines")] public virtual string TestName { get @@ -2150,7 +2149,7 @@ public static IndexReader MaybeWrapReader(IndexReader r) case 1: // will create no FC insanity in atomic case, as ParallelAtomicReader has own cache key: - r = (r is AtomicReader) ? (IndexReader)new ParallelAtomicReader((AtomicReader)r) : new ParallelCompositeReader((CompositeReader)r); + r = (r is AtomicReader atomicReader) ? 
(IndexReader)new ParallelAtomicReader(atomicReader) : new ParallelCompositeReader((CompositeReader)r); break; case 2: @@ -2178,13 +2177,13 @@ public static IndexReader MaybeWrapReader(IndexReader r) // Häckidy-Hick-Hack: a standard Reader will cause FC insanity, so we use // QueryUtils' reader with a fake cache key, so insanity checker cannot walk // along our reader: - if (r is AtomicReader) + if (r is AtomicReader atomicReader2) { - r = new AssertingAtomicReader((AtomicReader)r); + r = new AssertingAtomicReader(atomicReader2); } - else if (r is DirectoryReader) + else if (r is DirectoryReader directoryReader) { - r = new AssertingDirectoryReader((DirectoryReader)r); + r = new AssertingDirectoryReader(directoryReader); } break; @@ -2458,11 +2457,9 @@ protected virtual Stream GetDataFile(string name) { return this.GetType().getResourceAsStream(name); } -#pragma warning disable 168 catch (Exception e) -#pragma warning restore 168 { - throw new IOException("Cannot find resource: " + name); + throw new IOException("Cannot find resource: " + name, e); // LUCENENET specific - wrapped inner exception } } @@ -2589,18 +2586,16 @@ public virtual void AssertFieldsEquals(string info, IndexReader leftReader, Fiel } AssertFieldStatisticsEquals(info, leftFields, rightFields); - using (IEnumerator leftEnum = leftFields.GetEnumerator()) - using (IEnumerator rightEnum = rightFields.GetEnumerator()) + using IEnumerator leftEnum = leftFields.GetEnumerator(); + using IEnumerator rightEnum = rightFields.GetEnumerator(); + while (leftEnum.MoveNext()) { - while (leftEnum.MoveNext()) - { - string field = leftEnum.Current; - rightEnum.MoveNext(); - Assert.AreEqual(field, rightEnum.Current, info); - AssertTermsEquals(info, leftReader, leftFields.GetTerms(field), rightFields.GetTerms(field), deep); - } - Assert.IsFalse(rightEnum.MoveNext()); + string field = leftEnum.Current; + rightEnum.MoveNext(); + Assert.AreEqual(field, rightEnum.Current, info); + AssertTermsEquals(info, leftReader, 
leftFields.GetTerms(field), rightFields.GetTerms(field), deep); } + Assert.IsFalse(rightEnum.MoveNext()); } /// @@ -3048,20 +3043,18 @@ public virtual void AssertStoredFieldsEquals(string info, IndexReader leftReader // in whatever way it wants (e.g. maybe it packs related fields together or something) // To fix this, we sort the fields in both documents by name, but // we still assume that all instances with same name are in order: - Comparison comp = (a, b) => String.Compare(a.Name, b.Name, StringComparison.Ordinal); + var comp = Comparer.Create((a, b) => string.Compare(a.Name, b.Name, StringComparison.Ordinal)); leftDoc.Fields.Sort(comp); rightDoc.Fields.Sort(comp); - using (var leftIterator = leftDoc.GetEnumerator()) - using (var rightIterator = rightDoc.GetEnumerator()) + using var leftIterator = leftDoc.GetEnumerator(); + using var rightIterator = rightDoc.GetEnumerator(); + while (leftIterator.MoveNext()) { - while (leftIterator.MoveNext()) - { - Assert.IsTrue(rightIterator.MoveNext(), info); - AssertStoredFieldEquals(info, leftIterator.Current, rightIterator.Current); - } - Assert.IsFalse(rightIterator.MoveNext(), info); + Assert.IsTrue(rightIterator.MoveNext(), info); + AssertStoredFieldEquals(info, leftIterator.Current, rightIterator.Current); } + Assert.IsFalse(rightIterator.MoveNext(), info); } } @@ -3428,9 +3421,9 @@ public static DirectoryInfo CreateTempDir(string prefix) iterate = false; } } -#pragma warning disable 168 +#pragma warning disable 168, IDE0059 catch (IOException exc) -#pragma warning restore 168 +#pragma warning restore 168, IDE0059 { iterate = true; } @@ -3471,7 +3464,7 @@ public static FileInfo CreateTempFile(string prefix, string suffix) /// /// Creates an empty temporary file. /// - /// + /// public static FileInfo CreateTempFile() { return CreateTempFile("tempFile", ".tmp"); @@ -3516,8 +3509,7 @@ private static void CleanupTemporaryFiles() // and leave them there. 
if (LuceneTestCase.SuiteFailureMarker /*.WasSuccessful()*/) { - string f; - while (cleanupQueue.TryDequeue(out f)) + while (cleanupQueue.TryDequeue(out string f)) { try { @@ -3605,7 +3597,7 @@ internal static void LogNativeFSFactoryDebugInfo() return; } - Stream lockStream; + Stream lockStream = null; try { lockStream = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.None, 1, FileOptions.None); @@ -3616,12 +3608,14 @@ internal static void LogNativeFSFactoryDebugInfo() SystemConsole.WriteLine($"******* HResult: {e.HResult}"); return; } + finally + { + lockStream?.Dispose(); + } try { // Try to get an exclusive lock on the file - this should throw an IOException with the current platform's HResult value for FileShare violation - using (var stream = new FileStream(fileName, FileMode.Open, FileAccess.Write, FileShare.None, 1, FileOptions.None)) - { - } + using var stream = new FileStream(fileName, FileMode.Open, FileAccess.Write, FileShare.None, 1, FileOptions.None); } catch (IOException io) when (io.HResult != 0) { @@ -3631,7 +3625,7 @@ internal static void LogNativeFSFactoryDebugInfo() } finally { - lockStream.Dispose(); + lockStream?.Dispose(); } } diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs index 5363ed5ed6..43e6aabc12 100644 --- a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs +++ b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs @@ -61,10 +61,10 @@ namespace Lucene.Net.Util // we didn't port over the entire test suite from Java. internal sealed class TestRuleSetupAndRestoreClassEnv : AbstractBeforeAfterRule { - /// - /// Restore these system property values. - /// - private Dictionary restoreProperties = new Dictionary(); + ///// + ///// Restore these system property values. 
+ ///// + //private Dictionary restoreProperties = new Dictionary(); // LUCENENET: Never read private Codec savedCodec; private CultureInfo savedLocale; @@ -242,7 +242,7 @@ public override void Before(LuceneTestCase testInstance) dvFormat = DocValuesFormat.ForName(LuceneTestCase.TestDocValuesFormat); } - codec = new Lucene46CodecAnonymousInnerClassHelper(this, format, dvFormat); + codec = new Lucene46CodecAnonymousInnerClassHelper(format, dvFormat); } else if ("SimpleText".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) && randomVal == 9 && LuceneTestCase.Rarely(random) && !ShouldAvoidCodec("SimpleText"))) @@ -317,14 +317,11 @@ public override void Before(LuceneTestCase testInstance) private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec { - private readonly TestRuleSetupAndRestoreClassEnv outerInstance; - - private PostingsFormat format; - private DocValuesFormat dvFormat; + private readonly PostingsFormat format; + private readonly DocValuesFormat dvFormat; - public Lucene46CodecAnonymousInnerClassHelper(TestRuleSetupAndRestoreClassEnv outerInstance, PostingsFormat format, DocValuesFormat dvFormat) + public Lucene46CodecAnonymousInnerClassHelper(PostingsFormat format, DocValuesFormat dvFormat) { - this.outerInstance = outerInstance; this.format = format; this.dvFormat = dvFormat; } @@ -353,9 +350,9 @@ private void CheckCodecRestrictions(Codec codec) { LuceneTestCase.AssumeFalse("Class not allowed to use codec: " + codec.Name + ".", ShouldAvoidCodec(codec.Name)); - if (codec is RandomCodec && avoidCodecs.Count > 0) + if (codec is RandomCodec randomCodec && avoidCodecs.Count > 0) { - foreach (string name in ((RandomCodec)codec).FormatNames) + foreach (string name in randomCodec.FormatNames) { LuceneTestCase.AssumeFalse("Class not allowed to use postings format: " + name + ".", ShouldAvoidCodec(name)); } diff --git a/src/Lucene.Net.TestFramework/Util/TestUtil.cs 
b/src/Lucene.Net.TestFramework/Util/TestUtil.cs index 2d83b15388..eb8728a172 100644 --- a/src/Lucene.Net.TestFramework/Util/TestUtil.cs +++ b/src/Lucene.Net.TestFramework/Util/TestUtil.cs @@ -134,29 +134,23 @@ public static void Unzip(Stream zipFileStream, DirectoryInfo destDir) Rm(destDir); destDir.Create(); - using (ZipArchive zip = new ZipArchive(zipFileStream)) + using ZipArchive zip = new ZipArchive(zipFileStream); + foreach (var entry in zip.Entries) { - foreach (var entry in zip.Entries) + // Ignore internal folders - these are tacked onto the FullName anyway + if (entry.FullName.EndsWith("/", StringComparison.Ordinal) || entry.FullName.EndsWith("\\", StringComparison.Ordinal)) { - // Ignore internal folders - these are tacked onto the FullName anyway - if (entry.FullName.EndsWith("/", StringComparison.Ordinal) || entry.FullName.EndsWith("\\", StringComparison.Ordinal)) - { - continue; - } - using (Stream input = entry.Open()) - { - FileInfo targetFile = new FileInfo(CorrectPath(Path.Combine(destDir.FullName, entry.FullName))); - if (!targetFile.Directory.Exists) - { - targetFile.Directory.Create(); - } - - using (Stream output = new FileStream(targetFile.FullName, FileMode.Create, FileAccess.Write)) - { - input.CopyTo(output); - } - } + continue; } + using Stream input = entry.Open(); + FileInfo targetFile = new FileInfo(CorrectPath(Path.Combine(destDir.FullName, entry.FullName))); + if (!targetFile.Directory.Exists) + { + targetFile.Directory.Create(); + } + + using Stream output = new FileStream(targetFile.FullName, FileMode.Create, FileAccess.Write); + input.CopyTo(output); } } @@ -180,9 +174,9 @@ public static void SyncConcurrentMerges(IndexWriter writer) public static void SyncConcurrentMerges(IMergeScheduler ms) { - if (ms is IConcurrentMergeScheduler) + if (ms is IConcurrentMergeScheduler concurrentMergeScheduler) { - ((IConcurrentMergeScheduler)ms).Sync(); + concurrentMergeScheduler.Sync(); } } @@ -942,9 +936,9 @@ public static string 
GetPostingsFormat(string field) public static string GetPostingsFormat(Codec codec, string field) { PostingsFormat p = codec.PostingsFormat; - if (p is PerFieldPostingsFormat) + if (p is PerFieldPostingsFormat perFieldPostingsFormat) { - return ((PerFieldPostingsFormat)p).GetPostingsFormatForField(field).Name; + return perFieldPostingsFormat.GetPostingsFormatForField(field).Name; } else { @@ -960,9 +954,9 @@ public static string GetDocValuesFormat(string field) public static string GetDocValuesFormat(Codec codec, string field) { DocValuesFormat f = codec.DocValuesFormat; - if (f is PerFieldDocValuesFormat) + if (f is PerFieldDocValuesFormat perFieldDocValuesFormat) { - return ((PerFieldDocValuesFormat)f).GetDocValuesFormatForField(field).Name; + return perFieldDocValuesFormat.GetDocValuesFormatForField(field).Name; } else { @@ -1004,24 +998,22 @@ public static void ReduceOpenFiles(IndexWriter w) { // keep number of open files lowish MergePolicy mp = w.Config.MergePolicy; - if (mp is LogMergePolicy) + if (mp is LogMergePolicy lmp) { - LogMergePolicy lmp = (LogMergePolicy)mp; lmp.MergeFactor = Math.Min(5, lmp.MergeFactor); lmp.NoCFSRatio = 1.0; } - else if (mp is TieredMergePolicy) + else if (mp is TieredMergePolicy tmp) { - TieredMergePolicy tmp = (TieredMergePolicy)mp; tmp.MaxMergeAtOnce = Math.Min(5, tmp.MaxMergeAtOnce); tmp.SegmentsPerTier = Math.Min(5, tmp.SegmentsPerTier); tmp.NoCFSRatio = 1.0; } IMergeScheduler ms = w.Config.MergeScheduler; - if (ms is IConcurrentMergeScheduler) + if (ms is IConcurrentMergeScheduler concurrentMergeScheduler) { // wtf... shouldnt it be even lower since its 1 by default?!?! 
- ((IConcurrentMergeScheduler)ms).SetMaxMergesAndThreads(3, 2); + concurrentMergeScheduler.SetMaxMergesAndThreads(3, 2); } } @@ -1068,10 +1060,10 @@ public static void AssertEquals(TopDocs expected, TopDocs actual) ScoreDoc actualSD = actual.ScoreDocs[hitIDX]; Assert.AreEqual(expectedSD.Doc, actualSD.Doc, "wrong hit docID"); Assert.AreEqual(expectedSD.Score, actualSD.Score, "wrong hit score"); - if (expectedSD is FieldDoc) + if (expectedSD is FieldDoc expectedFieldDoc) { Assert.IsTrue(actualSD is FieldDoc); - Assert.AreEqual(((FieldDoc)expectedSD).Fields, ((FieldDoc)actualSD).Fields, "wrong sort field values"); + Assert.AreEqual(expectedFieldDoc.Fields, ((FieldDoc)actualSD).Fields, "wrong sort field values"); } else { @@ -1271,9 +1263,9 @@ public static Regex RandomRegex(Random random) // LUCENENET specific - renamed f return p; } } -#pragma warning disable 168 +#pragma warning disable 168, IDE0059 catch (Exception ignored) -#pragma warning restore 168 +#pragma warning restore 168, IDE0059 { // Loop trying until we hit something that compiles. 
} diff --git a/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs b/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs index 15dd14f3c1..3d9bae3d78 100644 --- a/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs +++ b/src/Lucene.Net.TestFramework/Util/ThrottledIndexOutput.cs @@ -30,12 +30,12 @@ public class ThrottledIndexOutput : IndexOutput { public const int DEFAULT_MIN_WRITTEN_BYTES = 1024; private readonly int bytesPerSecond; - private IndexOutput @delegate; - private long flushDelayMillis; - private long closeDelayMillis; - private long seekDelayMillis; + private readonly IndexOutput @delegate; // LUCENENET: marked readonly + private readonly long flushDelayMillis; // LUCENENET: marked readonly + private readonly long closeDelayMillis; // LUCENENET: marked readonly + private readonly long seekDelayMillis; // LUCENENET: marked readonly private long pendingBytes; - private long minBytesWritten; + private readonly long minBytesWritten; // LUCENENET: marked readonly private long timeElapsed; private readonly byte[] bytes = new byte[1]; @@ -86,7 +86,7 @@ protected override void Dispose(bool disposing) } finally { - @delegate.Dispose(); + @delegate?.Dispose(); // LUCENENET specific - only call if non-null } } } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs index 58d0c84ccb..d62b59d9d9 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Compound/TestCompoundWordTokenFilter.cs @@ -40,13 +40,11 @@ public virtual void TestHyphenationCompoundWordsDA() CharArraySet dict = makeDictionary("læse", "hest"); //InputSource @is = new InputSource(this.GetType().getResource("da_UTF8.xml").toExternalForm()); - using (var @is = this.GetType().getResourceAsStream("da_UTF8.xml")) - { - HyphenationTree hyphenator = 
HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); + using var @is = this.GetType().getResourceAsStream("da_UTF8.xml"); + HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); - HyphenationCompoundWordTokenFilter tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("min veninde som er lidt af en læsehest"), MockTokenizer.WHITESPACE, false), hyphenator, dict, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE, CompoundWordTokenFilterBase.DEFAULT_MAX_SUBWORD_SIZE, false); - AssertTokenStreamContents(tf, new string[] { "min", "veninde", "som", "er", "lidt", "af", "en", "læsehest", "læse", "hest" }, new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 0, 0 }); - } + HyphenationCompoundWordTokenFilter tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("min veninde som er lidt af en læsehest"), MockTokenizer.WHITESPACE, false), hyphenator, dict, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE, CompoundWordTokenFilterBase.DEFAULT_MAX_SUBWORD_SIZE, false); + AssertTokenStreamContents(tf, new string[] { "min", "veninde", "som", "er", "lidt", "af", "en", "læsehest", "læse", "hest" }, new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 0, 0 }); } [Test] @@ -55,14 +53,12 @@ public virtual void TestHyphenationCompoundWordsDELongestMatch() CharArraySet dict = makeDictionary("basketball", "basket", "ball", "kurv"); //InputSource @is = new InputSource(this.GetType().getResource("da_UTF8.xml").toExternalForm()); - using (var @is = this.GetType().getResourceAsStream("da_UTF8.xml")) - { - HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); + using var @is = this.GetType().getResourceAsStream("da_UTF8.xml"); + HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); - // the word basket will not be 
added due to the longest match option - HyphenationCompoundWordTokenFilter tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("basketballkurv"), MockTokenizer.WHITESPACE, false), hyphenator, dict, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE, 40, true); - AssertTokenStreamContents(tf, new string[] { "basketballkurv", "basketball", "ball", "kurv" }, new int[] { 1, 0, 0, 0 }); - } + // the word basket will not be added due to the longest match option + HyphenationCompoundWordTokenFilter tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("basketballkurv"), MockTokenizer.WHITESPACE, false), hyphenator, dict, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE, 40, true); + AssertTokenStreamContents(tf, new string[] { "basketballkurv", "basketball", "ball", "kurv" }, new int[] { 1, 0, 0, 0 }); } /// @@ -73,25 +69,23 @@ public virtual void TestHyphenationCompoundWordsDELongestMatch() public virtual void TestHyphenationOnly() { //InputSource @is = new InputSource(this.GetType().getResource("da_UTF8.xml").toExternalForm()); - using (var @is = this.GetType().getResourceAsStream("da_UTF8.xml")) - { - HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); + using var @is = this.GetType().getResourceAsStream("da_UTF8.xml"); + HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); - HyphenationCompoundWordTokenFilter tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("basketballkurv"), MockTokenizer.WHITESPACE, false), hyphenator, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, 2, 4); + HyphenationCompoundWordTokenFilter tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("basketballkurv"), 
MockTokenizer.WHITESPACE, false), hyphenator, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, 2, 4); - // min=2, max=4 - AssertTokenStreamContents(tf, new string[] { "basketballkurv", "ba", "sket", "bal", "ball", "kurv" }); + // min=2, max=4 + AssertTokenStreamContents(tf, new string[] { "basketballkurv", "ba", "sket", "bal", "ball", "kurv" }); - tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("basketballkurv"), MockTokenizer.WHITESPACE, false), hyphenator, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, 4, 6); + tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("basketballkurv"), MockTokenizer.WHITESPACE, false), hyphenator, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, 4, 6); - // min=4, max=6 - AssertTokenStreamContents(tf, new string[] { "basketballkurv", "basket", "sket", "ball", "lkurv", "kurv" }); + // min=4, max=6 + AssertTokenStreamContents(tf, new string[] { "basketballkurv", "basket", "sket", "ball", "lkurv", "kurv" }); - tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("basketballkurv"), MockTokenizer.WHITESPACE, false), hyphenator, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, 4, 10); + tf = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, new MockTokenizer(new StringReader("basketballkurv"), MockTokenizer.WHITESPACE, false), hyphenator, CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE, 4, 10); - // min=4, max=10 - AssertTokenStreamContents(tf, new string[] { "basketballkurv", "basket", "basketbal", "basketball", "sket", "sketbal", "sketball", "ball", "ballkurv", "lkurv", "kurv" }); - } + // min=4, max=10 + AssertTokenStreamContents(tf, new string[] { "basketballkurv", "basket", "basketbal", "basketball", "sket", "sketbal", "sketball", "ball", "ballkurv", "lkurv", "kurv" }); } [Test] @@ -260,17 +254,15 @@ public virtual void TestRandomStrings() CheckRandomData(Random, a, 
1000 * RandomMultiplier); //InputSource @is = new InputSource(this.GetType().getResource("da_UTF8.xml").toExternalForm()); - using (var @is = this.GetType().getResourceAsStream("da_UTF8.xml")) + using var @is = this.GetType().getResourceAsStream("da_UTF8.xml"); + HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); + Analyzer b = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { - HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); - Analyzer b = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => - { - Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false); - TokenFilter filter = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, hyphenator); - return new TokenStreamComponents(tokenizer, filter); - }); - CheckRandomData(Random, b, 1000 * RandomMultiplier); - } + Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false); + TokenFilter filter = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, hyphenator); + return new TokenStreamComponents(tokenizer, filter); + }); + CheckRandomData(Random, b, 1000 * RandomMultiplier); } [Test] @@ -285,18 +277,15 @@ public virtual void TestEmptyTerm() CheckOneTerm(a, "", ""); //InputSource @is = new InputSource(this.GetType().getResource("da_UTF8.xml").toExternalForm()); - using (var @is = this.GetType().getResourceAsStream("da_UTF8.xml")) + using var @is = this.GetType().getResourceAsStream("da_UTF8.xml"); + HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); + Analyzer b = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { - - HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); - Analyzer b = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => - { - Tokenizer tokenizer = new KeywordTokenizer(reader); - TokenFilter filter = new 
HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, hyphenator); - return new TokenStreamComponents(tokenizer, filter); - }); - CheckOneTerm(b, "", ""); - } + Tokenizer tokenizer = new KeywordTokenizer(reader); + TokenFilter filter = new HyphenationCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, hyphenator); + return new TokenStreamComponents(tokenizer, filter); + }); + CheckOneTerm(b, "", ""); } } } \ No newline at end of file diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs index e8a4e19cef..63d79cffd5 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAllAnalyzersHaveFactories.cs @@ -127,9 +127,9 @@ public virtual void Test() { instance = TokenizerFactory.ForName(simpleName, args); assertNotNull(instance); - if (instance is IResourceLoaderAware) + if (instance is IResourceLoaderAware resourceLoaderAware) { - ((IResourceLoaderAware)instance).Inform(loader); + resourceLoaderAware.Inform(loader); } assertSame(c, instance.Create(new StringReader("")).GetType()); } @@ -154,9 +154,9 @@ public virtual void Test() { instance = TokenFilterFactory.ForName(simpleName, args); assertNotNull(instance); - if (instance is IResourceLoaderAware) + if (instance is IResourceLoaderAware resourceLoaderAware) { - ((IResourceLoaderAware)instance).Inform(loader); + resourceLoaderAware.Inform(loader); } Type createdClazz = instance.Create(new KeywordTokenizer(new StringReader(""))).GetType(); // only check instance if factory have wrapped at all! 
@@ -186,9 +186,9 @@ public virtual void Test() { instance = CharFilterFactory.ForName(simpleName, args); assertNotNull(instance); - if (instance is IResourceLoaderAware) + if (instance is IResourceLoaderAware resourceLoaderAware) { - ((IResourceLoaderAware)instance).Inform(loader); + resourceLoaderAware.Inform(loader); } Type createdClazz = instance.Create(new StringReader("")).GetType(); // only check instance if factory have wrapped at all! diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs index 8a5d1b8d24..d7919fd390 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestAnalyzers.cs @@ -244,15 +244,10 @@ public virtual void TestRandomHugeStrings() internal sealed class PayloadSetter : TokenFilter { - private void InitializeInstanceFields() - { - p = new BytesRef(data, 0, 1); - } - internal IPayloadAttribute payloadAtt; public PayloadSetter(TokenStream input) : base(input) { - InitializeInstanceFields(); + p = new BytesRef(data, 0, 1); payloadAtt = AddAttribute(); } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs index 1c30cc8948..9997422930 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestClassicAnalyzer.cs @@ -281,68 +281,64 @@ public virtual void TestJava14BWCompatibility() [Test] public virtual void TestWickedLongTerm() { - using (RAMDirectory dir = new RAMDirectory()) - { - - char[] chars = new char[IndexWriter.MAX_TERM_LENGTH]; - Arrays.Fill(chars, 'x'); + using RAMDirectory dir = new RAMDirectory(); + char[] chars = new char[IndexWriter.MAX_TERM_LENGTH]; + Arrays.Fill(chars, 'x'); - string bigTerm = new string(chars); - Document doc = new Document(); + string 
bigTerm = new string(chars); + Document doc = new Document(); - using (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new ClassicAnalyzer(TEST_VERSION_CURRENT)))) - { - // This produces a too-long term: - string contents = "abc xyz x" + bigTerm + " another term"; - doc.Add(new TextField("content", contents, Field.Store.NO)); - writer.AddDocument(doc); + using (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new ClassicAnalyzer(TEST_VERSION_CURRENT)))) + { + // This produces a too-long term: + string contents = "abc xyz x" + bigTerm + " another term"; + doc.Add(new TextField("content", contents, Field.Store.NO)); + writer.AddDocument(doc); - // Make sure we can add another normal document - doc = new Document(); - doc.Add(new TextField("content", "abc bbb ccc", Field.Store.NO)); - writer.AddDocument(doc); - } + // Make sure we can add another normal document + doc = new Document(); + doc.Add(new TextField("content", "abc bbb ccc", Field.Store.NO)); + writer.AddDocument(doc); + } #pragma warning disable 612, 618 - using (IndexReader reader = IndexReader.Open(dir)) + using (IndexReader reader = IndexReader.Open(dir)) #pragma warning restore 612, 618 - { + { - // Make sure all terms < max size were indexed - assertEquals(2, reader.DocFreq(new Term("content", "abc"))); - assertEquals(1, reader.DocFreq(new Term("content", "bbb"))); - assertEquals(1, reader.DocFreq(new Term("content", "term"))); - assertEquals(1, reader.DocFreq(new Term("content", "another"))); + // Make sure all terms < max size were indexed + assertEquals(2, reader.DocFreq(new Term("content", "abc"))); + assertEquals(1, reader.DocFreq(new Term("content", "bbb"))); + assertEquals(1, reader.DocFreq(new Term("content", "term"))); + assertEquals(1, reader.DocFreq(new Term("content", "another"))); - // Make sure position is still incremented when - // massive term is skipped: - DocsAndPositionsEnum tps = 
MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), "content", new BytesRef("another")); - assertTrue(tps.NextDoc() != DocIdSetIterator.NO_MORE_DOCS); - assertEquals(1, tps.Freq); - assertEquals(3, tps.NextPosition()); + // Make sure position is still incremented when + // massive term is skipped: + DocsAndPositionsEnum tps = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), "content", new BytesRef("another")); + assertTrue(tps.NextDoc() != DocIdSetIterator.NO_MORE_DOCS); + assertEquals(1, tps.Freq); + assertEquals(3, tps.NextPosition()); - // Make sure the doc that has the massive term is in - // the index: - assertEquals("document with wicked long term should is not in the index!", 2, reader.NumDocs); + // Make sure the doc that has the massive term is in + // the index: + assertEquals("document with wicked long term should is not in the index!", 2, reader.NumDocs); - } + } - // Make sure we can add a document with exactly the - // maximum length term, and search on that term: - doc = new Document(); - doc.Add(new TextField("content", bigTerm, Field.Store.NO)); - ClassicAnalyzer sa = new ClassicAnalyzer(TEST_VERSION_CURRENT); - sa.MaxTokenLength = 100000; - using (var writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, sa))) - { - writer.AddDocument(doc); - } + // Make sure we can add a document with exactly the + // maximum length term, and search on that term: + doc = new Document(); + doc.Add(new TextField("content", bigTerm, Field.Store.NO)); + ClassicAnalyzer sa = new ClassicAnalyzer(TEST_VERSION_CURRENT); + sa.MaxTokenLength = 100000; + using (var writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, sa))) + { + writer.AddDocument(doc); + } #pragma warning disable 612, 618 - using (var reader = IndexReader.Open(dir)) + using (var reader = IndexReader.Open(dir)) #pragma warning restore 612, 618 - { - assertEquals(1, reader.DocFreq(new Term("content", bigTerm))); - } - + { 
+ assertEquals(1, reader.DocFreq(new Term("content", bigTerm))); } } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs index 13b2cf322b..072d41d529 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs @@ -66,9 +66,9 @@ private void DoTestTokenizer(string tokenizer) // we managed to fully create an instance. check a few more things: // if it implements MultiTermAware, sanity check its impl - if (factory is IMultiTermAwareComponent) + if (factory is IMultiTermAwareComponent multiTermAwareComponent) { - AbstractAnalysisFactory mtc = ((IMultiTermAwareComponent)factory).GetMultiTermComponent(); + AbstractAnalysisFactory mtc = multiTermAwareComponent.GetMultiTermComponent(); assertNotNull(mtc); // its not ok to return e.g. a charfilter here: but a tokenizer could wrap a filter around it assertFalse(mtc is CharFilterFactory); @@ -89,9 +89,9 @@ private void DoTestTokenFilter(string tokenfilter) // we managed to fully create an instance. check a few more things: // if it implements MultiTermAware, sanity check its impl - if (factory is IMultiTermAwareComponent) + if (factory is IMultiTermAwareComponent multiTermAwareComponent) { - AbstractAnalysisFactory mtc = ((IMultiTermAwareComponent)factory).GetMultiTermComponent(); + AbstractAnalysisFactory mtc = multiTermAwareComponent.GetMultiTermComponent(); assertNotNull(mtc); // its not ok to return a charfilter or tokenizer here, this makes no sense assertTrue(mtc is TokenFilterFactory); @@ -112,9 +112,9 @@ private void DoTestCharFilter(string charfilter) // we managed to fully create an instance. 
check a few more things: // if it implements MultiTermAware, sanity check its impl - if (factory is IMultiTermAwareComponent) + if (factory is IMultiTermAwareComponent multiTermAwareComponent) { - AbstractAnalysisFactory mtc = ((IMultiTermAwareComponent)factory).GetMultiTermComponent(); + AbstractAnalysisFactory mtc = multiTermAwareComponent.GetMultiTermComponent(); assertNotNull(mtc); // its not ok to return a tokenizer or tokenfilter here, this makes no sense assertTrue(mtc is CharFilterFactory); @@ -131,7 +131,7 @@ private void DoTestCharFilter(string charfilter) /// /// tries to initialize a factory with no arguments - private AbstractAnalysisFactory Initialize(Type factoryClazz) + private static AbstractAnalysisFactory Initialize(Type factoryClazz) // LUCENENET: CA1822: Mark members as static { IDictionary args = new Dictionary { ["luceneMatchVersion"] = TEST_VERSION_CURRENT_STRING }; diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs index 0dce8c7fe1..b1d9c5d07a 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs @@ -88,6 +88,8 @@ public virtual bool Apply(object[] args) private static readonly IDictionary> brokenOffsetsConstructors = new Dictionary>(); internal static readonly ISet allowedTokenizerArgs, allowedTokenFilterArgs, allowedCharFilterArgs; + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1810:Initialize reference type static fields inline", Justification = "Complexity")] static TestRandomChains() { try @@ -336,9 +338,7 @@ private class AnonymousProducer : IArgProducer public AnonymousProducer(Func create) { - if (create == null) - throw new ArgumentNullException("create"); - this.create = create; + this.create = create ?? 
throw new ArgumentNullException(nameof(create)); } public object Create(Random random) @@ -502,22 +502,18 @@ private class DictionaryArgProducer : IArgProducer public object Create(Random random) { // TODO: make nastier - using (Stream affixStream = typeof(TestHunspellStemFilter).getResourceAsStream("simple.aff")) + using Stream affixStream = typeof(TestHunspellStemFilter).getResourceAsStream("simple.aff"); + using Stream dictStream = typeof(TestHunspellStemFilter).getResourceAsStream("simple.dic"); + try { - using (Stream dictStream = typeof(TestHunspellStemFilter).getResourceAsStream("simple.dic")) - { - try - { - return new Dictionary(affixStream, dictStream); - } - catch (Exception /*ex*/) - { - throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details) + return new Dictionary(affixStream, dictStream); + } + catch (Exception /*ex*/) + { + throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details) #pragma warning disable 162 - return null; // unreachable code + return null; // unreachable code #pragma warning restore 162 - } - } } } } @@ -553,11 +549,9 @@ public object Create(Random random) // TODO: make nastier try { - using (Stream @is = typeof(TestCompoundWordTokenFilter).getResourceAsStream("da_UTF8.xml")) - { - HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); - return hyphenator; - } + using Stream @is = typeof(TestCompoundWordTokenFilter).getResourceAsStream("da_UTF8.xml"); + HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.GetHyphenationTree(@is); + return hyphenator; } catch (Exception /*ex*/) { @@ -673,12 +667,12 @@ public object Create(Random random) do { input = TestUtil.RandomRealisticUnicodeString(random); - } while (input == string.Empty); + } while (input.Length == 0); // LUCENENET: 
CA1820: Test for empty strings using string length string @out = ""; TestUtil.RandomSimpleString(random); do { @out = TestUtil.RandomRealisticUnicodeString(random); - } while (@out == string.Empty); + } while (@out.Length == 0); // LUCENENET: CA1820: Test for empty strings using string length builder.Add(input, @out); } try @@ -923,15 +917,15 @@ private T CreateComponent(ConstructorInfo ctor, object[] args, StringBuilder //{ // Rethrow.rethrow(ie); //} - return default(T); // no success + return default; // no success } - private bool Broken(ConstructorInfo ctor, object[] args) + private static bool Broken(ConstructorInfo ctor, object[] args) // LUCENENET: CA1822: Mark members as static { return brokenConstructors.TryGetValue(ctor, out IPredicate pred) && pred != null && pred.Apply(args); } - private bool BrokenOffsets(ConstructorInfo ctor, object[] args) + private static bool BrokenOffsets(ConstructorInfo ctor, object[] args) // LUCENENET: CA1822: Mark members as static { return brokenOffsetsConstructors.TryGetValue(ctor, out IPredicate pred) && pred != null && pred.Apply(args); } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs index c10147ab75..8773f5139a 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries.cs @@ -170,32 +170,24 @@ public virtual void Test() FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); if (Debugging.AssertsEnabled) Debugging.Assert(f.Exists); - using (Stream fileStream = f.OpenRead()) - { - using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) - { - ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null); - ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - 
if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null); + using Stream fileStream = f.OpenRead(); + using ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8); + ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); + if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null); + ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); + if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null); - using (Stream dictionary = dicEntry.Open()) - { - using (Stream affix = affEntry.Open()) - { - Dictionary dic = new Dictionary(affix, dictionary); - Console.WriteLine(tests[i] + "\t" + RamUsageEstimator.HumanSizeOf(dic) + "\t(" + - "words=" + RamUsageEstimator.HumanSizeOf(dic.words) + ", " + - "flags=" + RamUsageEstimator.HumanSizeOf(dic.flagLookup) + ", " + - "strips=" + RamUsageEstimator.HumanSizeOf(dic.stripData) + ", " + - "conditions=" + RamUsageEstimator.HumanSizeOf(dic.patterns) + ", " + - "affixData=" + RamUsageEstimator.HumanSizeOf(dic.affixData) + ", " + - "prefixes=" + RamUsageEstimator.HumanSizeOf(dic.prefixes) + ", " + - "suffixes=" + RamUsageEstimator.HumanSizeOf(dic.suffixes) + ")"); - } - } - } - } + using Stream dictionary = dicEntry.Open(); + using Stream affix = affEntry.Open(); + Dictionary dic = new Dictionary(affix, dictionary); + Console.WriteLine(tests[i] + "\t" + RamUsageEstimator.HumanSizeOf(dic) + "\t(" + + "words=" + RamUsageEstimator.HumanSizeOf(dic.words) + ", " + + "flags=" + RamUsageEstimator.HumanSizeOf(dic.flagLookup) + ", " + + "strips=" + RamUsageEstimator.HumanSizeOf(dic.stripData) + ", " + + "conditions=" + RamUsageEstimator.HumanSizeOf(dic.patterns) + ", " + + "affixData=" + RamUsageEstimator.HumanSizeOf(dic.affixData) + ", " + + "prefixes=" + RamUsageEstimator.HumanSizeOf(dic.prefixes) + ", " + + "suffixes=" + RamUsageEstimator.HumanSizeOf(dic.suffixes) + ")"); } } @@ -210,25 +202,16 @@ public virtual void TestOneDictionary() FileInfo f = new 
FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); if (Debugging.AssertsEnabled) Debugging.Assert(f.Exists); - using (Stream fileStream = f.OpenRead()) - { - using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) - { - ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null); - ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null); - - using (Stream dictionary = dicEntry.Open()) - { - using (Stream affix = affEntry.Open()) - { - new Dictionary(affix, dictionary); - } - } + using Stream fileStream = f.OpenRead(); + using ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8); + ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); + if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null); + ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); + if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null); - } - } + using Stream dictionary = dicEntry.Open(); + using Stream affix = affEntry.Open(); + new Dictionary(affix, dictionary); } } } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs index a1ac6b5abc..1568b3b06b 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestAllDictionaries2.cs @@ -186,32 +186,24 @@ public virtual void Test() FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); if (Debugging.AssertsEnabled) Debugging.Assert(f.Exists); - using (Stream fileStream = f.OpenRead()) - { - using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) - { - ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - if 
(Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null); - ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null); + using Stream fileStream = f.OpenRead(); + using ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8); + ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); + if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null); + ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); + if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null); - using (Stream dictionary = dicEntry.Open()) - { - using (Stream affix = affEntry.Open()) - { - Dictionary dic = new Dictionary(affix, dictionary); - Console.WriteLine(tests[i] + "\t" + RamUsageEstimator.HumanSizeOf(dic) + "\t(" + - "words=" + RamUsageEstimator.HumanSizeOf(dic.words) + ", " + - "flags=" + RamUsageEstimator.HumanSizeOf(dic.flagLookup) + ", " + - "strips=" + RamUsageEstimator.HumanSizeOf(dic.stripData) + ", " + - "conditions=" + RamUsageEstimator.HumanSizeOf(dic.patterns) + ", " + - "affixData=" + RamUsageEstimator.HumanSizeOf(dic.affixData) + ", " + - "prefixes=" + RamUsageEstimator.HumanSizeOf(dic.prefixes) + ", " + - "suffixes=" + RamUsageEstimator.HumanSizeOf(dic.suffixes) + ")"); - } - } - } - } + using Stream dictionary = dicEntry.Open(); + using Stream affix = affEntry.Open(); + Dictionary dic = new Dictionary(affix, dictionary); + Console.WriteLine(tests[i] + "\t" + RamUsageEstimator.HumanSizeOf(dic) + "\t(" + + "words=" + RamUsageEstimator.HumanSizeOf(dic.words) + ", " + + "flags=" + RamUsageEstimator.HumanSizeOf(dic.flagLookup) + ", " + + "strips=" + RamUsageEstimator.HumanSizeOf(dic.stripData) + ", " + + "conditions=" + RamUsageEstimator.HumanSizeOf(dic.patterns) + ", " + + "affixData=" + RamUsageEstimator.HumanSizeOf(dic.affixData) + ", " + + "prefixes=" + RamUsageEstimator.HumanSizeOf(dic.prefixes) + ", " + + "suffixes=" + RamUsageEstimator.HumanSizeOf(dic.suffixes) + 
")"); } } @@ -228,25 +220,16 @@ public virtual void TestOneDictionary() FileInfo f = new FileInfo(System.IO.Path.Combine(DICTIONARY_HOME.FullName, tests[i])); if (Debugging.AssertsEnabled) Debugging.Assert(f.Exists); - using (Stream fileStream = f.OpenRead()) - { - using (ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8)) - { - ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); - if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null); - ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); - if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null); - - using (Stream dictionary = dicEntry.Open()) - { - using (Stream affix = affEntry.Open()) - { - new Dictionary(affix, dictionary); - } - } + using Stream fileStream = f.OpenRead(); + using ZipArchive zip = new ZipArchive(fileStream, ZipArchiveMode.Read, false, Encoding.UTF8); + ZipArchiveEntry dicEntry = zip.GetEntry(tests[i + 1]); + if (Debugging.AssertsEnabled) Debugging.Assert(dicEntry != null); + ZipArchiveEntry affEntry = zip.GetEntry(tests[i + 2]); + if (Debugging.AssertsEnabled) Debugging.Assert(affEntry != null); - } - } + using Stream dictionary = dicEntry.Open(); + using Stream affix = affEntry.Open(); + new Dictionary(affix, dictionary); } } } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDictionary.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDictionary.cs index 65a7f34836..33123e7f32 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDictionary.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDictionary.cs @@ -32,112 +32,88 @@ public class TestDictionary : LuceneTestCase [Test] public virtual void TestSimpleDictionary() { - using (System.IO.Stream affixStream = this.GetType().getResourceAsStream("simple.aff")) - { - using (System.IO.Stream dictStream = this.GetType().getResourceAsStream("simple.dic")) - { - - Dictionary dictionary = new Dictionary(affixStream, 
dictStream); - assertEquals(3, dictionary.LookupSuffix(new char[] { 'e' }, 0, 1).Length); - assertEquals(1, dictionary.LookupPrefix(new char[] { 's' }, 0, 1).Length); - Int32sRef ordList = dictionary.LookupWord(new char[] { 'o', 'l', 'r' }, 0, 3); - assertNotNull(ordList); - assertEquals(1, ordList.Length); - - BytesRef @ref = new BytesRef(); - dictionary.flagLookup.Get(ordList.Int32s[0], @ref); - char[] flags = Dictionary.DecodeFlags(@ref); - assertEquals(1, flags.Length); - - ordList = dictionary.LookupWord(new char[] { 'l', 'u', 'c', 'e', 'n' }, 0, 5); - assertNotNull(ordList); - assertEquals(1, ordList.Length); - dictionary.flagLookup.Get(ordList.Int32s[0], @ref); - flags = Dictionary.DecodeFlags(@ref); - assertEquals(1, flags.Length); - } - } + using Stream affixStream = this.GetType().getResourceAsStream("simple.aff"); + using Stream dictStream = this.GetType().getResourceAsStream("simple.dic"); + Dictionary dictionary = new Dictionary(affixStream, dictStream); + assertEquals(3, dictionary.LookupSuffix(new char[] { 'e' }, 0, 1).Length); + assertEquals(1, dictionary.LookupPrefix(new char[] { 's' }, 0, 1).Length); + Int32sRef ordList = dictionary.LookupWord(new char[] { 'o', 'l', 'r' }, 0, 3); + assertNotNull(ordList); + assertEquals(1, ordList.Length); + + BytesRef @ref = new BytesRef(); + dictionary.flagLookup.Get(ordList.Int32s[0], @ref); + char[] flags = Dictionary.DecodeFlags(@ref); + assertEquals(1, flags.Length); + + ordList = dictionary.LookupWord(new char[] { 'l', 'u', 'c', 'e', 'n' }, 0, 5); + assertNotNull(ordList); + assertEquals(1, ordList.Length); + dictionary.flagLookup.Get(ordList.Int32s[0], @ref); + flags = Dictionary.DecodeFlags(@ref); + assertEquals(1, flags.Length); } [Test] public virtual void TestCompressedDictionary() { - using (System.IO.Stream affixStream = this.GetType().getResourceAsStream("compressed.aff")) - { - using (System.IO.Stream dictStream = this.GetType().getResourceAsStream("compressed.dic")) - { - - Dictionary dictionary = 
new Dictionary(affixStream, dictStream); - assertEquals(3, dictionary.LookupSuffix(new char[] { 'e' }, 0, 1).Length); - assertEquals(1, dictionary.LookupPrefix(new char[] { 's' }, 0, 1).Length); - Int32sRef ordList = dictionary.LookupWord(new char[] { 'o', 'l', 'r' }, 0, 3); - BytesRef @ref = new BytesRef(); - dictionary.flagLookup.Get(ordList.Int32s[0], @ref); - char[] flags = Dictionary.DecodeFlags(@ref); - assertEquals(1, flags.Length); - } - } + using Stream affixStream = this.GetType().getResourceAsStream("compressed.aff"); + using Stream dictStream = this.GetType().getResourceAsStream("compressed.dic"); + Dictionary dictionary = new Dictionary(affixStream, dictStream); + assertEquals(3, dictionary.LookupSuffix(new char[] { 'e' }, 0, 1).Length); + assertEquals(1, dictionary.LookupPrefix(new char[] { 's' }, 0, 1).Length); + Int32sRef ordList = dictionary.LookupWord(new char[] { 'o', 'l', 'r' }, 0, 3); + BytesRef @ref = new BytesRef(); + dictionary.flagLookup.Get(ordList.Int32s[0], @ref); + char[] flags = Dictionary.DecodeFlags(@ref); + assertEquals(1, flags.Length); } [Test] public virtual void TestCompressedBeforeSetDictionary() { - using (System.IO.Stream affixStream = this.GetType().getResourceAsStream("compressed-before-set.aff")) - { - using (System.IO.Stream dictStream = this.GetType().getResourceAsStream("compressed.dic")) - { - - Dictionary dictionary = new Dictionary(affixStream, dictStream); - assertEquals(3, dictionary.LookupSuffix(new char[] { 'e' }, 0, 1).Length); - assertEquals(1, dictionary.LookupPrefix(new char[] { 's' }, 0, 1).Length); - Int32sRef ordList = dictionary.LookupWord(new char[] { 'o', 'l', 'r' }, 0, 3); - BytesRef @ref = new BytesRef(); - dictionary.flagLookup.Get(ordList.Int32s[0], @ref); - char[] flags = Dictionary.DecodeFlags(@ref); - assertEquals(1, flags.Length); - } - } + using Stream affixStream = this.GetType().getResourceAsStream("compressed-before-set.aff"); + using Stream dictStream = 
this.GetType().getResourceAsStream("compressed.dic"); + Dictionary dictionary = new Dictionary(affixStream, dictStream); + assertEquals(3, dictionary.LookupSuffix(new char[] { 'e' }, 0, 1).Length); + assertEquals(1, dictionary.LookupPrefix(new char[] { 's' }, 0, 1).Length); + Int32sRef ordList = dictionary.LookupWord(new char[] { 'o', 'l', 'r' }, 0, 3); + BytesRef @ref = new BytesRef(); + dictionary.flagLookup.Get(ordList.Int32s[0], @ref); + char[] flags = Dictionary.DecodeFlags(@ref); + assertEquals(1, flags.Length); } [Test] public virtual void TestCompressedEmptyAliasDictionary() { - using (System.IO.Stream affixStream = this.GetType().getResourceAsStream("compressed-empty-alias.aff")) - { - using (System.IO.Stream dictStream = this.GetType().getResourceAsStream("compressed.dic")) - { - Dictionary dictionary = new Dictionary(affixStream, dictStream); - assertEquals(3, dictionary.LookupSuffix(new char[] { 'e' }, 0, 1).Length); - assertEquals(1, dictionary.LookupPrefix(new char[] { 's' }, 0, 1).Length); - Int32sRef ordList = dictionary.LookupWord(new char[] { 'o', 'l', 'r' }, 0, 3); - BytesRef @ref = new BytesRef(); - dictionary.flagLookup.Get(ordList.Int32s[0], @ref); - char[] flags = Dictionary.DecodeFlags(@ref); - assertEquals(1, flags.Length); - } - } + using Stream affixStream = this.GetType().getResourceAsStream("compressed-empty-alias.aff"); + using Stream dictStream = this.GetType().getResourceAsStream("compressed.dic"); + Dictionary dictionary = new Dictionary(affixStream, dictStream); + assertEquals(3, dictionary.LookupSuffix(new char[] { 'e' }, 0, 1).Length); + assertEquals(1, dictionary.LookupPrefix(new char[] { 's' }, 0, 1).Length); + Int32sRef ordList = dictionary.LookupWord(new char[] { 'o', 'l', 'r' }, 0, 3); + BytesRef @ref = new BytesRef(); + dictionary.flagLookup.Get(ordList.Int32s[0], @ref); + char[] flags = Dictionary.DecodeFlags(@ref); + assertEquals(1, flags.Length); } // malformed rule causes ParseException [Test] public virtual void 
TestInvalidData() { - using (System.IO.Stream affixStream = this.GetType().getResourceAsStream("broken.aff")) + using Stream affixStream = this.GetType().getResourceAsStream("broken.aff"); + using Stream dictStream = this.GetType().getResourceAsStream("simple.dic"); + try + { + new Dictionary(affixStream, dictStream); + fail("didn't get expected exception"); + } + catch (Exception expected) { - using (System.IO.Stream dictStream = this.GetType().getResourceAsStream("simple.dic")) - { - - try - { - new Dictionary(affixStream, dictStream); - fail("didn't get expected exception"); - } - catch (Exception expected) - { - assertTrue(expected.Message.StartsWith("The affix file contains a rule with less than four elements", StringComparison.Ordinal)); - //assertEquals(24, expected.ErrorOffset); // No parse exception in LUCENENET - } - } + assertTrue(expected.Message.StartsWith("The affix file contains a rule with less than four elements", StringComparison.Ordinal)); + //assertEquals(24, expected.ErrorOffset); // No parse exception in LUCENENET } } @@ -145,20 +121,16 @@ public virtual void TestInvalidData() [Test] public virtual void TestInvalidFlags() { - using (System.IO.Stream affixStream = this.GetType().getResourceAsStream("broken-flags.aff")) + using Stream affixStream = this.GetType().getResourceAsStream("broken-flags.aff"); + using Stream dictStream = this.GetType().getResourceAsStream("simple.dic"); + try + { + new Dictionary(affixStream, dictStream); + fail("didn't get expected exception"); + } + catch (Exception expected) { - using (System.IO.Stream dictStream = this.GetType().getResourceAsStream("simple.dic")) - { - try - { - new Dictionary(affixStream, dictStream); - fail("didn't get expected exception"); - } - catch (Exception expected) - { - assertTrue(expected.Message.StartsWith("expected only one flag", StringComparison.Ordinal)); - } - } + assertTrue(expected.Message.StartsWith("expected only one flag", StringComparison.Ordinal)); } } diff --git 
a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/BaseTokenStreamFactoryTestCase.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/BaseTokenStreamFactoryTestCase.cs index d37208f106..8d965ac4bd 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/BaseTokenStreamFactoryTestCase.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/BaseTokenStreamFactoryTestCase.cs @@ -77,18 +77,18 @@ private AbstractAnalysisFactory AnalysisFactory(Type clazz, Version matchVersion catch (TargetInvocationException e) { // to simplify tests that check for illegal parameters - if (e.InnerException is ArgumentException) + if (e.InnerException is ArgumentException argumentException) { - throw (ArgumentException)e.InnerException; + throw argumentException; } else { throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details) } } - if (factory is IResourceLoaderAware) + if (factory is IResourceLoaderAware resourceLoaderAware) { - ((IResourceLoaderAware)factory).Inform(loader); + resourceLoaderAware.Inform(loader); } return factory; } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestFilesystemResourceLoader.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestFilesystemResourceLoader.cs index dfbbe3a7a0..ec29080abf 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestFilesystemResourceLoader.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestFilesystemResourceLoader.cs @@ -58,14 +58,12 @@ private void assertClasspathDelegation(IResourceLoader rl) string englishStopFile = "english_stop.txt"; var file = CreateTempFile(System.IO.Path.GetFileNameWithoutExtension(englishStopFile), System.IO.Path.GetExtension(englishStopFile)); using (var stream = typeof(Snowball.SnowballFilter).FindAndGetManifestResourceStream(englishStopFile)) + using (var outputStream = new FileStream(file.FullName, FileMode.OpenOrCreate, 
FileAccess.Write)) { - using (var outputStream = new FileStream(file.FullName, FileMode.OpenOrCreate, FileAccess.Write)) - { - stream.CopyTo(outputStream); - } + stream.CopyTo(outputStream); } // try a stopwords file from classpath - CharArraySet set = WordlistLoader.GetSnowballWordSet(new System.IO.StreamReader(rl.OpenResource(file.FullName), Encoding.UTF8), TEST_VERSION_CURRENT); + CharArraySet set = WordlistLoader.GetSnowballWordSet(new StreamReader(rl.OpenResource(file.FullName), Encoding.UTF8), TEST_VERSION_CURRENT); assertTrue(set.contains("you")); // try to load a class; we use string comparison because classloader may be different... assertEquals("Lucene.Net.Analysis.Util.RollingCharBuffer", rl.NewInstance("Lucene.Net.Analysis.Util.RollingCharBuffer").ToString()); @@ -79,7 +77,7 @@ public virtual void TestBaseDir() DirectoryInfo @base = CreateTempDir("fsResourceLoaderBase"); try { - TextWriter os = new System.IO.StreamWriter(new System.IO.FileStream(System.IO.Path.Combine(@base.FullName, "template.txt"), System.IO.FileMode.Create, System.IO.FileAccess.Write), Encoding.UTF8); + TextWriter os = new StreamWriter(new FileStream(System.IO.Path.Combine(@base.FullName, "template.txt"), FileMode.Create, FileAccess.Write), Encoding.UTF8); try { os.Write("foobar\n"); diff --git a/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyAnalyzer.cs index 1d68aca786..a621b4317f 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyAnalyzer.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyAnalyzer.cs @@ -32,11 +32,6 @@ namespace Lucene.Net.Collation public class TestCollationKeyAnalyzer : CollationTestBase { public TestCollationKeyAnalyzer() - { - InitializeInstanceFields(); - } - - private void InitializeInstanceFields() { this.analyzer = new CollationKeyAnalyzer(LuceneTestCase.TEST_VERSION_CURRENT, this.collator); this.firstRangeBeginning = 
new BytesRef(this.collator.GetSortKey(this.FirstRangeBeginningOriginal).KeyData); @@ -153,7 +148,7 @@ private Collator GetCollator(params string[] localeNames) .Select(x => new Locale(x)) .FirstOrDefault(x => availableCollationLocales.Contains(x.Id)); - if (firstAvailableLocale == default(Locale)) + if (firstAvailableLocale == default) throw new ArgumentException($"None of the locales are available: {string.Join(", ", localeNames)}"); Collator collator = Collator.Create(firstAvailableLocale.Id); diff --git a/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyFilter.cs index 3224cf4d01..c40476a1ff 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyFilter.cs @@ -34,11 +34,6 @@ namespace Lucene.Net.Collation public class TestCollationKeyFilter : CollationTestBase { public TestCollationKeyFilter() - { - InitializeInstanceFields(); - } - - private void InitializeInstanceFields() { this.analyzer = new TestAnalyzer(this, this.collator); this.firstRangeBeginning = new BytesRef(this.EncodeCollationKey(this.collator.GetSortKey(this.FirstRangeBeginningOriginal).KeyData.ToSByteArray())); @@ -125,7 +120,7 @@ private Collator GetCollator(params string[] localeNames) .Select(x => new Locale(x)) .FirstOrDefault(x => availableCollationLocales.Contains(x.Id)); - if (firstMatchingLocale == default(Locale)) + if (firstMatchingLocale == default) { throw new ArgumentException($"Could not find a collator locale matching any of the following: {string.Join(", ", localeNames)}"); } diff --git a/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyFilterFactory.cs b/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyFilterFactory.cs index f952e95eee..3ba9abd366 100644 --- a/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyFilterFactory.cs +++ 
b/src/Lucene.Net.Tests.Analysis.Common/Collation/TestCollationKeyFilterFactory.cs @@ -107,7 +107,7 @@ public virtual void TestCustomRules() var allRules = RuleBasedCollator.GetAvailableCollationLocales(); var localeToUse = possiblelocales.FirstOrDefault(locl => allRules.Contains(locl.Id)); - Assert.True(localeToUse != default(Locale), "Should have found a matching collation locale given the two locales to use."); + Assert.True(localeToUse != default, "Should have found a matching collation locale given the two locales to use."); const string DIN5007_2_tailorings = "& ae , a\u0308 & AE , A\u0308" + "& oe , o\u0308 & OE , O\u0308" + "& ue , u\u0308 & UE , u\u0308"; var collationRules = Collator.GetCollationRules(localeToUse.Id); diff --git a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/Segmentation/TestICUTokenizer.cs b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/Segmentation/TestICUTokenizer.cs index 336aef829f..73c21acf16 100644 --- a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/Segmentation/TestICUTokenizer.cs +++ b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/Segmentation/TestICUTokenizer.cs @@ -338,19 +338,17 @@ public void TestRandomHugeStrings() [Test] public void TestTokenAttributes() { - using (TokenStream ts = a.GetTokenStream("dummy", "This is a test")) + using TokenStream ts = a.GetTokenStream("dummy", "This is a test"); + IScriptAttribute scriptAtt = ts.AddAttribute(); + ts.Reset(); + while (ts.IncrementToken()) { - IScriptAttribute scriptAtt = ts.AddAttribute(); - ts.Reset(); - while (ts.IncrementToken()) - { - assertEquals(UScript.Latin, scriptAtt.Code); - assertEquals(UScript.GetName(UScript.Latin), scriptAtt.GetName()); - assertEquals(UScript.GetShortName(UScript.Latin), scriptAtt.GetShortName()); - assertTrue(ts.ReflectAsString(false).Contains("script=Latin")); - } - ts.End(); + assertEquals(UScript.Latin, scriptAtt.Code); + assertEquals(UScript.GetName(UScript.Latin), scriptAtt.GetName()); + 
assertEquals(UScript.GetShortName(UScript.Latin), scriptAtt.GetShortName()); + assertTrue(ts.ReflectAsString(false).Contains("script=Latin")); } + ts.End(); } private class ThreadAnonymousHelper : ThreadJob diff --git a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUFoldingFilter.cs b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUFoldingFilter.cs index d3f94d1177..9bad6ade15 100644 --- a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUFoldingFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUFoldingFilter.cs @@ -99,14 +99,12 @@ public void TestRandomStrings() [Test] public void TestEmptyTerm() { - using (Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { Tokenizer tokenizer = new KeywordTokenizer(reader); return new TokenStreamComponents(tokenizer, new ICUFoldingFilter(tokenizer)); - })) - { - CheckOneTerm(a, "", ""); - } + }); + CheckOneTerm(a, "", ""); } } } \ No newline at end of file diff --git a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUNormalizer2CharFilter.cs b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUNormalizer2CharFilter.cs index c929e010fd..d2b3283a18 100644 --- a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUNormalizer2CharFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUNormalizer2CharFilter.cs @@ -111,29 +111,26 @@ public void TestMassiveLigature() public void DoTestMode(Normalizer2 normalizer, int maxLength, int iterations, int bufferSize) { - using (Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.KEYWORD, false)); }, initReader: (fieldName, reader) => { return new ICUNormalizer2CharFilter(reader, normalizer, bufferSize); - })) + }); + for (int i = 0; i < iterations; 
i++) { - - for (int i = 0; i < iterations; i++) + String input = TestUtil.RandomUnicodeString(Random, maxLength); + if (input.Length == 0) + { + continue; + } + String normalized = normalizer.Normalize(input); + if (normalized.Length == 0) { - String input = TestUtil.RandomUnicodeString(Random, maxLength); - if (input.Length == 0) - { - continue; - } - String normalized = normalizer.Normalize(input); - if (normalized.Length == 0) - { - continue; // MockTokenizer doesnt tokenize empty string... - } - CheckOneTerm(a, input, normalized); + continue; // MockTokenizer doesnt tokenize empty string... } + CheckOneTerm(a, input, normalized); } } @@ -235,18 +232,16 @@ public void TestRandomStrings() public void TestCuriousString() { String text = "\udb40\udc3d\uf273\ue960\u06c8\ud955\udc13\ub7fc\u0692 \u2089\u207b\u2073\u2075"; - using (Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false)); }, initReader: (fieldName, reader) => { return new ICUNormalizer2CharFilter(reader, Normalizer2.GetInstance(null, "nfkc_cf", Normalizer2Mode.Compose)); - })) + }); + for (int i = 0; i < 1000; i++) { - for (int i = 0; i < 1000; i++) - { - CheckAnalysisConsistency(Random, a, false, text); - } + CheckAnalysisConsistency(Random, a, false, text); } } @@ -426,18 +421,16 @@ public void TestCuriousMassiveString() "\uda96\udfde \u0010\ufb41\u06dd\u06d0\ue4ef\u241b \ue1a3d\ub55d=\ud8fd\udd54\ueb5f\ud844" + "\udf25 xnygolayn txnlsggei yhn \u0e5c\u0e02 \\ fornos oe epp "; - using (Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false)); }, initReader: (fieldName, reader) => { return new 
ICUNormalizer2CharFilter(reader, Normalizer2.GetInstance(null, "nfkc_cf", Normalizer2Mode.Compose)); - })) + }); + for (int i = 0; i < 25; i++) { - for (int i = 0; i < 25; i++) - { - CheckAnalysisConsistency(Random, a, false, text); - } + CheckAnalysisConsistency(Random, a, false, text); } } @@ -447,17 +440,15 @@ public void TestVeryLargeInputOfNonInertChars() { char[] text = new char[1000000]; Arrays.Fill(text, 'a'); - using (Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { return new TokenStreamComponents(new KeywordTokenizer(reader)); }, initReader: (fieldName, reader) => { return new ICUNormalizer2CharFilter(reader, Normalizer2.GetInstance(null, "nfkc_cf", Normalizer2Mode.Compose)); - })) - { - CheckAnalysisConsistency(Random, a, false, new string(text)); - } + }); + CheckAnalysisConsistency(Random, a, false, new string(text)); } } } diff --git a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUNormalizer2Filter.cs b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUNormalizer2Filter.cs index 1ee1ad3090..e0eedb3172 100644 --- a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUNormalizer2Filter.cs +++ b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUNormalizer2Filter.cs @@ -74,18 +74,16 @@ public void TestDefaults() [Test] public void TestAlternate() { - using (Analyzer a = Analysis.Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false); return new TokenStreamComponents(tokenizer, new ICUNormalizer2Filter( tokenizer, /* specify nfc with decompose to get nfd */ Normalizer2.GetInstance(null, "nfc", Normalizer2Mode.Decompose))); - })) - { - // decompose EAcute into E + combining Acute - AssertAnalyzesTo(a, "\u00E9", new String[] { "\u0065\u0301" }); - } + }); + 
// decompose EAcute into E + combining Acute + AssertAnalyzesTo(a, "\u00E9", new String[] { "\u0065\u0301" }); } /** blast some random strings through the analyzer */ @@ -98,14 +96,12 @@ public void TestRandomStrings() [Test] public void TestEmptyTerm() { - using (Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { Tokenizer tokenizer = new KeywordTokenizer(reader); return new TokenStreamComponents(tokenizer, new ICUNormalizer2Filter(tokenizer)); - })) - { - CheckOneTerm(a, "", ""); - } + }); + CheckOneTerm(a, "", ""); } } } diff --git a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUTransformFilter.cs b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUTransformFilter.cs index 3bb38e55a6..776849e695 100644 --- a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUTransformFilter.cs +++ b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/TestICUTransformFilter.cs @@ -92,27 +92,23 @@ private void CheckToken(Transliterator transform, string input, string expected) public void TestRandomStrings() { Transliterator transform = Transliterator.GetInstance("Any-Latin"); - using (Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false); return new TokenStreamComponents(tokenizer, new ICUTransformFilter(tokenizer, transform)); - })) - { - CheckRandomData(Random, a, 1000 * RandomMultiplier); - } + }); + CheckRandomData(Random, a, 1000 * RandomMultiplier); } [Test] public void TestEmptyTerm() { - using (Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + using Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => { Tokenizer tokenizer = new KeywordTokenizer(reader); return new TokenStreamComponents(tokenizer, new 
ICUTransformFilter(tokenizer, Transliterator.GetInstance("Any-Latin"))); - })) - { - CheckOneTerm(a, "", ""); - } + }); + CheckOneTerm(a, "", ""); } } } \ No newline at end of file diff --git a/src/Lucene.Net.Tests.Analysis.ICU/Collation/TestICUCollationKeyFilterFactory.cs b/src/Lucene.Net.Tests.Analysis.ICU/Collation/TestICUCollationKeyFilterFactory.cs index b2948d9e91..9223bcc2d4 100644 --- a/src/Lucene.Net.Tests.Analysis.ICU/Collation/TestICUCollationKeyFilterFactory.cs +++ b/src/Lucene.Net.Tests.Analysis.ICU/Collation/TestICUCollationKeyFilterFactory.cs @@ -290,7 +290,7 @@ internal StringMockResourceLoader(String text) public T NewInstance(String cname) { - return default(T); + return default; } public Type FindType(String cname) diff --git a/src/Lucene.Net.Tests.Analysis.Kuromoji/TestSearchMode.cs b/src/Lucene.Net.Tests.Analysis.Kuromoji/TestSearchMode.cs index 812e9f2437..bb564494e4 100644 --- a/src/Lucene.Net.Tests.Analysis.Kuromoji/TestSearchMode.cs +++ b/src/Lucene.Net.Tests.Analysis.Kuromoji/TestSearchMode.cs @@ -56,7 +56,7 @@ public void TestSearchSegmentation() // Remove comments line = Regex.Replace(line, "#.*$", ""); // Skip empty lines or comment lines - if (line.Trim() == string.Empty) + if (line.Trim().Length == 0) // LUCENENET: CA1820: Test for empty strings using string length { continue; } diff --git a/src/Lucene.Net.Tests.Analysis.Morfologik/Morfologik/TestMorfologikAnalyzer.cs b/src/Lucene.Net.Tests.Analysis.Morfologik/Morfologik/TestMorfologikAnalyzer.cs index ad8b8765bf..e65d3ea7c8 100644 --- a/src/Lucene.Net.Tests.Analysis.Morfologik/Morfologik/TestMorfologikAnalyzer.cs +++ b/src/Lucene.Net.Tests.Analysis.Morfologik/Morfologik/TestMorfologikAnalyzer.cs @@ -79,19 +79,17 @@ public void TestMultipleTokens() private void dumpTokens(String input) { - using (Analyzer a = getTestAnalyzer()) - using (TokenStream ts = a.GetTokenStream("dummy", input)) + using Analyzer a = getTestAnalyzer(); + using TokenStream ts = a.GetTokenStream("dummy", input); 
+ ts.Reset(); + + IMorphosyntacticTagsAttribute attribute = ts.GetAttribute(); + ICharTermAttribute charTerm = ts.GetAttribute(); + while (ts.IncrementToken()) { - ts.Reset(); - - IMorphosyntacticTagsAttribute attribute = ts.GetAttribute(); - ICharTermAttribute charTerm = ts.GetAttribute(); - while (ts.IncrementToken()) - { - Console.WriteLine(charTerm.ToString() + " => " + string.Format(StringFormatter.InvariantCulture, "{0}", attribute.Tags)); - } - ts.End(); + Console.WriteLine(charTerm.ToString() + " => " + string.Format(StringFormatter.InvariantCulture, "{0}", attribute.Tags)); } + ts.End(); } /** Test reuse of MorfologikFilter with leftover stems. */ @@ -167,29 +165,27 @@ private void assertPOSToken(TokenStream ts, String term, params String[] tags) [Test] public void TestPOSAttribute() { - using (Analyzer a = getTestAnalyzer()) - using (TokenStream ts = a.GetTokenStream("dummy", "liście")) - { - ts.Reset(); - assertPOSToken(ts, "liście", - "subst:sg:acc:n2", - "subst:sg:nom:n2", - "subst:sg:voc:n2"); - - assertPOSToken(ts, "liść", - "subst:pl:acc:m3", - "subst:pl:nom:m3", - "subst:pl:voc:m3"); - - assertPOSToken(ts, "list", - "subst:sg:loc:m3", - "subst:sg:voc:m3"); - - assertPOSToken(ts, "lista", - "subst:sg:dat:f", - "subst:sg:loc:f"); - ts.End(); - } + using Analyzer a = getTestAnalyzer(); + using TokenStream ts = a.GetTokenStream("dummy", "liście"); + ts.Reset(); + assertPOSToken(ts, "liście", + "subst:sg:acc:n2", + "subst:sg:nom:n2", + "subst:sg:voc:n2"); + + assertPOSToken(ts, "liść", + "subst:pl:acc:m3", + "subst:pl:nom:m3", + "subst:pl:voc:m3"); + + assertPOSToken(ts, "list", + "subst:sg:loc:m3", + "subst:sg:voc:m3"); + + assertPOSToken(ts, "lista", + "subst:sg:dat:f", + "subst:sg:loc:f"); + ts.End(); } private class MockMorfologikAnalyzer : MorfologikAnalyzer diff --git a/src/Lucene.Net.Tests.Analysis.OpenNLP/TestOpenNLPLemmatizerFilterFactory.cs b/src/Lucene.Net.Tests.Analysis.OpenNLP/TestOpenNLPLemmatizerFilterFactory.cs index 
55b466a323..309f197749 100644 --- a/src/Lucene.Net.Tests.Analysis.OpenNLP/TestOpenNLPLemmatizerFilterFactory.cs +++ b/src/Lucene.Net.Tests.Analysis.OpenNLP/TestOpenNLPLemmatizerFilterFactory.cs @@ -30,7 +30,6 @@ public class TestOpenNLPLemmatizerFilterFactory : BaseTokenStreamTestCase private static readonly String[] SENTENCE_dict_punc = { "they", "send", "he", "run", "in", "the", "evening", "." }; private static readonly String[] SENTENCE_maxent_punc = { "they", "send", "he", "runn", "in", "the", "evening", "." }; private static readonly String[] SENTENCE_posTags = { "NNP", "VBD", "PRP", "VBG", "IN", "DT", "NN", "." }; - private static readonly String SENTENCES = "They sent him running in the evening. He did not come back."; private static readonly String[] SENTENCES_dict_punc = { "they", "send", "he", "run", "in", "the", "evening", ".", "he", "do", "not", "come", "back", "." }; diff --git a/src/Lucene.Net.Tests.Analysis.OpenNLP/TestOpenNLPSentenceBreakIterator.cs b/src/Lucene.Net.Tests.Analysis.OpenNLP/TestOpenNLPSentenceBreakIterator.cs index 2d168f9410..7a5e17d33c 100644 --- a/src/Lucene.Net.Tests.Analysis.OpenNLP/TestOpenNLPSentenceBreakIterator.cs +++ b/src/Lucene.Net.Tests.Analysis.OpenNLP/TestOpenNLPSentenceBreakIterator.cs @@ -36,8 +36,8 @@ private const String TEXT = "Sentence number 1 has 6 words. Sentence number 2, 5 words. And finally, sentence number 3 has 8 words."; private static readonly String[] SENTENCES = new String[] { "Sentence number 1 has 6 words. ", "Sentence number 2, 5 words. ", "And finally, sentence number 3 has 8 words." }; - private static readonly String PADDING = " Word. Word. "; - private static readonly String sentenceModelFile = "en-test-sent.bin"; + private const String PADDING = " Word. Word. 
"; + private const String sentenceModelFile = "en-test-sent.bin"; public override void BeforeClass() { diff --git a/src/Lucene.Net.Tests.Analysis.Phonetic/Language/Bm/BeiderMorseEncoderTest.cs b/src/Lucene.Net.Tests.Analysis.Phonetic/Language/Bm/BeiderMorseEncoderTest.cs index 0cae6838b1..97ef30fc32 100644 --- a/src/Lucene.Net.Tests.Analysis.Phonetic/Language/Bm/BeiderMorseEncoderTest.cs +++ b/src/Lucene.Net.Tests.Analysis.Phonetic/Language/Bm/BeiderMorseEncoderTest.cs @@ -33,7 +33,7 @@ public class BeiderMorseEncoderTest : StringEncoderAbstractTest" + - ""; - Parser parser = new Parser(new StringReader(text)); - assertEquals("ııı", parser.Title); - assertEquals("[ş]", parser.Body); - } + using var context = new CultureContext("tr-TR"); + String text = "ııı" + + "\"ş\"" + + ""; + Parser parser = new Parser(new StringReader(text)); + assertEquals("ııı", parser.Title); + assertEquals("[ş]", parser.Body); } [Test] diff --git a/src/Lucene.Net.Tests.Benchmark/ByTask/Feeds/TrecContentSourceTest.cs b/src/Lucene.Net.Tests.Benchmark/ByTask/Feeds/TrecContentSourceTest.cs index b2b8f89a04..5a67e5dc0e 100644 --- a/src/Lucene.Net.Tests.Benchmark/ByTask/Feeds/TrecContentSourceTest.cs +++ b/src/Lucene.Net.Tests.Benchmark/ByTask/Feeds/TrecContentSourceTest.cs @@ -370,7 +370,7 @@ public void TestTrecFeedDirAllTypes() DirectoryInfo dataDir = CreateTempDir("trecFeedAllTypes"); using (var stream = GetDataFile("trecdocs.zip")) TestUtil.Unzip(stream, dataDir); - TrecContentSource tcs = new TrecContentSource(); + using TrecContentSource tcs = new TrecContentSource(); Dictionary props = new Dictionary(); props["print.props"] = "false"; props["content.source.verbose"] = "false"; diff --git a/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/WriteLineDocTaskTest.cs b/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/WriteLineDocTaskTest.cs index a93151813c..b9372467e1 100644 --- a/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/WriteLineDocTaskTest.cs +++ 
b/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/WriteLineDocTaskTest.cs @@ -418,7 +418,7 @@ public void TestMultiThreaded() for (int i = 0; i < threads.Length; i++) { line = br.ReadLine(); - assertNotNull($"line for index {i.ToString()} is missing", line); // LUCENENET specific - ensure the line is there before splitting + assertNotNull($"line for index {i} is missing", line); // LUCENENET specific - ensure the line is there before splitting String[] parts = line.Split(WriteLineDocTask.SEP).TrimEnd(); assertEquals(line, 3, parts.Length); // check that all thread names written are the same in the same line diff --git a/src/Lucene.Net.Tests.Demo/Facet/TestDistanceFacetsExample.cs b/src/Lucene.Net.Tests.Demo/Facet/TestDistanceFacetsExample.cs index a45f057527..0c875843ea 100644 --- a/src/Lucene.Net.Tests.Demo/Facet/TestDistanceFacetsExample.cs +++ b/src/Lucene.Net.Tests.Demo/Facet/TestDistanceFacetsExample.cs @@ -28,23 +28,19 @@ public class TestDistanceFacetsExample : LuceneTestCase [Test] public void TestSimple() { - using (DistanceFacetsExample example = new DistanceFacetsExample()) - { - example.Index(); - FacetResult result = example.Search(); - assertEquals("dim=field path=[] value=3 childCount=4\n < 1 km (1)\n < 2 km (2)\n < 5 km (2)\n < 10 km (3)\n", result.toString()); - } + using DistanceFacetsExample example = new DistanceFacetsExample(); + example.Index(); + FacetResult result = example.Search(); + assertEquals("dim=field path=[] value=3 childCount=4\n < 1 km (1)\n < 2 km (2)\n < 5 km (2)\n < 10 km (3)\n", result.toString()); } [Test] public void TestDrillDown() { - using (DistanceFacetsExample example = new DistanceFacetsExample()) - { - example.Index(); - TopDocs hits = example.DrillDown(DistanceFacetsExample.FIVE_KM); - assertEquals(2, hits.TotalHits); - } + using DistanceFacetsExample example = new DistanceFacetsExample(); + example.Index(); + TopDocs hits = example.DrillDown(DistanceFacetsExample.FIVE_KM); + assertEquals(2, hits.TotalHits); } } } diff 
--git a/src/Lucene.Net.Tests.Demo/Facet/TestRangeFacetsExample.cs b/src/Lucene.Net.Tests.Demo/Facet/TestRangeFacetsExample.cs index caebd0f33b..3a12ac50c1 100644 --- a/src/Lucene.Net.Tests.Demo/Facet/TestRangeFacetsExample.cs +++ b/src/Lucene.Net.Tests.Demo/Facet/TestRangeFacetsExample.cs @@ -28,23 +28,19 @@ public class TestRangeFacetsExample : LuceneTestCase [Test] public void TestSimple() { - using (RangeFacetsExample example = new RangeFacetsExample()) - { - example.Index(); - FacetResult result = example.Search(); - assertEquals("dim=timestamp path=[] value=87 childCount=3\n Past hour (4)\n Past six hours (22)\n Past day (87)\n", result.toString()); - } + using RangeFacetsExample example = new RangeFacetsExample(); + example.Index(); + FacetResult result = example.Search(); + assertEquals("dim=timestamp path=[] value=87 childCount=3\n Past hour (4)\n Past six hours (22)\n Past day (87)\n", result.toString()); } [Test] public void TestDrillDown() { - using (RangeFacetsExample example = new RangeFacetsExample()) - { - example.Index(); - TopDocs hits = example.DrillDown(example.PAST_SIX_HOURS); - assertEquals(22, hits.TotalHits); - } + using RangeFacetsExample example = new RangeFacetsExample(); + example.Index(); + TopDocs hits = example.DrillDown(example.PAST_SIX_HOURS); + assertEquals(22, hits.TotalHits); } } } diff --git a/src/Lucene.Net.Tests.Demo/TestDemo.cs b/src/Lucene.Net.Tests.Demo/TestDemo.cs index e30f8262b6..356405bc0d 100644 --- a/src/Lucene.Net.Tests.Demo/TestDemo.cs +++ b/src/Lucene.Net.Tests.Demo/TestDemo.cs @@ -72,11 +72,9 @@ public void TestIndexSearch() string fileName = Regex.Replace(file, ".*" + embeddedDocsLocation.Replace(".", @"\."), ""); string destinationPath = Path.Combine(filesDir.FullName, fileName); - using (Stream input = thisAssembly.GetManifestResourceStream(file)) - using (Stream output = new FileStream(destinationPath, FileMode.Create, FileAccess.Write)) - { - input.CopyTo(output); - } + using Stream input = 
thisAssembly.GetManifestResourceStream(file); + using Stream output = new FileStream(destinationPath, FileMode.Create, FileAccess.Write); + input.CopyTo(output); } } diff --git a/src/Lucene.Net.Tests.Expressions/JS/TestCustomFunctions.cs b/src/Lucene.Net.Tests.Expressions/JS/TestCustomFunctions.cs index 547b32ac7d..8cfcd242ce 100644 --- a/src/Lucene.Net.Tests.Expressions/JS/TestCustomFunctions.cs +++ b/src/Lucene.Net.Tests.Expressions/JS/TestCustomFunctions.cs @@ -231,7 +231,7 @@ public virtual void TestWrongNestedNotPublic() internal static string MESSAGE = "This should not happen but it happens"; - public class StaticThrowingException + public static class StaticThrowingException // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public static double Method() { diff --git a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs index 9ff7aeeeab..b235b471f9 100644 --- a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs +++ b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs @@ -366,9 +366,7 @@ public virtual void TestRandom() { if (doc.dims[j] != null) { - int? v; - - if (!expectedCounts[j].TryGetValue(doc.dims[j],out v)) + if (!expectedCounts[j].TryGetValue(doc.dims[j], out int? 
v)) { expectedCounts[j][doc.dims[j]] = 1; } diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs index ee2732f713..18c7b05c77 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs @@ -120,27 +120,23 @@ private IOrdinalMap randomOrdinalMap() private void validate(Directory dest, Directory src, IOrdinalMap ordMap) { - using (var destTr = new DirectoryTaxonomyReader(dest)) + using var destTr = new DirectoryTaxonomyReader(dest); + int destSize = destTr.Count; + using var srcTR = new DirectoryTaxonomyReader(src); + var map = ordMap.GetMap(); + + // validate taxo sizes + int srcSize = srcTR.Count; + Assert.True(destSize >= srcSize, "destination taxonomy expected to be larger than source; dest=" + destSize + " src=" + srcSize); + + // validate that all source categories exist in destination, and their + // ordinals are as expected. + for (int j = 1; j < srcSize; j++) { - int destSize = destTr.Count; - using (var srcTR = new DirectoryTaxonomyReader(src)) - { - var map = ordMap.GetMap(); - - // validate taxo sizes - int srcSize = srcTR.Count; - Assert.True(destSize >= srcSize, "destination taxonomy expected to be larger than source; dest=" + destSize + " src=" + srcSize); - - // validate that all source categories exist in destination, and their - // ordinals are as expected. 
- for (int j = 1; j < srcSize; j++) - { - FacetLabel cp = srcTR.GetPath(j); - int destOrdinal = destTr.GetOrdinal(cp); - Assert.True(destOrdinal > 0, cp + " not found in destination"); - Assert.AreEqual(destOrdinal, map[j]); - } - } + FacetLabel cp = srcTR.GetPath(j); + int destOrdinal = destTr.GetOrdinal(cp); + Assert.True(destOrdinal > 0, cp + " not found in destination"); + Assert.AreEqual(destOrdinal, map[j]); } } diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs index 168299414a..33c13886f6 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs @@ -52,6 +52,8 @@ public class TestTaxonomyFacetCounts2 : FacetTestCase private const int NUM_CHILDREN_CP_C = 5, NUM_CHILDREN_CP_D = 5; private static readonly FacetField[] CATEGORIES_A, CATEGORIES_B; private static readonly FacetField[] CATEGORIES_C, CATEGORIES_D; + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1810:Initialize reference type static fields inline", Justification = "Complexity")] static TestTaxonomyFacetCounts2() { CATEGORIES_A = new FacetField[NUM_CHILDREN_CP_A]; diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs index 7b66a839a9..eaa271a72c 100644 --- a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs +++ b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs @@ -283,8 +283,7 @@ public override void AddLabel(FacetLabel label, int ordinal) public override int GetOrdinal(FacetLabel label) { - int value; - if (map.TryGetValue(label, out value)) + if (map.TryGetValue(label, out int value)) { return value; } diff --git a/src/Lucene.Net.Tests.Grouping/DistinctValuesCollectorTest.cs 
b/src/Lucene.Net.Tests.Grouping/DistinctValuesCollectorTest.cs index f0b0560c71..d3d7bc38dd 100644 --- a/src/Lucene.Net.Tests.Grouping/DistinctValuesCollectorTest.cs +++ b/src/Lucene.Net.Tests.Grouping/DistinctValuesCollectorTest.cs @@ -302,9 +302,9 @@ private void PrintGroups(List> groupToCounts; - if (!searchTermToGroupCounts.TryGetValue(content, out groupToCounts)) + if (!searchTermToGroupCounts.TryGetValue(content, out var groupToCounts)) { // Groups sort always DOCID asc... searchTermToGroupCounts.Add(content, groupToCounts = new JCG.LinkedDictionary>()); contentStrings.Add(content); } - ISet countsVals; - if (!groupToCounts.TryGetValue(groupValue, out countsVals)) + if (!groupToCounts.TryGetValue(groupValue, out var countsVals)) { groupToCounts.Add(groupValue, countsVals = new JCG.HashSet()); } diff --git a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs index 07d085c2ea..7a7a2ad0c3 100644 --- a/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs +++ b/src/Lucene.Net.Tests.Grouping/GroupFacetCollectorTest.cs @@ -689,7 +689,7 @@ private IndexContext CreateIndexContext(bool multipleFacetValuesPerDocument) if (Verbose) { - Console.WriteLine(" doc content=" + contentStr + " group=" + (groupValue == null ? "null" : groupValue) + " facetVals=" + Collections.ToString(facetVals)); + Console.WriteLine(" doc content=" + contentStr + " group=" + (groupValue ?? 
"null") + " facetVals=" + Collections.ToString(facetVals)); } if (groupValue != null) @@ -732,8 +732,7 @@ private IndexContext CreateIndexContext(bool multipleFacetValuesPerDocument) private GroupedFacetResult CreateExpectedFacetResult(string searchTerm, IndexContext context, int offset, int limit, int minCount, bool orderByCount, string facetPrefix) { - JCG.Dictionary> facetGroups; - if (!context.searchTermToFacetGroups.TryGetValue(searchTerm, out facetGroups)) + if (!context.searchTermToFacetGroups.TryGetValue(searchTerm, out var facetGroups)) { facetGroups = new JCG.Dictionary>(); } diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs index 9f615ceb4b..66d2194faf 100644 --- a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs +++ b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs @@ -2167,10 +2167,18 @@ public override void End() protected override void Dispose(bool disposing) { - base.Dispose(disposing); - if (disposing) + try { - this.realStream.Dispose(); + if (disposing) + { + this.realStream.Dispose(); + this.st?.Dispose(); + this.st = null; + } + } + finally + { + base.Dispose(disposing); } } } diff --git a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs index ecd5cdc1e2..29f7f8d190 100644 --- a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs +++ b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs @@ -249,7 +249,7 @@ public void TestInsideBooleanQuery() bq.Add(joinQuery, Occur.SHOULD); bq.Add(new TermQuery(new Term("id", "3")), Occur.SHOULD); - indexSearcher.Search(bq, new CollectorAnonymousInnerClassHelper(this)); + indexSearcher.Search(bq, new CollectorAnonymousInnerClassHelper()); indexSearcher.IndexReader.Dispose(); dir.Dispose(); @@ -257,13 +257,6 @@ public void TestInsideBooleanQuery() private class CollectorAnonymousInnerClassHelper : ICollector { - private readonly TestJoinUtil outerInstance; - - public 
CollectorAnonymousInnerClassHelper(TestJoinUtil outerInstance) - { - this.outerInstance = outerInstance; - } - internal bool sawFive; public virtual void SetNextReader(AtomicReaderContext context) @@ -470,7 +463,7 @@ private void ExecuteRandomJoin(bool multipleValuesPerDocument, int maxIndexIter, FixedBitSet actualResult = new FixedBitSet(indexSearcher.IndexReader.MaxDoc); TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.Create(10, false); indexSearcher.Search(joinQuery, - new CollectorAnonymousInnerClassHelper2(this, scoreDocsInOrder, context, actualResult, + new CollectorAnonymousInnerClassHelper2(scoreDocsInOrder, actualResult, topScoreDocCollector)); // Asserting bit set... if (Verbose) @@ -525,20 +518,15 @@ private void ExecuteRandomJoin(bool multipleValuesPerDocument, int maxIndexIter, private class CollectorAnonymousInnerClassHelper2 : ICollector { - private readonly TestJoinUtil outerInstance; - private bool scoreDocsInOrder; - private IndexIterationContext context; private FixedBitSet actualResult; private TopScoreDocCollector topScoreDocCollector; - public CollectorAnonymousInnerClassHelper2(TestJoinUtil outerInstance, bool scoreDocsInOrder, - IndexIterationContext context, FixedBitSet actualResult, + public CollectorAnonymousInnerClassHelper2(bool scoreDocsInOrder, + FixedBitSet actualResult, TopScoreDocCollector topScoreDocCollector) { - this.outerInstance = outerInstance; this.scoreDocsInOrder = scoreDocsInOrder; - this.context = context; this.actualResult = actualResult; this.topScoreDocCollector = topScoreDocCollector; } @@ -690,12 +678,12 @@ private IndexIterationContext CreateContext(int nDocs, RandomIndexWriter fromWri if (multipleValuesPerDocument) { fromSearcher.Search(new TermQuery(new Term("value", uniqueRandomValue)), - new CollectorAnonymousInnerClassHelper3(this, context, fromField, joinValueToJoinScores)); + new CollectorAnonymousInnerClassHelper3(fromField, joinValueToJoinScores)); } else { fromSearcher.Search(new 
TermQuery(new Term("value", uniqueRandomValue)), - new CollectorAnonymousInnerClassHelper4(this, context, fromField, joinValueToJoinScores)); + new CollectorAnonymousInnerClassHelper4(fromField, joinValueToJoinScores)); } IDictionary docToJoinScore = new Dictionary(); @@ -738,14 +726,14 @@ private IndexIterationContext CreateContext(int nDocs, RandomIndexWriter fromWri else { toSearcher.Search(new MatchAllDocsQuery(), - new CollectorAnonymousInnerClassHelper5(this, context, toField, joinValueToJoinScores, + new CollectorAnonymousInnerClassHelper5(toField, joinValueToJoinScores, docToJoinScore)); } } else { toSearcher.Search(new MatchAllDocsQuery(), - new CollectorAnonymousInnerClassHelper6(this, toField, joinValueToJoinScores, + new CollectorAnonymousInnerClassHelper6(toField, joinValueToJoinScores, docToJoinScore)); } queryVals[uniqueRandomValue] = docToJoinScore; @@ -759,18 +747,12 @@ private IndexIterationContext CreateContext(int nDocs, RandomIndexWriter fromWri private class CollectorAnonymousInnerClassHelper3 : ICollector { - private readonly TestJoinUtil outerInstance; - - private readonly IndexIterationContext context; private readonly string fromField; private readonly IDictionary joinValueToJoinScores; - public CollectorAnonymousInnerClassHelper3(TestJoinUtil outerInstance, - IndexIterationContext context, string fromField, + public CollectorAnonymousInnerClassHelper3(string fromField, IDictionary joinValueToJoinScores) { - this.outerInstance = outerInstance; - this.context = context; this.fromField = fromField; this.joinValueToJoinScores = joinValueToJoinScores; joinValue = new BytesRef(); @@ -811,18 +793,12 @@ public virtual void SetScorer(Scorer scorer) private class CollectorAnonymousInnerClassHelper4 : ICollector { - private readonly TestJoinUtil outerInstance; - - private readonly IndexIterationContext context; private readonly string fromField; private readonly IDictionary joinValueToJoinScores; - public 
CollectorAnonymousInnerClassHelper4(TestJoinUtil outerInstance, - IndexIterationContext context, string fromField, + public CollectorAnonymousInnerClassHelper4(string fromField, IDictionary joinValueToJoinScores) { - this.outerInstance = outerInstance; - this.context = context; this.fromField = fromField; this.joinValueToJoinScores = joinValueToJoinScores; spare = new BytesRef(); @@ -866,8 +842,6 @@ public virtual void SetScorer(Scorer scorer) private class CollectorAnonymousInnerClassHelper5 : ICollector { - private readonly TestJoinUtil outerInstance; - private readonly string toField; private readonly IDictionary joinValueToJoinScores; private readonly IDictionary docToJoinScore; @@ -876,11 +850,10 @@ private class CollectorAnonymousInnerClassHelper5 : ICollector private readonly BytesRef scratch = new BytesRef(); private int docBase; - public CollectorAnonymousInnerClassHelper5(TestJoinUtil testJoinUtil, IndexIterationContext context, + public CollectorAnonymousInnerClassHelper5( string toField, IDictionary joinValueToJoinScores, IDictionary docToJoinScore) { - outerInstance = testJoinUtil; this.toField = toField; this.joinValueToJoinScores = joinValueToJoinScores; this.docToJoinScore = docToJoinScore; @@ -922,8 +895,6 @@ public virtual void SetScorer(Scorer scorer) private class CollectorAnonymousInnerClassHelper6 : ICollector { - private readonly TestJoinUtil outerInstance; - private readonly string toField; private readonly IDictionary joinValueToJoinScores; private readonly IDictionary docToJoinScore; @@ -932,12 +903,11 @@ private class CollectorAnonymousInnerClassHelper6 : ICollector private int docBase; private readonly BytesRef spare = new BytesRef(); - public CollectorAnonymousInnerClassHelper6(TestJoinUtil testJoinUtil, + public CollectorAnonymousInnerClassHelper6( string toField, IDictionary joinValueToJoinScores, IDictionary docToJoinScore) { - outerInstance = testJoinUtil; this.toField = toField; this.joinValueToJoinScores = joinValueToJoinScores; 
this.docToJoinScore = docToJoinScore; diff --git a/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs b/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs index 4beacb08f1..ada508a1ab 100644 --- a/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs +++ b/src/Lucene.Net.Tests.Misc/Index/Sorter/SorterTestBase.cs @@ -192,25 +192,23 @@ public void CreateIndex(Directory dir, int numDocs, Random random) IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); conf.SetMaxBufferedDocs(4); // create some segments conf.SetSimilarity(new NormsSimilarity(conf.Similarity)); // for testing norms field - using (RandomIndexWriter writer = new RandomIndexWriter(random, dir, conf)) + using RandomIndexWriter writer = new RandomIndexWriter(random, dir, conf); + writer.DoRandomForceMerge = (false); + foreach (int id in ids) { - writer.DoRandomForceMerge = (false); - foreach (int id in ids) - { - writer.AddDocument(Doc(id, positions)); - } - // delete some documents - writer.Commit(); - foreach (int id in ids) + writer.AddDocument(Doc(id, positions)); + } + // delete some documents + writer.Commit(); + foreach (int id in ids) + { + if (random.NextDouble() < 0.2) { - if (random.NextDouble() < 0.2) + if (Verbose) { - if (Verbose) - { - Console.WriteLine("delete doc_id " + id); - } - writer.DeleteDocuments(new Term(ID_FIELD, id.ToString())); + Console.WriteLine("delete doc_id " + id); } + writer.DeleteDocuments(new Term(ID_FIELD, id.ToString())); } } } diff --git a/src/Lucene.Net.Tests.Misc/Index/TestPKIndexSplitter.cs b/src/Lucene.Net.Tests.Misc/Index/TestPKIndexSplitter.cs index 4a1cc0655c..81ee10c6b0 100644 --- a/src/Lucene.Net.Tests.Misc/Index/TestPKIndexSplitter.cs +++ b/src/Lucene.Net.Tests.Misc/Index/TestPKIndexSplitter.cs @@ -33,73 +33,61 @@ public void TestSplit() { string format = "{0:000000000}"; IndexWriter w; - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + using (w = new IndexWriter(dir, 
NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)) + .SetOpenMode(OpenMode.CREATE).SetMergePolicy(NoMergePolicy.COMPOUND_FILES))) { - using (w = new IndexWriter(dir, NewIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)) - .SetOpenMode(OpenMode.CREATE).SetMergePolicy(NoMergePolicy.COMPOUND_FILES))) + for (int x = 0; x < 11; x++) { - for (int x = 0; x < 11; x++) - { - Document doc = CreateDocument(x, "1", 3, format); - w.AddDocument(doc); - if (x % 3 == 0) w.Commit(); - } - for (int x = 11; x < 20; x++) - { - Document doc = CreateDocument(x, "2", 3, format); - w.AddDocument(doc); - if (x % 3 == 0) w.Commit(); - } + Document doc = CreateDocument(x, "1", 3, format); + w.AddDocument(doc); + if (x % 3 == 0) w.Commit(); } + for (int x = 11; x < 20; x++) + { + Document doc = CreateDocument(x, "2", 3, format); + w.AddDocument(doc); + if (x % 3 == 0) w.Commit(); + } + } - Term midTerm = new Term("id", string.Format(CultureInfo.InvariantCulture, format, 11)); + Term midTerm = new Term("id", string.Format(CultureInfo.InvariantCulture, format, 11)); - CheckSplitting(dir, midTerm, 11, 9); + CheckSplitting(dir, midTerm, 11, 9); - // delete some documents - using (w = new IndexWriter(dir, NewIndexWriterConfig( - - TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)) - .SetOpenMode(OpenMode.APPEND).SetMergePolicy(NoMergePolicy.COMPOUND_FILES))) - { - w.DeleteDocuments(midTerm); - w.DeleteDocuments(new Term("id", string.Format(CultureInfo.InvariantCulture, format, 2))); - } + // delete some documents + using (w = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)) + .SetOpenMode(OpenMode.APPEND).SetMergePolicy(NoMergePolicy.COMPOUND_FILES))) + { + w.DeleteDocuments(midTerm); + w.DeleteDocuments(new Term("id", string.Format(CultureInfo.InvariantCulture, format, 2))); + } - 
CheckSplitting(dir, midTerm, 10, 8); - } + CheckSplitting(dir, midTerm, 10, 8); } private void CheckSplitting(Directory dir, Term splitTerm, int leftCount, int rightCount) { - using (Directory dir1 = NewDirectory()) - { - using (Directory dir2 = NewDirectory()) - { - PKIndexSplitter splitter = new PKIndexSplitter(dir, dir1, dir2, splitTerm, - NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)), - NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); - splitter.Split(); + using Directory dir1 = NewDirectory(); + using Directory dir2 = NewDirectory(); + PKIndexSplitter splitter = new PKIndexSplitter(dir, dir1, dir2, splitTerm, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)), + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); + splitter.Split(); - using (IndexReader ir1 = DirectoryReader.Open(dir1)) - { - using (IndexReader ir2 = DirectoryReader.Open(dir2)) - { - assertEquals(leftCount, ir1.NumDocs); - assertEquals(rightCount, ir2.NumDocs); + using IndexReader ir1 = DirectoryReader.Open(dir1); + using IndexReader ir2 = DirectoryReader.Open(dir2); + assertEquals(leftCount, ir1.NumDocs); + assertEquals(rightCount, ir2.NumDocs); - CheckContents(ir1, "1"); - CheckContents(ir2, "2"); - - } - } - } - } + CheckContents(ir1, "1"); + CheckContents(ir2, "2"); } private void CheckContents(IndexReader ir, string indexname) diff --git a/src/Lucene.Net.Tests.Queries/BooleanFilterTest.cs b/src/Lucene.Net.Tests.Queries/BooleanFilterTest.cs index a9eb77620e..6653f9cd53 100644 --- a/src/Lucene.Net.Tests.Queries/BooleanFilterTest.cs +++ b/src/Lucene.Net.Tests.Queries/BooleanFilterTest.cs @@ -91,17 +91,11 @@ private Filter GetWrappedTermQuery(string field, string text) private Filter GetEmptyFilter() { - return new AnonymousFilter(this); + return new AnonymousFilter(); } private sealed class AnonymousFilter : Filter { - public AnonymousFilter(BooleanFilterTest parent) - { - this.parent = parent; - } - - 
private readonly BooleanFilterTest parent; public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs) { return new FixedBitSet(context.AtomicReader.MaxDoc); @@ -110,17 +104,11 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo private Filter GetNullDISFilter() { - return new AnonymousFilter1(this); + return new AnonymousFilter1(); } private sealed class AnonymousFilter1 : Filter { - public AnonymousFilter1(BooleanFilterTest parent) - { - this.parent = parent; - } - - private readonly BooleanFilterTest parent; public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs) { return null; @@ -129,7 +117,7 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo private Filter GetNullDISIFilter() { - return new AnonymousFilter2(this); + return new AnonymousFilter2(); } private sealed class AnonymousDocIdSet : DocIdSet @@ -144,12 +132,6 @@ public override DocIdSetIterator GetIterator() private sealed class AnonymousFilter2 : Filter { - public AnonymousFilter2(BooleanFilterTest parent) - { - this.parent = parent; - } - - private readonly BooleanFilterTest parent; public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs) { return new AnonymousDocIdSet(); diff --git a/src/Lucene.Net.Tests.Queries/CommonTermsQueryTest.cs b/src/Lucene.Net.Tests.Queries/CommonTermsQueryTest.cs index d1292191f3..320e8e290d 100644 --- a/src/Lucene.Net.Tests.Queries/CommonTermsQueryTest.cs +++ b/src/Lucene.Net.Tests.Queries/CommonTermsQueryTest.cs @@ -417,8 +417,8 @@ public void TestRandomIndex() AtomicReader wrapper = SlowCompositeReaderWrapper.Wrap(reader); string field = @"body"; Terms terms = wrapper.GetTerms(field); - var lowFreqQueue = new AnonymousPriorityQueue(this, 5); - Util.PriorityQueue highFreqQueue = new AnonymousPriorityQueue1(this, 5); + var lowFreqQueue = new AnonymousPriorityQueue(5); + Util.PriorityQueue highFreqQueue = new 
AnonymousPriorityQueue1(5); try { TermsEnum iterator = terms.GetEnumerator(); @@ -502,13 +502,11 @@ public void TestRandomIndex() private sealed class AnonymousPriorityQueue : Util.PriorityQueue { - public AnonymousPriorityQueue(CommonTermsQueryTest parent, int maxSize) + public AnonymousPriorityQueue(int maxSize) : base(maxSize) { - this.parent = parent; } - private readonly CommonTermsQueryTest parent; protected internal override bool LessThan(TermAndFreq a, TermAndFreq b) { return a.freq > b.freq; @@ -517,13 +515,11 @@ protected internal override bool LessThan(TermAndFreq a, TermAndFreq b) private sealed class AnonymousPriorityQueue1 : Util.PriorityQueue { - public AnonymousPriorityQueue1(CommonTermsQueryTest parent, int maxSize) + public AnonymousPriorityQueue1(int maxSize) : base(maxSize) { - this.parent = parent; } - private readonly CommonTermsQueryTest parent; protected internal override bool LessThan(TermAndFreq a, TermAndFreq b) { return a.freq < b.freq; diff --git a/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs b/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs index f85cd10731..163f82e7df 100644 --- a/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs +++ b/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs @@ -101,16 +101,13 @@ internal CustomAddQuery(Query q, FunctionQuery qValSrc) : base(q, qValSrc) protected override CustomScoreProvider GetCustomScoreProvider(AtomicReaderContext context) { - return new CustomScoreProviderAnonymousInnerClassHelper(this, context); + return new CustomScoreProviderAnonymousInnerClassHelper(context); } private class CustomScoreProviderAnonymousInnerClassHelper : CustomScoreProvider { - private readonly CustomAddQuery outerInstance; - - public CustomScoreProviderAnonymousInnerClassHelper(CustomAddQuery outerInstance, AtomicReaderContext context) : base(context) + public CustomScoreProviderAnonymousInnerClassHelper(AtomicReaderContext context) : base(context) { - this.outerInstance = outerInstance; } public 
override float CustomScore(int doc, float subQueryScore, float valSrcScore) @@ -144,16 +141,13 @@ internal CustomMulAddQuery(Query q, FunctionQuery qValSrc1, FunctionQuery qValSr protected override CustomScoreProvider GetCustomScoreProvider(AtomicReaderContext context) { - return new CustomScoreProviderAnonymousInnerClassHelper(this, context); + return new CustomScoreProviderAnonymousInnerClassHelper(context); } private class CustomScoreProviderAnonymousInnerClassHelper : CustomScoreProvider { - private readonly CustomMulAddQuery outerInstance; - - public CustomScoreProviderAnonymousInnerClassHelper(CustomMulAddQuery outerInstance, AtomicReaderContext context) : base(context) + public CustomScoreProviderAnonymousInnerClassHelper(AtomicReaderContext context) : base(context) { - this.outerInstance = outerInstance; } public override float CustomScore(int doc, float subQueryScore, float[] valSrcScores) @@ -195,23 +189,18 @@ public override Explanation CustomExplain(int doc, Explanation subQueryExpl, Exp private sealed class CustomExternalQuery : CustomScoreQuery { - private readonly TestCustomScoreQuery outerInstance; - protected override CustomScoreProvider GetCustomScoreProvider(AtomicReaderContext context) { FieldCache.Int32s values = FieldCache.DEFAULT.GetInt32s(context.AtomicReader, INT_FIELD, false); - return new CustomScoreProviderAnonymousInnerClassHelper(this, context, values); + return new CustomScoreProviderAnonymousInnerClassHelper(context, values); } private class CustomScoreProviderAnonymousInnerClassHelper : CustomScoreProvider { - private readonly CustomExternalQuery outerInstance; - private FieldCache.Int32s values; - public CustomScoreProviderAnonymousInnerClassHelper(CustomExternalQuery outerInstance, AtomicReaderContext context, FieldCache.Int32s values) : base(context) + public CustomScoreProviderAnonymousInnerClassHelper(AtomicReaderContext context, FieldCache.Int32s values) : base(context) { - this.outerInstance = outerInstance; this.values = 
values; } @@ -222,9 +211,8 @@ public override float CustomScore(int doc, float subScore, float valSrcScore) } } - public CustomExternalQuery(TestCustomScoreQuery outerInstance, Query q) : base(q) + public CustomExternalQuery(Query q) : base(q) { - this.outerInstance = outerInstance; } } @@ -236,7 +224,7 @@ public virtual void TestCustomExternalQuery() q1.Add(new TermQuery(new Term(TEXT_FIELD, "aid")), Occur.SHOULD); q1.Add(new TermQuery(new Term(TEXT_FIELD, "text")), Occur.SHOULD); - Query q = new CustomExternalQuery(this, q1); + Query q = new CustomExternalQuery(q1); Log(q); IndexReader r = DirectoryReader.Open(dir); diff --git a/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs index dc1f9efbe6..0caca62fd7 100644 --- a/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs +++ b/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs @@ -326,12 +326,10 @@ private bool IsAHit(Query q, string content, Analyzer analyzer) doc.Add(field); writer.AddDocument(doc); } - using (DirectoryReader ir = DirectoryReader.Open(ramDir)) - { - IndexSearcher @is = new IndexSearcher(ir); + using DirectoryReader ir = DirectoryReader.Open(ramDir); + IndexSearcher @is = new IndexSearcher(ir); - hits = @is.Search(q, 10).TotalHits; - } + hits = @is.Search(q, 10).TotalHits; } if (hits == 1) { diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs index ef44def1f0..8132c8bafd 100644 --- a/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs +++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs @@ -310,26 +310,22 @@ public void TestAnalyzerReturningNull() public virtual void TestStopWordSearching() { Analyzer analyzer = new MockAnalyzer(Random); - using (var ramDir = NewDirectory()) + using var ramDir = NewDirectory(); + using 
(IndexWriter iw = new IndexWriter(ramDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer))) { - using (IndexWriter iw = new IndexWriter(ramDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer))) - { - Document doc = new Document(); - doc.Add(NewTextField("body", "blah the footest blah", Field.Store.NO)); - iw.AddDocument(doc); - } - - MultiFieldQueryParser mfqp = - new MultiFieldQueryParser(TEST_VERSION_CURRENT, new string[] { "body" }, analyzer); - mfqp.DefaultOperator = Operator.AND; - Query q = mfqp.Parse("the footest"); - using (IndexReader ir = DirectoryReader.Open(ramDir)) - { - IndexSearcher @is = NewSearcher(ir); - ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs; - assertEquals(1, hits.Length); - } + Document doc = new Document(); + doc.Add(NewTextField("body", "blah the footest blah", Field.Store.NO)); + iw.AddDocument(doc); } + + MultiFieldQueryParser mfqp = + new MultiFieldQueryParser(TEST_VERSION_CURRENT, new string[] { "body" }, analyzer); + mfqp.DefaultOperator = Operator.AND; + Query q = mfqp.Parse("the footest"); + using IndexReader ir = DirectoryReader.Open(ramDir); + IndexSearcher @is = NewSearcher(ir); + ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs; + assertEquals(1, hits.Length); } private class AnalyzerReturningNull : Analyzer @@ -340,7 +336,7 @@ public AnalyzerReturningNull() : base(PER_FIELD_REUSE_STRATEGY) { } - protected internal override System.IO.TextReader InitReader(string fieldName, TextReader reader) + protected internal override TextReader InitReader(string fieldName, TextReader reader) { if ("f1".Equals(fieldName, StringComparison.Ordinal)) { @@ -359,6 +355,23 @@ protected internal override TokenStreamComponents CreateComponents(string fieldN { return stdAnalyzer.CreateComponents(fieldName, reader); } + + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + stdAnalyzer?.Dispose(); // LUCENENET specific - dispose stdAnalyzer and set to null + stdAnalyzer 
= null; + } + } + finally + { + base.Dispose(disposing); + } + } } [Test] diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestMultiFieldQPHelper.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestMultiFieldQPHelper.cs index a70f03d650..eeee49b301 100644 --- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestMultiFieldQPHelper.cs +++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestMultiFieldQPHelper.cs @@ -427,6 +427,23 @@ protected internal override TokenStreamComponents CreateComponents(String fieldN { return stdAnalyzer.CreateComponents(fieldName, reader); } + + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + stdAnalyzer?.Dispose(); // LUCENENET specific - dispose stdAnalyzer and set to null + stdAnalyzer = null; + } + } + finally + { + base.Dispose(disposing); + } + } } } } diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs index 090857ba07..db78f57712 100644 --- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs +++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs @@ -71,9 +71,8 @@ public enum NumberType private static bool checkDateFormatSanity(/*DateFormat*/string dateFormat, long date) { - DateTime result; return DateTime.TryParseExact(new DateTime(NumberDateFormat.EPOCH).AddMilliseconds(date).ToString(dateFormat), - dateFormat, CultureInfo.CurrentCulture, DateTimeStyles.RoundtripKind, out result); + dateFormat, CultureInfo.CurrentCulture, DateTimeStyles.RoundtripKind, out DateTime _); } [OneTimeSetUp] diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs index 6e6042e68e..a8bd471536 100644 --- a/src/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs +++ 
b/src/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs @@ -1,7 +1,6 @@ using Lucene.Net.Index; using Lucene.Net.Search; -using NUnit.Framework; -using System; +using System.Diagnostics.CodeAnalysis; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -51,6 +50,7 @@ public BooleanQueryTst( public virtual string QueryText => this.queryText; + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")] public virtual int[] ExpectedDocNrs => this.expectedDocNrs; internal class TestCollector : ICollector diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs index bd1b9259fd..2682eeb9e4 100644 --- a/src/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs +++ b/src/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs @@ -4,6 +4,7 @@ using Lucene.Net.Store; using Lucene.Net.Util; using System; +using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.QueryParsers.Surround.Query { @@ -35,22 +36,21 @@ public SingleFieldTestDb(Random random, string[] documents, string fName) db = new MockDirectoryWrapper(random, new RAMDirectory()); docs = documents; fieldName = fName; - using (IndexWriter writer = new IndexWriter(db, new IndexWriterConfig( + using IndexWriter writer = new IndexWriter(db, new IndexWriterConfig( #pragma warning disable 612, 618 LuceneVersion.LUCENE_CURRENT, #pragma warning restore 612, 618 - new MockAnalyzer(random)))) + new MockAnalyzer(random))); + for (int j = 0; j < docs.Length; j++) { - for (int j = 0; j < docs.Length; j++) - { - Document d = new Document(); - d.Add(new TextField(fieldName, docs[j], Field.Store.NO)); - writer.AddDocument(d); - } + Document d = new Document(); + d.Add(new TextField(fieldName, docs[j], Field.Store.NO)); + writer.AddDocument(d); } } public Directory Db => 
db; + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")] public string[] Docs => docs; public string Fieldname => fieldName; } diff --git a/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs index 67d8d10410..af1fa509bc 100644 --- a/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs +++ b/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs @@ -1191,23 +1191,17 @@ public override void TearDown() [Test] public virtual void TestPositionIncrements() { - using (Directory dir = NewDirectory()) - { - Analyzer a = new MockAnalyzer(Random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET); - using (IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, a))) - { - Document doc = new Document(); - doc.Add(NewTextField("field", "the wizard of ozzy", Field.Store.NO)); - w.AddDocument(doc); - using (IndexReader r = DirectoryReader.Open(w, true)) - { - IndexSearcher s = NewSearcher(r); - - Query q = GetQuery("\"wizard of ozzy\"", a); - assertEquals(1, s.Search(q, 1).TotalHits); - } - } - } + using Directory dir = NewDirectory(); + Analyzer a = new MockAnalyzer(Random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET); + using IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, a)); + Document doc = new Document(); + doc.Add(NewTextField("field", "the wizard of ozzy", Field.Store.NO)); + w.AddDocument(doc); + using IndexReader r = DirectoryReader.Open(w, true); + IndexSearcher s = NewSearcher(r); + + Query q = GetQuery("\"wizard of ozzy\"", a); + assertEquals(1, s.Search(q, 1).TotalHits); } /// diff --git a/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs b/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs index 734f34cc4b..c759ddc003 100644 --- a/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs +++ 
b/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs @@ -248,11 +248,9 @@ public void TestNumericRangeQueryQueryXML() private Query Parse(String xmlFileName) { - using (Stream xmlStream = typeof(TestParser).getResourceAsStream(xmlFileName)) - { - Query result = builder.Parse(xmlStream); - return result; - } + using Stream xmlStream = typeof(TestParser).getResourceAsStream(xmlFileName); + Query result = builder.Parse(xmlStream); + return result; } private void DumpResults(String qType, Query q, int numDocs) diff --git a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs index 91b1d8e6d6..0adf72efee 100644 --- a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs +++ b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs @@ -44,8 +44,10 @@ private class IndexAndTaxonomyReadyCallback : IDisposable { private readonly Directory indexDir; private readonly Directory taxoDir; +#pragma warning disable CA2213 // Disposable fields should be disposed private DirectoryReader indexReader; private DirectoryTaxonomyReader taxoReader; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly FacetsConfig config; private long lastIndexGeneration = -1; diff --git a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyRevisionTest.cs b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyRevisionTest.cs index c27b1265f6..20d978cc36 100644 --- a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyRevisionTest.cs +++ b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyRevisionTest.cs @@ -151,28 +151,26 @@ public void TestOpen() Directory dir = source.Equals(IndexAndTaxonomyRevision.INDEX_SOURCE, StringComparison.Ordinal) ? 
indexDir : taxoDir; foreach (RevisionFile file in e.Value) { - using (IndexInput src = dir.OpenInput(file.FileName, IOContext.READ_ONCE)) - using (System.IO.Stream @in = rev.Open(source, file.FileName)) + using IndexInput src = dir.OpenInput(file.FileName, IOContext.READ_ONCE); + using Stream @in = rev.Open(source, file.FileName); + assertEquals(src.Length, @in.Length); + byte[] srcBytes = new byte[(int)src.Length]; + byte[] inBytes = new byte[(int)src.Length]; + int offset = 0; + if (Random.nextBoolean()) { - assertEquals(src.Length, @in.Length); - byte[] srcBytes = new byte[(int)src.Length]; - byte[] inBytes = new byte[(int)src.Length]; - int offset = 0; - if (Random.nextBoolean()) + int skip = Random.Next(10); + if (skip >= src.Length) { - int skip = Random.Next(10); - if (skip >= src.Length) - { - skip = 0; - } - @in.Seek(skip, SeekOrigin.Current); - src.Seek(skip); - offset = skip; + skip = 0; } - src.ReadBytes(srcBytes, offset, srcBytes.Length - offset); - @in.Read(inBytes, offset, inBytes.Length - offset); - assertArrayEquals(srcBytes, inBytes); + @in.Seek(skip, SeekOrigin.Current); + src.Seek(skip); + offset = skip; } + src.ReadBytes(srcBytes, offset, srcBytes.Length - offset); + @in.Read(inBytes, offset, inBytes.Length - offset); + assertArrayEquals(srcBytes, inBytes); } } } diff --git a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs index 03bb4287d3..6b4624ce92 100644 --- a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs +++ b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs @@ -39,7 +39,9 @@ public class IndexReplicationClientTest : ReplicatorTestCase private class IndexReadyCallback : IDisposable { private readonly Directory indexDir; +#pragma warning disable CA2213 // Disposable fields should be disposed private DirectoryReader reader; +#pragma warning restore CA2213 // Disposable fields should be disposed private long lastGeneration = -1; public 
IndexReadyCallback(Directory indexDir) diff --git a/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs b/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs index 92c323d647..20492dee1e 100644 --- a/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs +++ b/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs @@ -101,162 +101,147 @@ public override void TearDown() [Test] public void TestBuild() { - using (IndexReader r = DirectoryReader.Open(userindex)) - { - spellChecker.ClearIndex(); + using IndexReader r = DirectoryReader.Open(userindex); + spellChecker.ClearIndex(); - Addwords(r, spellChecker, "field1"); - int num_field1 = this.NumDoc(); + Addwords(r, spellChecker, "field1"); + int num_field1 = this.NumDoc(); - Addwords(r, spellChecker, "field2"); - int num_field2 = this.NumDoc(); + Addwords(r, spellChecker, "field2"); + int num_field2 = this.NumDoc(); - assertEquals(num_field2, num_field1 + 1); + assertEquals(num_field2, num_field1 + 1); - AssertLastSearcherOpen(4); + AssertLastSearcherOpen(4); - CheckCommonSuggestions(r); - CheckLevenshteinSuggestions(r); + CheckCommonSuggestions(r); + CheckLevenshteinSuggestions(r); - spellChecker.StringDistance = (new JaroWinklerDistance()); - spellChecker.Accuracy = (0.8f); - CheckCommonSuggestions(r); - CheckJaroWinklerSuggestions(); - // the accuracy is set to 0.8 by default, but the best result has a score of 0.925 - string[] similar = spellChecker.SuggestSimilar("fvie", 2, 0.93f); - assertTrue(similar.Length == 0); - similar = spellChecker.SuggestSimilar("fvie", 2, 0.92f); - assertTrue(similar.Length == 1); - - similar = spellChecker.SuggestSimilar("fiv", 2); - assertTrue(similar.Length > 0); - assertEquals(similar[0], "five"); - - spellChecker.StringDistance = (new NGramDistance(2)); - spellChecker.Accuracy = (0.5f); - CheckCommonSuggestions(r); - CheckNGramSuggestions(); + spellChecker.StringDistance = (new JaroWinklerDistance()); + spellChecker.Accuracy = (0.8f); + CheckCommonSuggestions(r); + 
CheckJaroWinklerSuggestions(); + // the accuracy is set to 0.8 by default, but the best result has a score of 0.925 + string[] similar = spellChecker.SuggestSimilar("fvie", 2, 0.93f); + assertTrue(similar.Length == 0); + similar = spellChecker.SuggestSimilar("fvie", 2, 0.92f); + assertTrue(similar.Length == 1); - } + similar = spellChecker.SuggestSimilar("fiv", 2); + assertTrue(similar.Length > 0); + assertEquals(similar[0], "five"); + + spellChecker.StringDistance = (new NGramDistance(2)); + spellChecker.Accuracy = (0.5f); + CheckCommonSuggestions(r); + CheckNGramSuggestions(); } [Test] public void TestComparer() { - using (Directory compIdx = NewDirectory()) + using Directory compIdx = NewDirectory(); + SpellChecker compareSP = new SpellCheckerMock(compIdx, new LevensteinDistance(), new SuggestWordFrequencyComparer()); + try { - SpellChecker compareSP = new SpellCheckerMock(compIdx, new LevensteinDistance(), new SuggestWordFrequencyComparer()); - try - { - using (IndexReader r = DirectoryReader.Open(userindex)) - { + using IndexReader r = DirectoryReader.Open(userindex); + Addwords(r, compareSP, "field3"); - Addwords(r, compareSP, "field3"); - - string[] similar = compareSP.SuggestSimilar("fvie", 2, r, "field3", - SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); - assertTrue(similar.Length == 2); - //five and fvei have the same score, but different frequencies. - assertEquals("fvei", similar[0]); - assertEquals("five", similar[1]); - } - } - finally - { - if (!compareSP.IsDisposed) - compareSP.Dispose(); - } + string[] similar = compareSP.SuggestSimilar("fvie", 2, r, "field3", + SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); + assertTrue(similar.Length == 2); + //five and fvei have the same score, but different frequencies. 
+ assertEquals("fvei", similar[0]); + assertEquals("five", similar[1]); + } + finally + { + if (!compareSP.IsDisposed) + compareSP.Dispose(); } } [Test] public void TestBogusField() { - using (Directory compIdx = NewDirectory()) + using Directory compIdx = NewDirectory(); + SpellChecker compareSP = new SpellCheckerMock(compIdx, new LevensteinDistance(), new SuggestWordFrequencyComparer()); + try { - SpellChecker compareSP = new SpellCheckerMock(compIdx, new LevensteinDistance(), new SuggestWordFrequencyComparer()); - try - { - using (IndexReader r = DirectoryReader.Open(userindex)) - { - - Addwords(r, compareSP, "field3"); + using IndexReader r = DirectoryReader.Open(userindex); + Addwords(r, compareSP, "field3"); - string[] similar = compareSP.SuggestSimilar("fvie", 2, r, - "bogusFieldBogusField", SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); - assertEquals(0, similar.Length); - } - } - finally - { - if (!compareSP.IsDisposed) - compareSP.Dispose(); - } + string[] similar = compareSP.SuggestSimilar("fvie", 2, r, + "bogusFieldBogusField", SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); + assertEquals(0, similar.Length); + } + finally + { + if (!compareSP.IsDisposed) + compareSP.Dispose(); } } [Test] public void TestSuggestModes() { - using (IndexReader r = DirectoryReader.Open(userindex)) - { - spellChecker.ClearIndex(); - Addwords(r, spellChecker, "field1"); + using IndexReader r = DirectoryReader.Open(userindex); + spellChecker.ClearIndex(); + Addwords(r, spellChecker, "field1"); - { - string[] similar = spellChecker.SuggestSimilar("eighty", 2, r, "field1", - SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); - assertEquals(1, similar.Length); - assertEquals("eighty", similar[0]); - } + { + string[] similar = spellChecker.SuggestSimilar("eighty", 2, r, "field1", + SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); + assertEquals(1, similar.Length); + assertEquals("eighty", similar[0]); + } - { - string[] similar = spellChecker.SuggestSimilar("eight", 2, r, "field1", - 
SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); - assertEquals(1, similar.Length); - assertEquals("eight", similar[0]); - } + { + string[] similar = spellChecker.SuggestSimilar("eight", 2, r, "field1", + SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX); + assertEquals(1, similar.Length); + assertEquals("eight", similar[0]); + } - { - string[] similar = spellChecker.SuggestSimilar("eighty", 5, r, "field1", - SuggestMode.SUGGEST_MORE_POPULAR); - assertEquals(5, similar.Length); - assertEquals("eight", similar[0]); - } + { + string[] similar = spellChecker.SuggestSimilar("eighty", 5, r, "field1", + SuggestMode.SUGGEST_MORE_POPULAR); + assertEquals(5, similar.Length); + assertEquals("eight", similar[0]); + } - { - string[] similar = spellChecker.SuggestSimilar("twenty", 5, r, "field1", - SuggestMode.SUGGEST_MORE_POPULAR); - assertEquals(1, similar.Length); - assertEquals("twenty-one", similar[0]); - } + { + string[] similar = spellChecker.SuggestSimilar("twenty", 5, r, "field1", + SuggestMode.SUGGEST_MORE_POPULAR); + assertEquals(1, similar.Length); + assertEquals("twenty-one", similar[0]); + } - { - string[] similar = spellChecker.SuggestSimilar("eight", 5, r, "field1", - SuggestMode.SUGGEST_MORE_POPULAR); - assertEquals(0, similar.Length); - } + { + string[] similar = spellChecker.SuggestSimilar("eight", 5, r, "field1", + SuggestMode.SUGGEST_MORE_POPULAR); + assertEquals(0, similar.Length); + } - { - string[] similar = spellChecker.SuggestSimilar("eighty", 5, r, "field1", - SuggestMode.SUGGEST_ALWAYS); - assertEquals(5, similar.Length); - assertEquals("eight", similar[0]); - } + { + string[] similar = spellChecker.SuggestSimilar("eighty", 5, r, "field1", + SuggestMode.SUGGEST_ALWAYS); + assertEquals(5, similar.Length); + assertEquals("eight", similar[0]); + } - { - string[] similar = spellChecker.SuggestSimilar("eight", 5, r, "field1", - SuggestMode.SUGGEST_ALWAYS); - assertEquals(5, similar.Length); - assertEquals("eighty", similar[0]); - } + { + string[] similar = 
spellChecker.SuggestSimilar("eight", 5, r, "field1", + SuggestMode.SUGGEST_ALWAYS); + assertEquals(5, similar.Length); + assertEquals("eighty", similar[0]); } } private void CheckCommonSuggestions(IndexReader r) @@ -391,74 +376,72 @@ private int NumDoc() [Test] public void TestClose() { - using (IndexReader r = DirectoryReader.Open(userindex)) + using IndexReader r = DirectoryReader.Open(userindex); + spellChecker.ClearIndex(); + string field = "field1"; + Addwords(r, spellChecker, "field1"); + int num_field1 = this.NumDoc(); + Addwords(r, spellChecker, "field2"); + int num_field2 = this.NumDoc(); + assertEquals(num_field2, num_field1 + 1); + CheckCommonSuggestions(r); + AssertLastSearcherOpen(4); + spellChecker.Dispose(); + AssertSearchersClosed(); + // LUCENENET NOTE: Per MSDN, calling Dispose() multiple times + // should be a safe operation. http://stackoverflow.com/a/5306896/181087 + // Certainly, there shouldn't be a problem with calling Dispose() within + // a using block if you decide to free up resources early. + //try + //{ + // spellChecker.Dispose(); + // fail("spellchecker was already closed"); + //} + //catch (ObjectDisposedException e) + //{ + // // expected + //} + try { - spellChecker.ClearIndex(); - string field = "field1"; - Addwords(r, spellChecker, "field1"); - int num_field1 = this.NumDoc(); - Addwords(r, spellChecker, "field2"); - int num_field2 = this.NumDoc(); - assertEquals(num_field2, num_field1 + 1); CheckCommonSuggestions(r); - AssertLastSearcherOpen(4); - spellChecker.Dispose(); - AssertSearchersClosed(); - // LUCENENET NOTE: Per MSDN, calling Dispose() multiple times - // should be a safe operation. http://stackoverflow.com/a/5306896/181087 - // Certainly, there shouldn't be a problem with calling Dispose() within - // a using block if you decide to free up resources early. 
- //try - //{ - // spellChecker.Dispose(); - // fail("spellchecker was already closed"); - //} - //catch (ObjectDisposedException e) - //{ - // // expected - //} - try - { - CheckCommonSuggestions(r); - fail("spellchecker was already closed"); - } - catch (ObjectDisposedException /*e*/) - { - // expected - } + fail("spellchecker was already closed"); + } + catch (ObjectDisposedException /*e*/) + { + // expected + } - try - { - spellChecker.ClearIndex(); - fail("spellchecker was already closed"); - } - catch (ObjectDisposedException /*e*/) - { - // expected - } + try + { + spellChecker.ClearIndex(); + fail("spellchecker was already closed"); + } + catch (ObjectDisposedException /*e*/) + { + // expected + } - try - { - spellChecker.IndexDictionary(new LuceneDictionary(r, field), NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false); - fail("spellchecker was already closed"); - } - catch (ObjectDisposedException /*e*/) - { - // expected - } + try + { + spellChecker.IndexDictionary(new LuceneDictionary(r, field), NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false); + fail("spellchecker was already closed"); + } + catch (ObjectDisposedException /*e*/) + { + // expected + } - try - { - spellChecker.SetSpellIndex(spellindex); - fail("spellchecker was already closed"); - } - catch (ObjectDisposedException /*e*/) - { - // expected - } - assertEquals(4, searchers.Count); - AssertSearchersClosed(); + try + { + spellChecker.SetSpellIndex(spellindex); + fail("spellchecker was already closed"); } + catch (ObjectDisposedException /*e*/) + { + // expected + } + assertEquals(4, searchers.Count); + AssertSearchersClosed(); } /* @@ -469,58 +452,56 @@ public void TestClose() public void TestConcurrentAccess() { assertEquals(1, searchers.Count); - using (IndexReader r = DirectoryReader.Open(userindex)) + using IndexReader r = DirectoryReader.Open(userindex); + spellChecker.ClearIndex(); + assertEquals(2, searchers.Count); + Addwords(r, spellChecker, "field1"); + assertEquals(3, 
searchers.Count); + int num_field1 = this.NumDoc(); + Addwords(r, spellChecker, "field2"); + assertEquals(4, searchers.Count); + int num_field2 = this.NumDoc(); + assertEquals(num_field2, num_field1 + 1); + int numThreads = 5 + Random.nextInt(5); + SpellCheckWorker[] workers = new SpellCheckWorker[numThreads]; + var stop = new AtomicBoolean(false); + for (int i = 0; i < numThreads; i++) { - spellChecker.ClearIndex(); - assertEquals(2, searchers.Count); - Addwords(r, spellChecker, "field1"); - assertEquals(3, searchers.Count); - int num_field1 = this.NumDoc(); - Addwords(r, spellChecker, "field2"); - assertEquals(4, searchers.Count); - int num_field2 = this.NumDoc(); - assertEquals(num_field2, num_field1 + 1); - int numThreads = 5 + Random.nextInt(5); - SpellCheckWorker[] workers = new SpellCheckWorker[numThreads]; - var stop = new AtomicBoolean(false); - for (int i = 0; i < numThreads; i++) - { - SpellCheckWorker spellCheckWorker = new SpellCheckWorker(this, r, stop); - workers[i] = spellCheckWorker; - spellCheckWorker.Start(); - } - int iterations = 5 + Random.nextInt(5); - for (int i = 0; i < iterations; i++) - { - Thread.Sleep(100); - // concurrently reset the spell index - spellChecker.SetSpellIndex(this.spellindex); - // for debug - prints the internal open searchers - // showSearchersOpen(); - } + SpellCheckWorker spellCheckWorker = new SpellCheckWorker(this, r, stop); + workers[i] = spellCheckWorker; + spellCheckWorker.Start(); + } + int iterations = 5 + Random.nextInt(5); + for (int i = 0; i < iterations; i++) + { + Thread.Sleep(100); + // concurrently reset the spell index + spellChecker.SetSpellIndex(this.spellindex); + // for debug - prints the internal open searchers + // showSearchersOpen(); + } - spellChecker.Dispose(); - stop.Value = true; + spellChecker.Dispose(); + stop.Value = true; - // wait for 60 seconds - usually this is very fast but coverage runs could take quite long - //executor.awaitTermination(60L, TimeUnit.SECONDS); - foreach 
(SpellCheckWorker worker in workers) - { - worker.Join((long)TimeSpan.FromSeconds(60).TotalMilliseconds); - } + // wait for 60 seconds - usually this is very fast but coverage runs could take quite long + //executor.awaitTermination(60L, TimeUnit.SECONDS); + foreach (SpellCheckWorker worker in workers) + { + worker.Join((long)TimeSpan.FromSeconds(60).TotalMilliseconds); + } - for (int i = 0; i < workers.Length; i++) - { - assertFalse(string.Format(CultureInfo.InvariantCulture, "worker thread {0} failed \n" + workers[i].Error, i), workers[i].Error != null); - assertTrue(string.Format(CultureInfo.InvariantCulture, "worker thread {0} is still running but should be terminated", i), workers[i].terminated); - } - // 4 searchers more than iterations - // 1. at creation - // 2. clearIndex() - // 2. and 3. during addwords - assertEquals(iterations + 4, searchers.Count); - AssertSearchersClosed(); + for (int i = 0; i < workers.Length; i++) + { + assertFalse(string.Format(CultureInfo.InvariantCulture, "worker thread {0} failed \n" + workers[i].Error, i), workers[i].Error != null); + assertTrue(string.Format(CultureInfo.InvariantCulture, "worker thread {0} is still running but should be terminated", i), workers[i].terminated); } + // 4 searchers more than iterations + // 1. at creation + // 2. clearIndex() + // 2. and 3. 
during addwords + assertEquals(iterations + 4, searchers.Count); + AssertSearchersClosed(); } private void AssertLastSearcherOpen(int numSearchers) diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs index 7bc2f14906..4d76f77e9a 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs @@ -50,39 +50,36 @@ public void TestBasic() }; Analyzer a = new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false); - using (AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3)) - { - suggester.Build(new InputArrayEnumerator(keys)); - - IList results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); - assertEquals(2, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - assertEquals("lend me your ear", results[1].Key); - assertEquals(8, results[1].Value); - assertEquals(new BytesRef("foobar"), results[1].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("ear ", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("lend me your ear", results[0].Key); - assertEquals(8, results[0].Value); - assertEquals(new BytesRef("foobar"), results[0].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("pen", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("p", Random).ToString(), 10, true, true); - 
assertEquals(1, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - } + using AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3); + suggester.Build(new InputArrayEnumerator(keys)); + + IList results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); + assertEquals(2, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); + + assertEquals("lend me your ear", results[1].Key); + assertEquals(8, results[1].Value); + assertEquals(new BytesRef("foobar"), results[1].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("ear ", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("lend me your ear", results[0].Key); + assertEquals(8, results[0].Value); + assertEquals(new BytesRef("foobar"), results[0].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("pen", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("p", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); } [Test] @@ -215,17 +212,14 @@ public void TestHighlightAsObject() }; Analyzer a = new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false); - using (AnalyzingInfixSuggester suggester = new TestHighlightAnalyzingInfixSuggester(this, a)) - { - - 
suggester.Build(new InputArrayEnumerator(keys)); - - IList results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("a penny saved is a penny earned", ToString((List)results[0].HighlightKey)); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - } + using AnalyzingInfixSuggester suggester = new TestHighlightAnalyzingInfixSuggester(this, a); + suggester.Build(new InputArrayEnumerator(keys)); + + IList results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a penny saved is a penny earned", ToString((List)results[0].HighlightKey)); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); } private string ToString(IEnumerable fragments) @@ -352,13 +346,11 @@ public void TestHighlight() }; Analyzer a = new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false); - using (AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3)) - { - suggester.Build(new InputArrayEnumerator(keys)); - IList results = suggester.DoLookup(TestUtil.StringToCharSequence("penn", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - } + using AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3); + suggester.Build(new InputArrayEnumerator(keys)); + IList results = suggester.DoLookup(TestUtil.StringToCharSequence("penn", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); } internal class TestHighlightChangeCaseAnalyzingInfixSuggester : AnalyzingInfixSuggester @@ -414,11 +406,9 @@ public void TestDoubleClose() }; Analyzer a = new 
MockAnalyzer(Random, MockTokenizer.WHITESPACE, false); - using (AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3)) - { - suggester.Build(new InputArrayEnumerator(keys)); - suggester.Dispose(); - } + using AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3); + suggester.Build(new InputArrayEnumerator(keys)); + suggester.Dispose(); } [Test] @@ -438,77 +428,69 @@ public void TestSuggestStopFilter() new SuggestStopFilter(tokens, stopWords)); }); - using (AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), indexAnalyzer, queryAnalyzer, 3)) - { - - Input[] keys = new Input[] { + using AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), indexAnalyzer, queryAnalyzer, 3); + Input[] keys = new Input[] { new Input("a bob for apples", 10, new BytesRef("foobaz")), }; - suggester.Build(new InputArrayEnumerator(keys)); - IList results = suggester.DoLookup(TestUtil.StringToCharSequence("a", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("a bob for apples", results[0].Key); - } + suggester.Build(new InputArrayEnumerator(keys)); + IList results = suggester.DoLookup(TestUtil.StringToCharSequence("a", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a bob for apples", results[0].Key); } [Test] public void TestEmptyAtStart() { Analyzer a = new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false); - using (AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3)) - { - suggester.Build(new InputArrayEnumerator(new Input[0])); - suggester.Add(new BytesRef("a penny saved is a penny earned"), null, 10, new BytesRef("foobaz")); - suggester.Add(new BytesRef("lend me your ear"), null, 8, new BytesRef("foobar")); - suggester.Refresh(); - IList 
results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); - assertEquals(2, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - assertEquals("lend me your ear", results[1].Key); - assertEquals(8, results[1].Value); - assertEquals(new BytesRef("foobar"), results[1].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("ear ", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("lend me your ear", results[0].Key); - assertEquals(8, results[0].Value); - assertEquals(new BytesRef("foobar"), results[0].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("pen", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("p", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - } + using AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3); + suggester.Build(new InputArrayEnumerator(new Input[0])); + suggester.Add(new BytesRef("a penny saved is a penny earned"), null, 10, new BytesRef("foobaz")); + suggester.Add(new BytesRef("lend me your ear"), null, 8, new BytesRef("foobar")); + suggester.Refresh(); + IList results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); + assertEquals(2, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, 
results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); + + assertEquals("lend me your ear", results[1].Key); + assertEquals(8, results[1].Value); + assertEquals(new BytesRef("foobar"), results[1].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("ear ", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("lend me your ear", results[0].Key); + assertEquals(8, results[0].Value); + assertEquals(new BytesRef("foobar"), results[0].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("pen", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("p", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); } [Test] public void TestBothExactAndPrefix() { Analyzer a = new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false); - using (AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3)) - { - suggester.Build(new InputArrayEnumerator(new Input[0])); - suggester.Add(new BytesRef("the pen is pretty"), null, 10, new BytesRef("foobaz")); - suggester.Refresh(); - - IList results = suggester.DoLookup(TestUtil.StringToCharSequence("pen p", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("the pen is pretty", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - } + using AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3); + suggester.Build(new 
InputArrayEnumerator(new Input[0])); + suggester.Add(new BytesRef("the pen is pretty"), null, 10, new BytesRef("foobaz")); + suggester.Refresh(); + + IList results = suggester.DoLookup(TestUtil.StringToCharSequence("pen p", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("the pen is pretty", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); } private static string RandomText() @@ -935,65 +917,63 @@ public void TestBasicNRT() }; Analyzer a = new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false); - using (AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3)) - { - suggester.Build(new InputArrayEnumerator(keys)); - - IList results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("lend me your ear", results[0].Key); - assertEquals(8, results[0].Value); - assertEquals(new BytesRef("foobar"), results[0].Payload); - - // Add a new suggestion: - suggester.Add(new BytesRef("a penny saved is a penny earned"), null, 10, new BytesRef("foobaz")); - - // Must refresh to see any newly added suggestions: - suggester.Refresh(); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); - assertEquals(2, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - assertEquals("lend me your ear", results[1].Key); - assertEquals(8, results[1].Value); - assertEquals(new BytesRef("foobar"), results[1].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("ear ", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("lend me your ear", results[0].Key); - assertEquals(8, results[0].Value); - assertEquals(new 
BytesRef("foobar"), results[0].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("pen", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("p", Random).ToString(), 10, true, true); - assertEquals(1, results.size()); - assertEquals("a penny saved is a penny earned", results[0].Key); - assertEquals(10, results[0].Value); - assertEquals(new BytesRef("foobaz"), results[0].Payload); - - // Change the weight: - suggester.Update(new BytesRef("lend me your ear"), null, 12, new BytesRef("foobox")); - - // Must refresh to see any newly added suggestions: - suggester.Refresh(); - - results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); - assertEquals(2, results.size()); - assertEquals("lend me your ear", results[0].Key); - assertEquals(12, results[0].Value); - assertEquals(new BytesRef("foobox"), results[0].Payload); - assertEquals("a penny saved is a penny earned", results[1].Key); - assertEquals(10, results[1].Value); - assertEquals(new BytesRef("foobaz"), results[1].Payload); - } + using AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3); + suggester.Build(new InputArrayEnumerator(keys)); + + IList results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("lend me your ear", results[0].Key); + assertEquals(8, results[0].Value); + assertEquals(new BytesRef("foobar"), results[0].Payload); + + // Add a new suggestion: + suggester.Add(new BytesRef("a penny saved is a penny earned"), null, 10, new BytesRef("foobaz")); + + // Must refresh to see any newly added suggestions: + suggester.Refresh(); + + results = 
suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); + assertEquals(2, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); + + assertEquals("lend me your ear", results[1].Key); + assertEquals(8, results[1].Value); + assertEquals(new BytesRef("foobar"), results[1].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("ear ", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("lend me your ear", results[0].Key); + assertEquals(8, results[0].Value); + assertEquals(new BytesRef("foobar"), results[0].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("pen", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("p", Random).ToString(), 10, true, true); + assertEquals(1, results.size()); + assertEquals("a penny saved is a penny earned", results[0].Key); + assertEquals(10, results[0].Value); + assertEquals(new BytesRef("foobaz"), results[0].Payload); + + // Change the weight: + suggester.Update(new BytesRef("lend me your ear"), null, 12, new BytesRef("foobox")); + + // Must refresh to see any newly added suggestions: + suggester.Refresh(); + + results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random).ToString(), 10, true, true); + assertEquals(2, results.size()); + assertEquals("lend me your ear", results[0].Key); + assertEquals(12, results[0].Value); + assertEquals(new BytesRef("foobox"), results[0].Payload); + assertEquals("a penny saved is a penny earned", results[1].Key); + assertEquals(10, results[1].Value); + assertEquals(new BytesRef("foobaz"), results[1].Payload); } 
private ISet AsSet(params string[] values) diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs index 386da4a19a..c869b8810f 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs @@ -1183,17 +1183,11 @@ public TestDupSurfaceFormsMissingResultsTokenStreamComponents(AnalyzingSuggester this.outerInstance = outerInstance; } - public override TokenStream TokenStream - { - get - { - return new CannedTokenStream(new Token[] { - NewToken("hairy", 1, 1), - NewToken("smelly", 0, 1), - NewToken("dog", 1, 1), - }); - } - } + public override TokenStream TokenStream => new CannedTokenStream(new Token[] { + NewToken("hairy", 1, 1), + NewToken("smelly", 0, 1), + NewToken("dog", 1, 1), + }); protected internal override void SetReader(TextReader reader) { diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/BytesRefSortersTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/BytesRefSortersTest.cs index 8e04265480..92a277c32a 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/BytesRefSortersTest.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/BytesRefSortersTest.cs @@ -26,8 +26,8 @@ public class BytesRefSortersTest : LuceneTestCase [Test] public void TestExternalRefSorter() { - using (ExternalRefSorter s = new ExternalRefSorter(new OfflineSorter())) - Check(s); + using ExternalRefSorter s = new ExternalRefSorter(new OfflineSorter()); + Check(s); } [Test] diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs index f686a61c13..aedaa40d17 100644 --- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs +++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/LargeInputFST.cs @@ -27,7 +27,7 @@ namespace Lucene.Net.Search.Suggest.Fst /// Try to build a suggester from a large data set. 
The input is a simple text /// file, newline-delimited. /// - public class LargeInputFST + public static class LargeInputFST // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { // LUCENENET specific - renaming from Main() because we must only have 1 entry point. // Not sure why this utility is in a test project anyway - this seems like something that should diff --git a/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs b/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs index 26629816d0..fde5e30494 100644 --- a/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs +++ b/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs @@ -293,15 +293,13 @@ public void TestForwardOffsets() StringReader reader = new StringReader(s); MockCharFilter charfilter = new MockCharFilter(reader, 2); MockAnalyzer analyzer = new MockAnalyzer(Random); - using (TokenStream ts = analyzer.GetTokenStream("bogus", charfilter)) + using TokenStream ts = analyzer.GetTokenStream("bogus", charfilter); + ts.Reset(); + while (ts.IncrementToken()) { - ts.Reset(); - while (ts.IncrementToken()) - { - ; - } - ts.End(); + ; } + ts.End(); } } diff --git a/src/Lucene.Net.Tests.TestFramework/Store/TestMockDirectoryWrapper.cs b/src/Lucene.Net.Tests.TestFramework/Store/TestMockDirectoryWrapper.cs index 53108664a7..401e970b02 100644 --- a/src/Lucene.Net.Tests.TestFramework/Store/TestMockDirectoryWrapper.cs +++ b/src/Lucene.Net.Tests.TestFramework/Store/TestMockDirectoryWrapper.cs @@ -274,16 +274,14 @@ protected override void Dispose(bool disposing) [Test] public void TestAbuseClosedIndexInput() { - using (MockDirectoryWrapper dir = NewMockDirectory()) + using MockDirectoryWrapper dir = NewMockDirectory(); + using (IndexOutput @out = dir.CreateOutput("foo", IOContext.DEFAULT)) { - using (IndexOutput @out = dir.CreateOutput("foo", IOContext.DEFAULT)) - { - @out.WriteByte((byte)42); - } // @out.close(); - IndexInput @in = 
dir.OpenInput("foo", IOContext.DEFAULT); - @in.Dispose(); - Assert.Throws(() => @in.ReadByte()); - } // dir.close(); + @out.WriteByte((byte)42); + } // @out.close(); + IndexInput @in = dir.OpenInput("foo", IOContext.DEFAULT); + @in.Dispose(); + Assert.Throws(() => @in.ReadByte()); } // LUCENENET: This test compiles, but is not compatible with 4.8.0 (tested in Java Lucene), as it was ported from 8.2.0 diff --git a/src/Lucene.Net.Tests/Analysis/TestNumericTokenStream.cs b/src/Lucene.Net.Tests/Analysis/TestNumericTokenStream.cs index 98ff15c220..d67fea30cb 100644 --- a/src/Lucene.Net.Tests/Analysis/TestNumericTokenStream.cs +++ b/src/Lucene.Net.Tests/Analysis/TestNumericTokenStream.cs @@ -36,26 +36,25 @@ public class TestNumericTokenStream : BaseTokenStreamTestCase [NUnit.Framework.Test] public virtual void TestLongStream() { - using (NumericTokenStream stream = (new NumericTokenStream()).SetInt64Value(lvalue)) { - // use getAttribute to test if attributes really exist, if not an IAE will be throwed - ITermToBytesRefAttribute bytesAtt = stream.GetAttribute(); - ITypeAttribute typeAtt = stream.GetAttribute(); - NumericTokenStream.INumericTermAttribute numericAtt = stream.GetAttribute(); - BytesRef bytes = bytesAtt.BytesRef; - stream.Reset(); - Assert.AreEqual(64, numericAtt.ValueSize); - for (int shift = 0; shift < 64; shift += NumericUtils.PRECISION_STEP_DEFAULT) - { - Assert.IsTrue(stream.IncrementToken(), "New token is available"); - Assert.AreEqual(shift, numericAtt.Shift, "Shift value wrong"); - bytesAtt.FillBytesRef(); - Assert.AreEqual(lvalue & ~((1L << shift) - 1L), NumericUtils.PrefixCodedToInt64(bytes), "Term is incorrectly encoded"); - Assert.AreEqual(lvalue & ~((1L << shift) - 1L), numericAtt.RawValue, "Term raw value is incorrectly encoded"); - Assert.AreEqual((shift == 0) ? 
NumericTokenStream.TOKEN_TYPE_FULL_PREC : NumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.Type, "Type incorrect"); - } - Assert.IsFalse(stream.IncrementToken(), "More tokens available"); - stream.End(); + using NumericTokenStream stream = (new NumericTokenStream()).SetInt64Value(lvalue); + // use getAttribute to test if attributes really exist, if not an IAE will be throwed + ITermToBytesRefAttribute bytesAtt = stream.GetAttribute(); + ITypeAttribute typeAtt = stream.GetAttribute(); + NumericTokenStream.INumericTermAttribute numericAtt = stream.GetAttribute(); + BytesRef bytes = bytesAtt.BytesRef; + stream.Reset(); + Assert.AreEqual(64, numericAtt.ValueSize); + for (int shift = 0; shift < 64; shift += NumericUtils.PRECISION_STEP_DEFAULT) + { + Assert.IsTrue(stream.IncrementToken(), "New token is available"); + Assert.AreEqual(shift, numericAtt.Shift, "Shift value wrong"); + bytesAtt.FillBytesRef(); + Assert.AreEqual(lvalue & ~((1L << shift) - 1L), NumericUtils.PrefixCodedToInt64(bytes), "Term is incorrectly encoded"); + Assert.AreEqual(lvalue & ~((1L << shift) - 1L), numericAtt.RawValue, "Term raw value is incorrectly encoded"); + Assert.AreEqual((shift == 0) ? 
NumericTokenStream.TOKEN_TYPE_FULL_PREC : NumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.Type, "Type incorrect"); } + Assert.IsFalse(stream.IncrementToken(), "More tokens available"); + stream.End(); } [NUnit.Framework.Test] diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs index ac2b6db094..07ac1b32d8 100644 --- a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs +++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs @@ -199,8 +199,7 @@ private void DoTestSeekExists(Random r, IList fieldTerms, IndexReader read } // seek to it - TermsEnum te; - if (!tes.TryGetValue(field, out te)) + if (!tes.TryGetValue(field, out TermsEnum te)) { te = MultiFields.GetTerms(reader, field).GetEnumerator(); tes[field] = te; @@ -276,8 +275,7 @@ private void DoTestSeekDoesNotExist(Random r, int numField, IList fieldTer } // term does not exist: - TermsEnum te; - if (!tes.TryGetValue(field, out te)) + if (!tes.TryGetValue(field, out TermsEnum te)) { te = MultiFields.GetTerms(reader, field).GetEnumerator(); tes[field] = te; diff --git a/src/Lucene.Net.Tests/Document/TestBinaryDocument.cs b/src/Lucene.Net.Tests/Document/TestBinaryDocument.cs index e45862b4ff..c89337c1c0 100644 --- a/src/Lucene.Net.Tests/Document/TestBinaryDocument.cs +++ b/src/Lucene.Net.Tests/Document/TestBinaryDocument.cs @@ -101,31 +101,26 @@ public virtual void TestCompressionTools() var doc = new Documents.Document {binaryFldCompressed, stringFldCompressed}; - using (Directory dir = NewDirectory()) - using (RandomIndexWriter writer = new RandomIndexWriter( + using Directory dir = NewDirectory(); + using RandomIndexWriter writer = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif - Random, dir)) - { - writer.AddDocument(doc); - - using (IndexReader reader = writer.GetReader()) - { - Documents.Document docFromReader = reader.Document(0); - Assert.IsTrue(docFromReader != null); - - string 
binaryFldCompressedTest = - Encoding.UTF8.GetString( - CompressionTools.Decompress(docFromReader.GetBinaryValue("binaryCompressed"))); - //new string(CompressionTools.Decompress(docFromReader.GetBinaryValue("binaryCompressed")), IOUtils.CHARSET_UTF_8); - Assert.IsTrue(binaryFldCompressedTest.Equals(binaryValCompressed, StringComparison.Ordinal)); - Assert.IsTrue( - CompressionTools.DecompressString(docFromReader.GetBinaryValue("stringCompressed")) - .Equals(binaryValCompressed, StringComparison.Ordinal)); - } - - } + Random, dir); + writer.AddDocument(doc); + + using IndexReader reader = writer.GetReader(); + Document docFromReader = reader.Document(0); + Assert.IsTrue(docFromReader != null); + + string binaryFldCompressedTest = + Encoding.UTF8.GetString( + CompressionTools.Decompress(docFromReader.GetBinaryValue("binaryCompressed"))); + //new string(CompressionTools.Decompress(docFromReader.GetBinaryValue("binaryCompressed")), IOUtils.CHARSET_UTF_8); + Assert.IsTrue(binaryFldCompressedTest.Equals(binaryValCompressed, StringComparison.Ordinal)); + Assert.IsTrue( + CompressionTools.DecompressString(docFromReader.GetBinaryValue("stringCompressed")) + .Equals(binaryValCompressed, StringComparison.Ordinal)); } } } \ No newline at end of file diff --git a/src/Lucene.Net.Tests/Document/TestDateTools.cs b/src/Lucene.Net.Tests/Document/TestDateTools.cs index 3756f7a08e..ecd7257a6f 100644 --- a/src/Lucene.Net.Tests/Document/TestDateTools.cs +++ b/src/Lucene.Net.Tests/Document/TestDateTools.cs @@ -31,7 +31,7 @@ public class TestDateTools : LuceneTestCase [Test] public virtual void TestStringToDate() { - DateTime d = default(DateTime); + DateTime d = default; d = DateTools.StringToDate("2004"); Assert.AreEqual("2004-01-01 00:00:00:000", IsoFormat(d)); d = DateTools.StringToDate("20040705"); diff --git a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs index 450a9f9c90..44111033fc 100644 --- 
a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs +++ b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs @@ -1212,42 +1212,36 @@ public virtual void TestAddIndexMissingCodec() { IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); conf.SetCodec(new UnRegisteredCodec()); - using (var w = new IndexWriter(toAdd, conf)) - { - Document doc = new Document(); - FieldType customType = new FieldType(); - customType.IsIndexed = true; - doc.Add(NewField("foo", "bar", customType)); - w.AddDocument(doc); - } + using var w = new IndexWriter(toAdd, conf); + Document doc = new Document(); + FieldType customType = new FieldType(); + customType.IsIndexed = true; + doc.Add(NewField("foo", "bar", customType)); + w.AddDocument(doc); } { - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + conf.SetCodec(TestUtil.AlwaysPostingsFormat(new Pulsing41PostingsFormat(1 + Random.Next(20)))); + IndexWriter w = new IndexWriter(dir, conf); + try { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - conf.SetCodec(TestUtil.AlwaysPostingsFormat(new Pulsing41PostingsFormat(1 + Random.Next(20)))); - IndexWriter w = new IndexWriter(dir, conf); - try - { - w.AddIndexes(toAdd); - Assert.Fail("no such codec"); - } + w.AddIndexes(toAdd); + Assert.Fail("no such codec"); + } #pragma warning disable 168 - catch (ArgumentException ex) + catch (ArgumentException ex) #pragma warning restore 168 - { - // expected - } - finally - { - w.Dispose(); - } - using (IndexReader open = DirectoryReader.Open(dir)) - { - Assert.AreEqual(0, open.NumDocs); - } + { + // expected + } + finally + { + w.Dispose(); } + using IndexReader open = DirectoryReader.Open(dir); + Assert.AreEqual(0, open.NumDocs); } try diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs index 
a3a4130e09..fbe7193057 100644 --- a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs +++ b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs @@ -192,8 +192,7 @@ public override void Run() int numTerms = threadRandom.Next(maxTermsPerDoc); for (int i = 0; i < numTerms; i++) { - string token; - if (!postings.TryDequeue(out token)) + if (!postings.TryDequeue(out string token)) { break; } diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs index 9665e8e5d7..e26a81b2bc 100644 --- a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs +++ b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs @@ -169,8 +169,7 @@ public override void Run() ISet visited = new JCG.HashSet(); for (int i = 0; i < maxTermsPerDoc; i++) { - string token; - if (!postings.TryDequeue(out token)) + if (!postings.TryDequeue(out string token)) { break; } diff --git a/src/Lucene.Net.Tests/Index/TestCodecs.cs b/src/Lucene.Net.Tests/Index/TestCodecs.cs index dd856201ed..8c9e94ba65 100644 --- a/src/Lucene.Net.Tests/Index/TestCodecs.cs +++ b/src/Lucene.Net.Tests/Index/TestCodecs.cs @@ -350,51 +350,47 @@ public virtual void TestFixedPostings() FieldData[] fields = new FieldData[] { field }; FieldInfos fieldInfos = builder.Finish(); // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws - using (Directory dir = NewDirectory()) - { - this.Write(fieldInfos, dir, fields, true); - Codec codec = Codec.Default; - SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null); - - // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws - using (FieldsProducer reader = codec.PostingsFormat.FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR))) - { - IEnumerator fieldsEnum = reader.GetEnumerator(); - fieldsEnum.MoveNext(); - string fieldName = 
fieldsEnum.Current; - Assert.IsNotNull(fieldName); - Terms terms2 = reader.GetTerms(fieldName); - Assert.IsNotNull(terms2); - - TermsEnum termsEnum = terms2.GetEnumerator(); - - DocsEnum docsEnum = null; - for (int i = 0; i < NUM_TERMS; i++) - { - Assert.IsTrue(termsEnum.MoveNext()); - BytesRef term = termsEnum.Term; - Assert.AreEqual(terms[i].text2, term.Utf8ToString()); + using Directory dir = NewDirectory(); + this.Write(fieldInfos, dir, fields, true); + Codec codec = Codec.Default; + SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null); - // do this twice to stress test the codec's reuse, ie, - // make sure it properly fully resets (rewinds) its - // internal state: - for (int iter = 0; iter < 2; iter++) - { - docsEnum = TestUtil.Docs(Random, termsEnum, null, docsEnum, DocsFlags.NONE); - Assert.AreEqual(terms[i].docs[0], docsEnum.NextDoc()); - Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc()); - } - } - Assert.IsFalse(termsEnum.MoveNext()); + // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws + using FieldsProducer reader = codec.PostingsFormat.FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR)); + IEnumerator fieldsEnum = reader.GetEnumerator(); + fieldsEnum.MoveNext(); + string fieldName = fieldsEnum.Current; + Assert.IsNotNull(fieldName); + Terms terms2 = reader.GetTerms(fieldName); + Assert.IsNotNull(terms2); - for (int i = 0; i < NUM_TERMS; i++) - { - Assert.AreEqual(termsEnum.SeekCeil(new BytesRef(terms[i].text2)), TermsEnum.SeekStatus.FOUND); - } + TermsEnum termsEnum = terms2.GetEnumerator(); - Assert.IsFalse(fieldsEnum.MoveNext()); + DocsEnum docsEnum = null; + for (int i = 0; i < NUM_TERMS; i++) + { + Assert.IsTrue(termsEnum.MoveNext()); + BytesRef term = termsEnum.Term; + Assert.AreEqual(terms[i].text2, term.Utf8ToString()); + + // do this twice to stress 
test the codec's reuse, ie, + // make sure it properly fully resets (rewinds) its + // internal state: + for (int iter = 0; iter < 2; iter++) + { + docsEnum = TestUtil.Docs(Random, termsEnum, null, docsEnum, DocsFlags.NONE); + Assert.AreEqual(terms[i].docs[0], docsEnum.NextDoc()); + Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc()); } } + Assert.IsFalse(termsEnum.MoveNext()); + + for (int i = 0; i < NUM_TERMS; i++) + { + Assert.AreEqual(termsEnum.SeekCeil(new BytesRef(terms[i].text2)), TermsEnum.SeekStatus.FOUND); + } + + Assert.IsFalse(fieldsEnum.MoveNext()); } [Test] @@ -411,45 +407,39 @@ public virtual void TestRandomPostings() } // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws - using (Directory dir = NewDirectory()) - { - FieldInfos fieldInfos = builder.Finish(); - - if (Verbose) - { - Console.WriteLine("TEST: now write postings"); - } + using Directory dir = NewDirectory(); + FieldInfos fieldInfos = builder.Finish(); - this.Write(fieldInfos, dir, fields, false); - Codec codec = Codec.Default; - SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null); + if (Verbose) + { + Console.WriteLine("TEST: now write postings"); + } - if (Verbose) - { - Console.WriteLine("TEST: now read postings"); - } + this.Write(fieldInfos, dir, fields, false); + Codec codec = Codec.Default; + SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null); - // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws - using (FieldsProducer terms = codec.PostingsFormat.FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR))) - { + if (Verbose) + { + Console.WriteLine("TEST: now read postings"); + } - Verify[] threads = new Verify[NUM_TEST_THREADS - 1]; - for (int i = 0; i < NUM_TEST_THREADS - 1; i++) - { - 
threads[i] = new Verify(this, si, fields, terms); - threads[i].IsBackground = (true); - threads[i].Start(); - } + // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws + using FieldsProducer terms = codec.PostingsFormat.FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR)); + Verify[] threads = new Verify[NUM_TEST_THREADS - 1]; + for (int i = 0; i < NUM_TEST_THREADS - 1; i++) + { + threads[i] = new Verify(this, si, fields, terms); + threads[i].IsBackground = (true); + threads[i].Start(); + } (new Verify(this, si, fields, terms)).Run(); - for (int i = 0; i < NUM_TEST_THREADS - 1; i++) - { - threads[i].Join(); - if (Debugging.AssertsEnabled) Debugging.Assert(!threads[i].failed); - } - - } + for (int i = 0; i < NUM_TEST_THREADS - 1; i++) + { + threads[i].Join(); + if (Debugging.AssertsEnabled) Debugging.Assert(!threads[i].failed); } } @@ -808,20 +798,18 @@ private void Write(FieldInfos fieldInfos, Directory dir, FieldData[] fields, boo SegmentWriteState state = new SegmentWriteState((InfoStream)InfoStream.Default, dir, si, fieldInfos, termIndexInterval, null, NewIOContext(Random)); // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws - using (FieldsConsumer consumer = codec.PostingsFormat.FieldsConsumer(state)) + using FieldsConsumer consumer = codec.PostingsFormat.FieldsConsumer(state); + Array.Sort(fields); + foreach (FieldData field in fields) { - Array.Sort(fields); - foreach (FieldData field in fields) - { #pragma warning disable 612, 618 - if (!allowPreFlex && codec is Lucene3xCodec) + if (!allowPreFlex && codec is Lucene3xCodec) #pragma warning restore 612, 618 - { - // code below expects unicode sort order - continue; - } - field.Write(consumer); + { + // code below expects unicode sort order + continue; } + field.Write(consumer); } } @@ -830,34 +818,29 @@ public virtual void 
TestDocsOnlyFreq() { // tests that when fields are indexed with DOCS_ONLY, the Codec // returns 1 in docsEnum.Freq() - using (Directory dir = NewDirectory()) + using Directory dir = NewDirectory(); + Random random = Random; + using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)))) { - Random random = Random; - using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)))) + // we don't need many documents to assert this, but don't use one document either + int numDocs = AtLeast(random, 50); + for (int i = 0; i < numDocs; i++) { - // we don't need many documents to assert this, but don't use one document either - int numDocs = AtLeast(random, 50); - for (int i = 0; i < numDocs; i++) - { - Document doc = new Document(); - doc.Add(new StringField("f", "doc", Store.NO)); - writer.AddDocument(doc); - } + Document doc = new Document(); + doc.Add(new StringField("f", "doc", Store.NO)); + writer.AddDocument(doc); } + } - Term term = new Term("f", new BytesRef("doc")); - using (DirectoryReader reader = DirectoryReader.Open(dir)) + Term term = new Term("f", new BytesRef("doc")); + using DirectoryReader reader = DirectoryReader.Open(dir); + foreach (AtomicReaderContext ctx in reader.Leaves) + { + DocsEnum de = ((AtomicReader)ctx.Reader).GetTermDocsEnum(term); + while (de.NextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - foreach (AtomicReaderContext ctx in reader.Leaves) - { - DocsEnum de = ((AtomicReader)ctx.Reader).GetTermDocsEnum(term); - while (de.NextDoc() != DocIdSetIterator.NO_MORE_DOCS) - { - Assert.AreEqual(1, de.Freq, "wrong freq for doc " + de.DocID); - } - } + Assert.AreEqual(1, de.Freq, "wrong freq for doc " + de.DocID); } - } } @@ -865,34 +848,31 @@ public virtual void TestDocsOnlyFreq() public virtual void TestDisableImpersonation() { Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec() }; - using 
(Directory dir = NewDirectory()) - { - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - conf.SetCodec(oldCodecs[Random.Next(oldCodecs.Length)]); - IndexWriter writer = new IndexWriter(dir, conf); + using Directory dir = NewDirectory(); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); + conf.SetCodec(oldCodecs[Random.Next(oldCodecs.Length)]); + IndexWriter writer = new IndexWriter(dir, conf); - Document doc = new Document(); - doc.Add(new StringField("f", "bar", Store.YES)); - doc.Add(new NumericDocValuesField("n", 18L)); - writer.AddDocument(doc); + Document doc = new Document(); + doc.Add(new StringField("f", "bar", Store.YES)); + doc.Add(new NumericDocValuesField("n", 18L)); + writer.AddDocument(doc); - OldFormatImpersonationIsActive = false; - try - { - writer.Dispose(); - Assert.Fail("should not have succeeded to impersonate an old format!"); - } + OldFormatImpersonationIsActive = false; + try + { + writer.Dispose(); + Assert.Fail("should not have succeeded to impersonate an old format!"); + } #pragma warning disable 168 - catch (NotSupportedException e) + catch (NotSupportedException e) #pragma warning restore 168 - { - writer.Rollback(); - } - finally - { - OldFormatImpersonationIsActive = true; - } - + { + writer.Rollback(); + } + finally + { + OldFormatImpersonationIsActive = true; } } } diff --git a/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs index d061d0c0cc..3da3ecee15 100644 --- a/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs +++ b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs @@ -451,37 +451,33 @@ public override void Eval(MockDirectoryWrapper dir) [Test, LuceneNetSpecific] public void TestExceptionOnBackgroundThreadIsPropagatedToCallingThread() { - using (MockDirectoryWrapper dir = NewMockDirectory()) - { - dir.FailOn(new FailOnlyOnMerge()); - - Document 
doc = new Document(); - Field idField = NewStringField("id", "", Field.Store.YES); - doc.Add(idField); + using MockDirectoryWrapper dir = NewMockDirectory(); + dir.FailOn(new FailOnlyOnMerge()); - var mergeScheduler = new ConcurrentMergeScheduler(); - using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy()))) - { - LogMergePolicy logMP = (LogMergePolicy)writer.Config.MergePolicy; - logMP.MergeFactor = 10; - for (int i = 0; i < 20; i++) - { - writer.AddDocument(doc); - } + Document doc = new Document(); + Field idField = NewStringField("id", "", Field.Store.YES); + doc.Add(idField); - bool exceptionHit = false; - try - { - mergeScheduler.Sync(); - } - catch (MergePolicy.MergeException) - { - exceptionHit = true; - } + var mergeScheduler = new ConcurrentMergeScheduler(); + using IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy())); + LogMergePolicy logMP = (LogMergePolicy)writer.Config.MergePolicy; + logMP.MergeFactor = 10; + for (int i = 0; i < 20; i++) + { + writer.AddDocument(doc); + } - assertTrue(exceptionHit); - } + bool exceptionHit = false; + try + { + mergeScheduler.Sync(); } + catch (MergePolicy.MergeException) + { + exceptionHit = true; + } + + assertTrue(exceptionHit); } } } diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs index 5e196c7e53..25ff8369b9 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs @@ -1177,40 +1177,38 @@ internal IndexerThreadInterrupt(TestIndexWriter outerInstance) outerInstance, #endif this.random, 
TEST_VERSION_CURRENT, new MockAnalyzer(this.random)); - using (IndexWriter w = new IndexWriter(adder, conf)) + using IndexWriter w = new IndexWriter(adder, conf); + Document doc = new Document(); + doc.Add(NewStringField(this.random, "id", "500", Field.Store.NO)); + doc.Add(NewField(this.random, "field", "some prepackaged text contents", storedTextType)); + if (DefaultCodecSupportsDocValues) { - Document doc = new Document(); - doc.Add(NewStringField(this.random, "id", "500", Field.Store.NO)); - doc.Add(NewField(this.random, "field", "some prepackaged text contents", storedTextType)); - if (DefaultCodecSupportsDocValues) - { - doc.Add(new BinaryDocValuesField("binarydv", new BytesRef("500"))); - doc.Add(new NumericDocValuesField("numericdv", 500)); - doc.Add(new SortedDocValuesField("sorteddv", new BytesRef("500"))); - } - if (DefaultCodecSupportsSortedSet) - { - doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("one"))); - doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("two"))); - } - w.AddDocument(doc); - doc = new Document(); - doc.Add(NewStringField(this.random, "id", "501", Field.Store.NO)); - doc.Add(NewField(this.random, "field", "some more contents", storedTextType)); - if (DefaultCodecSupportsDocValues) - { - doc.Add(new BinaryDocValuesField("binarydv", new BytesRef("501"))); - doc.Add(new NumericDocValuesField("numericdv", 501)); - doc.Add(new SortedDocValuesField("sorteddv", new BytesRef("501"))); - } - if (DefaultCodecSupportsSortedSet) - { - doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("two"))); - doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("three"))); - } - w.AddDocument(doc); - w.DeleteDocuments(new Term("id", "500")); - } // w.Dispose(); + doc.Add(new BinaryDocValuesField("binarydv", new BytesRef("500"))); + doc.Add(new NumericDocValuesField("numericdv", 500)); + doc.Add(new SortedDocValuesField("sorteddv", new BytesRef("500"))); + } + if (DefaultCodecSupportsSortedSet) + { + 
doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("one"))); + doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("two"))); + } + w.AddDocument(doc); + doc = new Document(); + doc.Add(NewStringField(this.random, "id", "501", Field.Store.NO)); + doc.Add(NewField(this.random, "field", "some more contents", storedTextType)); + if (DefaultCodecSupportsDocValues) + { + doc.Add(new BinaryDocValuesField("binarydv", new BytesRef("501"))); + doc.Add(new NumericDocValuesField("numericdv", 501)); + doc.Add(new SortedDocValuesField("sorteddv", new BytesRef("501"))); + } + if (DefaultCodecSupportsSortedSet) + { + doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("two"))); + doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("three"))); + } + w.AddDocument(doc); + w.DeleteDocuments(new Term("id", "500")); } public override void Run() @@ -1410,10 +1408,8 @@ public override void Run() } try { - using (IndexReader r = DirectoryReader.Open(dir)) - { - //System.out.println("doc count=" + r.NumDocs); - } // r.Dispose(); + using IndexReader r = DirectoryReader.Open(dir); + //System.out.println("doc count=" + r.NumDocs); } catch (Exception e) { diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs index 2a25dff663..36c2d7ffdc 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs @@ -644,19 +644,17 @@ public override void Run() Document doc = new Document(); Field field = NewTextField("field", "testData", Field.Store.YES); doc.Add(field); - using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig( + using IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION outerInstance, #endif - TEST_VERSION_CURRENT, new MockAnalyzer(Random)))) + TEST_VERSION_CURRENT, new MockAnalyzer(Random))); + if (iwConstructed.CurrentCount > 
0) { - if (iwConstructed.CurrentCount > 0) - { - iwConstructed.Signal(); - } - startIndexing.Wait(); - writer.AddDocument(doc); + iwConstructed.Signal(); } + startIndexing.Wait(); + writer.AddDocument(doc); } catch (Exception e) { diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs index b2ae108ee3..bc21cd9d11 100644 --- a/src/Lucene.Net.Tests/Index/TestPayloads.cs +++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs @@ -398,8 +398,7 @@ internal virtual void SetPayloadData(string field, byte[] data, int offset, int protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader) { - PayloadData payload; - fieldToData.TryGetValue(fieldName, out payload); + fieldToData.TryGetValue(fieldName, out PayloadData payload); Tokenizer ts = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false); TokenStream tokenStream = (payload != null) ? (TokenStream)new PayloadFilter(ts, payload.Data, payload.Offset, payload.Length) : ts; return new TokenStreamComponents(ts, tokenStream); diff --git a/src/Lucene.Net.Tests/Index/TestSegmentReader.cs b/src/Lucene.Net.Tests/Index/TestSegmentReader.cs index 29bcd839f1..df9019cf67 100644 --- a/src/Lucene.Net.Tests/Index/TestSegmentReader.cs +++ b/src/Lucene.Net.Tests/Index/TestSegmentReader.cs @@ -119,13 +119,13 @@ public virtual void TestGetFieldNameVariations() Assert.IsTrue(allFieldNames.Count == DocHelper.All.Count); foreach (string s in allFieldNames) { - Assert.IsTrue(DocHelper.NameValues.ContainsKey(s) == true || s.Equals("", StringComparison.Ordinal)); + Assert.IsTrue(DocHelper.NameValues.ContainsKey(s) == true || s.Length == 0); // LUCENENET: CA1820: Test for empty strings using string length } Assert.IsTrue(indexedFieldNames.Count == DocHelper.Indexed.Count); foreach (string s in indexedFieldNames) { - Assert.IsTrue(DocHelper.Indexed.ContainsKey(s) == true || s.Equals("", StringComparison.Ordinal)); + Assert.IsTrue(DocHelper.Indexed.ContainsKey(s) 
== true || s.Length == 0); // LUCENENET: CA1820: Test for empty strings using string length } Assert.IsTrue(notIndexedFieldNames.Count == DocHelper.Unindexed.Count); diff --git a/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs b/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs index 0e77a26986..3f4579ac80 100644 --- a/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs +++ b/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs @@ -42,11 +42,6 @@ namespace Lucene.Net.Index public class TestTermVectorsReader : LuceneTestCase { public TestTermVectorsReader() - { - InitializeInstanceFields(); - } - - private void InitializeInstanceFields() { positions = new int[testTerms.Length][]; tokens = new TestToken[testTerms.Length * TERM_FREQ]; diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs index 072c058a35..4180f80063 100644 --- a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs +++ b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs @@ -452,18 +452,16 @@ private IndexReader MakeIndex(Directory d, params string[] terms) iwc.SetCodec(new StandardCodec(minTermsInBlock, maxTermsInBlock)); */ - using (var w = new RandomIndexWriter(Random, d, iwc)) + using var w = new RandomIndexWriter(Random, d, iwc); + foreach (string term in terms) { - foreach (string term in terms) - { - var doc = new Document(); - var f = NewStringField(FIELD, term, Field.Store.NO); - doc.Add(f); - w.AddDocument(doc); - } - - return w.GetReader(); + var doc = new Document(); + var f = NewStringField(FIELD, term, Field.Store.NO); + doc.Add(f); + w.AddDocument(doc); } + + return w.GetReader(); } private int DocFreq(IndexReader r, string term) @@ -475,59 +473,57 @@ private int DocFreq(IndexReader r, string term) public virtual void TestEasy() { // No floor arcs: - using (var d = NewDirectory()) - using (var r = MakeIndex(d, "aa0", "aa1", "aa2", "aa3", "bb0", "bb1", "bb2", "bb3", "aa")) - { - // First term in block: - Assert.AreEqual(1, DocFreq(r, "aa0")); + using 
var d = NewDirectory(); + using var r = MakeIndex(d, "aa0", "aa1", "aa2", "aa3", "bb0", "bb1", "bb2", "bb3", "aa"); + // First term in block: + Assert.AreEqual(1, DocFreq(r, "aa0")); - // Scan forward to another term in same block - Assert.AreEqual(1, DocFreq(r, "aa2")); + // Scan forward to another term in same block + Assert.AreEqual(1, DocFreq(r, "aa2")); - Assert.AreEqual(1, DocFreq(r, "aa")); + Assert.AreEqual(1, DocFreq(r, "aa")); - // Reset same block then scan forwards - Assert.AreEqual(1, DocFreq(r, "aa1")); + // Reset same block then scan forwards + Assert.AreEqual(1, DocFreq(r, "aa1")); - // Not found, in same block - Assert.AreEqual(0, DocFreq(r, "aa5")); + // Not found, in same block + Assert.AreEqual(0, DocFreq(r, "aa5")); - // Found, in same block - Assert.AreEqual(1, DocFreq(r, "aa2")); + // Found, in same block + Assert.AreEqual(1, DocFreq(r, "aa2")); - // Not found in index: - Assert.AreEqual(0, DocFreq(r, "b0")); + // Not found in index: + Assert.AreEqual(0, DocFreq(r, "b0")); - // Found: - Assert.AreEqual(1, DocFreq(r, "aa2")); + // Found: + Assert.AreEqual(1, DocFreq(r, "aa2")); - // Found, rewind: - Assert.AreEqual(1, DocFreq(r, "aa0")); + // Found, rewind: + Assert.AreEqual(1, DocFreq(r, "aa0")); - // First term in block: - Assert.AreEqual(1, DocFreq(r, "bb0")); + // First term in block: + Assert.AreEqual(1, DocFreq(r, "bb0")); - // Scan forward to another term in same block - Assert.AreEqual(1, DocFreq(r, "bb2")); + // Scan forward to another term in same block + Assert.AreEqual(1, DocFreq(r, "bb2")); - // Reset same block then scan forwards - Assert.AreEqual(1, DocFreq(r, "bb1")); + // Reset same block then scan forwards + Assert.AreEqual(1, DocFreq(r, "bb1")); - // Not found, in same block - Assert.AreEqual(0, DocFreq(r, "bb5")); + // Not found, in same block + Assert.AreEqual(0, DocFreq(r, "bb5")); - // Found, in same block - Assert.AreEqual(1, DocFreq(r, "bb2")); + // Found, in same block + Assert.AreEqual(1, DocFreq(r, "bb2")); - // Not 
found in index: - Assert.AreEqual(0, DocFreq(r, "b0")); + // Not found in index: + Assert.AreEqual(0, DocFreq(r, "b0")); - // Found: - Assert.AreEqual(1, DocFreq(r, "bb2")); + // Found: + Assert.AreEqual(1, DocFreq(r, "bb2")); - // Found, rewind: - Assert.AreEqual(1, DocFreq(r, "bb0")); - } + // Found, rewind: + Assert.AreEqual(1, DocFreq(r, "bb0")); } // tests: @@ -539,55 +535,53 @@ public virtual void TestFloorBlocks() { var terms = new[] { "aa0", "aa1", "aa2", "aa3", "aa4", "aa5", "aa6", "aa7", "aa8", "aa9", "aa", "xx" }; - using (var d = NewDirectory()) - using (var r = MakeIndex(d, terms)) - { - // First term in first block: - Assert.AreEqual(1, DocFreq(r, "aa0")); - Assert.AreEqual(1, DocFreq(r, "aa4")); + using var d = NewDirectory(); + using var r = MakeIndex(d, terms); + // First term in first block: + Assert.AreEqual(1, DocFreq(r, "aa0")); + Assert.AreEqual(1, DocFreq(r, "aa4")); - // No block - Assert.AreEqual(0, DocFreq(r, "bb0")); + // No block + Assert.AreEqual(0, DocFreq(r, "bb0")); - // Second block - Assert.AreEqual(1, DocFreq(r, "aa4")); + // Second block + Assert.AreEqual(1, DocFreq(r, "aa4")); - // Backwards to prior floor block: - Assert.AreEqual(1, DocFreq(r, "aa0")); + // Backwards to prior floor block: + Assert.AreEqual(1, DocFreq(r, "aa0")); - // Forwards to last floor block: - Assert.AreEqual(1, DocFreq(r, "aa9")); + // Forwards to last floor block: + Assert.AreEqual(1, DocFreq(r, "aa9")); - Assert.AreEqual(0, DocFreq(r, "a")); - Assert.AreEqual(1, DocFreq(r, "aa")); - Assert.AreEqual(0, DocFreq(r, "a")); - Assert.AreEqual(1, DocFreq(r, "aa")); + Assert.AreEqual(0, DocFreq(r, "a")); + Assert.AreEqual(1, DocFreq(r, "aa")); + Assert.AreEqual(0, DocFreq(r, "a")); + Assert.AreEqual(1, DocFreq(r, "aa")); - // Forwards to last floor block: - Assert.AreEqual(1, DocFreq(r, "xx")); - Assert.AreEqual(1, DocFreq(r, "aa1")); - Assert.AreEqual(0, DocFreq(r, "yy")); + // Forwards to last floor block: + Assert.AreEqual(1, DocFreq(r, "xx")); + 
Assert.AreEqual(1, DocFreq(r, "aa1")); + Assert.AreEqual(0, DocFreq(r, "yy")); - Assert.AreEqual(1, DocFreq(r, "xx")); - Assert.AreEqual(1, DocFreq(r, "aa9")); + Assert.AreEqual(1, DocFreq(r, "xx")); + Assert.AreEqual(1, DocFreq(r, "aa9")); - Assert.AreEqual(1, DocFreq(r, "xx")); - Assert.AreEqual(1, DocFreq(r, "aa4")); + Assert.AreEqual(1, DocFreq(r, "xx")); + Assert.AreEqual(1, DocFreq(r, "aa4")); - TermsEnum te = MultiFields.GetTerms(r, FIELD).GetEnumerator(); - while (te.MoveNext()) - { - //System.out.println("TEST: next term=" + te.Term().Utf8ToString()); - } + TermsEnum te = MultiFields.GetTerms(r, FIELD).GetEnumerator(); + while (te.MoveNext()) + { + //System.out.println("TEST: next term=" + te.Term().Utf8ToString()); + } - Assert.IsTrue(SeekExact(te, "aa1")); - Assert.AreEqual("aa2", Next(te)); - Assert.IsTrue(SeekExact(te, "aa8")); - Assert.AreEqual("aa9", Next(te)); - Assert.AreEqual("xx", Next(te)); + Assert.IsTrue(SeekExact(te, "aa1")); + Assert.AreEqual("aa2", Next(te)); + Assert.IsTrue(SeekExact(te, "aa8")); + Assert.AreEqual("aa9", Next(te)); + Assert.AreEqual("xx", Next(te)); - TestRandomSeeks(r, terms); - } + TestRandomSeeks(r, terms); } [Test] @@ -673,11 +667,9 @@ public virtual void TestRandomTerms() } } - using (var d = NewDirectory()) - using (var r = MakeIndex(d, terms)) - { - TestRandomSeeks(r, terms); - } + using var d = NewDirectory(); + using var r = MakeIndex(d, terms); + TestRandomSeeks(r, terms); } // sugar diff --git a/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs b/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs index 2f15922ca8..1bba5fdee2 100644 --- a/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs +++ b/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs @@ -2,6 +2,7 @@ using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; +using System.Diagnostics.CodeAnalysis; using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Search @@ -77,6 +78,7 @@ private void 
CountHits(Analyzer analyzer, string[] docs, Query q, int expected) d.Dispose(); } + [SuppressMessage("Style", "IDE0025:Use expression body for properties", Justification = "Multiple lines")] public static Analyzer Analyzer { get diff --git a/src/Lucene.Net.Tests/Search/TestElevationComparator.cs b/src/Lucene.Net.Tests/Search/TestElevationComparator.cs index 02021294c1..7bb7e7aaa9 100644 --- a/src/Lucene.Net.Tests/Search/TestElevationComparator.cs +++ b/src/Lucene.Net.Tests/Search/TestElevationComparator.cs @@ -201,8 +201,7 @@ private int DocVal(int doc) else { idIndex.LookupOrd(ord, tempBR); - int? prio; - if (outerInstance.priority.TryGetValue(tempBR, out prio)) + if (outerInstance.priority.TryGetValue(tempBR, out int? prio)) { return (int)prio; } diff --git a/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs b/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs index e4b1c54de6..225e5ac1a2 100644 --- a/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs +++ b/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs @@ -62,25 +62,22 @@ public override void BeforeClass() string[] data = new string[] { "A 1 2 3 4 5 6", "Z 4 5 6", null, "B 2 4 5 6", "Y 3 5 6", null, "C 3 6", "X 4 5 6" }; small = NewDirectory(); - using (RandomIndexWriter writer = new RandomIndexWriter(Random, small, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMergePolicy(NewLogMergePolicy()))) + using RandomIndexWriter writer = new RandomIndexWriter(Random, small, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMergePolicy(NewLogMergePolicy())); + FieldType customType = new FieldType(TextField.TYPE_STORED); + customType.IsTokenized = false; + for (int i = 0; i < data.Length; i++) { - - FieldType customType = new FieldType(TextField.TYPE_STORED); - customType.IsTokenized = false; - for (int i = 0; i < data.Length; i++) + Document doc = new Document(); + 
doc.Add(NewField("id", Convert.ToString(i), customType)); // Field.Keyword("id",String.valueOf(i))); + doc.Add(NewField("all", "all", customType)); // Field.Keyword("all","all")); + if (null != data[i]) { - Document doc = new Document(); - doc.Add(NewField("id", Convert.ToString(i), customType)); // Field.Keyword("id",String.valueOf(i))); - doc.Add(NewField("all", "all", customType)); // Field.Keyword("all","all")); - if (null != data[i]) - { - doc.Add(NewTextField("data", data[i], Field.Store.YES)); // Field.Text("data",data[i])); - } - writer.AddDocument(doc); + doc.Add(NewTextField("data", data[i], Field.Store.YES)); // Field.Text("data",data[i])); } - - reader = writer.GetReader(); + writer.AddDocument(doc); } + + reader = writer.GetReader(); } [OneTimeTearDown] diff --git a/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs b/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs index cbac305304..fe6a24a9a3 100644 --- a/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs +++ b/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs @@ -181,17 +181,14 @@ public AnalyzerAnonymousInnerClassHelper(TestPositionIncrement outerInstance) protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader) { - return new TokenStreamComponents(new TokenizerAnonymousInnerClassHelper(this, reader)); + return new TokenStreamComponents(new TokenizerAnonymousInnerClassHelper(reader)); } private class TokenizerAnonymousInnerClassHelper : Tokenizer { - private readonly AnalyzerAnonymousInnerClassHelper outerInstance; - - public TokenizerAnonymousInnerClassHelper(AnalyzerAnonymousInnerClassHelper outerInstance, TextReader reader) + public TokenizerAnonymousInnerClassHelper(TextReader reader) : base(reader) { - this.outerInstance = outerInstance; TOKENS = new string[] { "1", "2", "3", "4", "5" }; INCREMENTS = new int[] { 1, 2, 1, 0, 1 }; i = 0; diff --git a/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs 
b/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs index b032b1368b..08a4e9e695 100644 --- a/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs +++ b/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs @@ -126,11 +126,6 @@ protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) private sealed class SimpleAutomatonTermsEnum : FilteredTermsEnum { - private void InitializeInstanceFields() - { - runAutomaton = new CharacterRunAutomaton(outerInstance.automaton); - } - private readonly TestRegexpRandom2.DumbRegexpQuery outerInstance; private CharacterRunAutomaton runAutomaton; @@ -141,7 +136,7 @@ internal SimpleAutomatonTermsEnum(TestRegexpRandom2.DumbRegexpQuery outerInstanc { this.outerInstance = outerInstance; - InitializeInstanceFields(); + runAutomaton = new CharacterRunAutomaton(outerInstance.automaton); SetInitialSeekTerm(new BytesRef("")); } diff --git a/src/Lucene.Net.Tests/Search/TestShardSearching.cs b/src/Lucene.Net.Tests/Search/TestShardSearching.cs index 2ea2e9c05f..c268c66c34 100644 --- a/src/Lucene.Net.Tests/Search/TestShardSearching.cs +++ b/src/Lucene.Net.Tests/Search/TestShardSearching.cs @@ -309,7 +309,8 @@ public virtual void TestSimple() } finally { - m_nodes[myNodeID].Release(localShardSearcher); + //m_nodes[myNodeID].Release(localShardSearcher); + NodeState.Release(localShardSearcher); // LUCENENET: Made Release() static per CA1822 for performance foreach (IndexReader sub in subs) { if (sub != null) diff --git a/src/Lucene.Net.Tests/Store/TestBufferedIndexInput.cs b/src/Lucene.Net.Tests/Store/TestBufferedIndexInput.cs index 6d4663c7fd..188ad4b88c 100644 --- a/src/Lucene.Net.Tests/Store/TestBufferedIndexInput.cs +++ b/src/Lucene.Net.Tests/Store/TestBufferedIndexInput.cs @@ -43,13 +43,12 @@ public class TestBufferedIndexInput : LuceneTestCase { private static void WriteBytes(FileInfo aFile, long size) { - using (FileStream ostream = new FileStream(aFile.FullName, FileMode.Create)) { - for (int i = 0; i < size; i++) - { - 
ostream.WriteByte(Byten(i)); - } - ostream.Flush(); + using FileStream ostream = new FileStream(aFile.FullName, FileMode.Create); + for (int i = 0; i < size; i++) + { + ostream.WriteByte(Byten(i)); } + ostream.Flush(); } private const long TEST_FILE_LENGTH = 100 * 1024; diff --git a/src/Lucene.Net.Tests/Store/TestDirectory.cs b/src/Lucene.Net.Tests/Store/TestDirectory.cs index a56b3aa5a3..4fb2d95b0c 100644 --- a/src/Lucene.Net.Tests/Store/TestDirectory.cs +++ b/src/Lucene.Net.Tests/Store/TestDirectory.cs @@ -139,7 +139,7 @@ public override void Run() { try { - using (IndexInput input = outerBDWrapper.OpenInput(file, NewIOContext(Random))) { } + using IndexInput input = outerBDWrapper.OpenInput(file, NewIOContext(Random)); } #pragma warning disable 168 catch (FileNotFoundException fne) @@ -381,51 +381,49 @@ public virtual void TestFsyncDoesntCreateNewFiles() var path = CreateTempDir("nocreate"); Console.WriteLine(path.FullName); - using (Directory fsdir = new SimpleFSDirectory(path)) + using Directory fsdir = new SimpleFSDirectory(path); + // write a file + using (var o = fsdir.CreateOutput("afile", NewIOContext(Random))) { - // write a file - using (var o = fsdir.CreateOutput("afile", NewIOContext(Random))) - { - o.WriteString("boo"); - } - - // delete it - try - { - File.Delete(Path.Combine(path.FullName, "afile")); - } - catch (Exception e) - { - Assert.Fail("Deletion of new Directory should never fail.\nException thrown: {0}", e); - } + o.WriteString("boo"); + } - // directory is empty - Assert.AreEqual(0, fsdir.ListAll().Length); - - - // LUCENENET specific: Since FSDirectory.Sync() does not actually do anything in .NET - // we decided to remove the exception as well. This is safe to ignore here. 
- //// fsync it - //try - //{ - // fsdir.Sync(Collections.Singleton("afile")); - // Assert.Fail("didn't get expected exception, instead fsync created new files: " + - // Collections.ToString(fsdir.ListAll())); - //} - //catch (FileNotFoundException) - //{ - // // ok - //} - //// LUCENENET specific - since NoSuchDirectoryException subclasses FileNotFoundException - //// in Lucene, we need to catch it here to be on the safe side. - //catch (DirectoryNotFoundException) - //{ - // // ok - //} - - // directory is still empty - Assert.AreEqual(0, fsdir.ListAll().Length); + // delete it + try + { + File.Delete(Path.Combine(path.FullName, "afile")); + } + catch (Exception e) + { + Assert.Fail("Deletion of new Directory should never fail.\nException thrown: {0}", e); } + + // directory is empty + Assert.AreEqual(0, fsdir.ListAll().Length); + + + // LUCENENET specific: Since FSDirectory.Sync() does not actually do anything in .NET + // we decided to remove the exception as well. This is safe to ignore here. + //// fsync it + //try + //{ + // fsdir.Sync(Collections.Singleton("afile")); + // Assert.Fail("didn't get expected exception, instead fsync created new files: " + + // Collections.ToString(fsdir.ListAll())); + //} + //catch (FileNotFoundException) + //{ + // // ok + //} + //// LUCENENET specific - since NoSuchDirectoryException subclasses FileNotFoundException + //// in Lucene, we need to catch it here to be on the safe side. 
+ //catch (DirectoryNotFoundException) + //{ + // // ok + //} + + // directory is still empty + Assert.AreEqual(0, fsdir.ListAll().Length); } [Test] @@ -434,51 +432,49 @@ public virtual void TestFsyncDoesntCreateNewFiles() public virtual void ConcurrentIndexAccessThrowsWithoutSynchronizedStaleFiles() { DirectoryInfo tempDir = CreateTempDir(GetType().Name); - using (Directory dir = new SimpleFSDirectory(tempDir)) + using Directory dir = new SimpleFSDirectory(tempDir); + var ioContext = NewIOContext(Random); + var threads = new Thread[Environment.ProcessorCount]; + int file = 0; + Exception exception = null; + bool stopped = false; + + using (var @event = new ManualResetEvent(false)) { - var ioContext = NewIOContext(Random); - var threads = new Thread[Environment.ProcessorCount]; - int file = 0; - Exception exception = null; - bool stopped = false; - - using (var @event = new ManualResetEvent(false)) + for (int i = 0; i < threads.Length; i++) { - for (int i = 0; i < threads.Length; i++) + var thread = new Thread(() => { - var thread = new Thread(() => + while (!stopped) { - while (!stopped) + int nextFile = Interlocked.Increment(ref file); + try { - int nextFile = Interlocked.Increment(ref file); - try - { - dir.CreateOutput("test" + nextFile, ioContext).Dispose(); - } - catch (Exception ex) - { - exception = ex; - @event.Set(); - break; - } + dir.CreateOutput("test" + nextFile, ioContext).Dispose(); } - }); - thread.Start(); - threads[i] = thread; - } + catch (Exception ex) + { + exception = ex; + @event.Set(); + break; + } + } + }); + thread.Start(); + threads[i] = thread; + } - bool raised = @event.WaitOne(TimeSpan.FromSeconds(5)); + bool raised = @event.WaitOne(TimeSpan.FromSeconds(5)); - stopped = true; + stopped = true; - if (raised) - throw new Exception("Test failed", exception); - } + if (raised) + throw new Exception("Test failed", exception); + } - foreach (var thread in threads) - { - thread.Join(); - } + foreach (var thread in threads) + { + 
thread.Join(); } } diff --git a/src/Lucene.Net.Tests/Store/TestFileSwitchDirectory.cs b/src/Lucene.Net.Tests/Store/TestFileSwitchDirectory.cs index b92e6830cc..903c44ac05 100644 --- a/src/Lucene.Net.Tests/Store/TestFileSwitchDirectory.cs +++ b/src/Lucene.Net.Tests/Store/TestFileSwitchDirectory.cs @@ -114,17 +114,15 @@ public virtual void TestNoDir() DirectoryInfo secondDir = CreateTempDir("bar"); System.IO.Directory.Delete(primDir.FullName, true); System.IO.Directory.Delete(secondDir.FullName, true); - using (Directory dir = NewFSSwitchDirectory(primDir, secondDir, Collections.EmptySet())) + using Directory dir = NewFSSwitchDirectory(primDir, secondDir, Collections.EmptySet()); + try + { + DirectoryReader.Open(dir); + Assert.Fail("did not hit expected exception"); + } + catch (DirectoryNotFoundException) { - try - { - DirectoryReader.Open(dir); - Assert.Fail("did not hit expected exception"); - } - catch (DirectoryNotFoundException) - { - // expected - } + // expected } } diff --git a/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs b/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs index 2f98dc567c..22f9558e30 100644 --- a/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs +++ b/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs @@ -46,8 +46,7 @@ protected override byte[] NewBuffer(int size) if (capacity <= MAX_VALUE) { // below maxint we reuse buffers - byte[] buf; - singleBuffers.TryGetValue(Convert.ToInt32(size), out buf); + singleBuffers.TryGetValue(Convert.ToInt32(size), out byte[] buf); if (buf == null) { buf = new byte[size]; diff --git a/src/Lucene.Net.Tests/Store/TestNRTCachingDirectory.cs b/src/Lucene.Net.Tests/Store/TestNRTCachingDirectory.cs index 6d04f002ad..113f490f0d 100644 --- a/src/Lucene.Net.Tests/Store/TestNRTCachingDirectory.cs +++ b/src/Lucene.Net.Tests/Store/TestNRTCachingDirectory.cs @@ -144,18 +144,16 @@ public virtual void TestNoDir() { var tempDir = CreateTempDir("doesnotexist").FullName; System.IO.Directory.Delete(tempDir, true); - using (Directory dir 
= new NRTCachingDirectory(NewFSDirectory(new DirectoryInfo(tempDir)), 2.0, 25.0)) + using Directory dir = new NRTCachingDirectory(NewFSDirectory(new DirectoryInfo(tempDir)), 2.0, 25.0); + try { - try - { - Assert.False(System.IO.Directory.Exists(tempDir)); - DirectoryReader.Open(dir); - Assert.Fail("did not hit expected exception"); - } - catch (DirectoryNotFoundException) - { - // expected - } + Assert.False(System.IO.Directory.Exists(tempDir)); + DirectoryReader.Open(dir); + Assert.Fail("did not hit expected exception"); + } + catch (DirectoryNotFoundException) + { + // expected } } diff --git a/src/Lucene.Net.Tests/Store/TestWindowsMMap.cs b/src/Lucene.Net.Tests/Store/TestWindowsMMap.cs index 2efb24965e..e7d3d2c9d9 100644 --- a/src/Lucene.Net.Tests/Store/TestWindowsMMap.cs +++ b/src/Lucene.Net.Tests/Store/TestWindowsMMap.cs @@ -79,30 +79,26 @@ public virtual void TestMmapIndex() // plan to add a set of useful stopwords, consider changing some of the // interior filters. - using (var analyzer = new MockAnalyzer(Random)) + using var analyzer = new MockAnalyzer(Random); + // TODO: something about lock timeouts and leftover locks. + using (var writer = new IndexWriter(dir, + new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode( + OpenMode.CREATE))) { - // TODO: something about lock timeouts and leftover locks. 
- using (var writer = new IndexWriter(dir, - new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode( - OpenMode.CREATE))) + writer.Commit(); + using IndexReader reader = DirectoryReader.Open(dir); + var searcher = NewSearcher(reader); + var num = AtLeast(1000); + for (int dx = 0; dx < num; dx++) { - writer.Commit(); - using (IndexReader reader = DirectoryReader.Open(dir)) - { - var searcher = NewSearcher(reader); - var num = AtLeast(1000); - for (int dx = 0; dx < num; dx++) - { - var f = RandomField(); - var doc = new Document(); - doc.Add(NewTextField("data", f, Field.Store.YES)); - writer.AddDocument(doc); - } - } + var f = RandomField(); + var doc = new Document(); + doc.Add(NewTextField("data", f, Field.Store.YES)); + writer.AddDocument(doc); } - - RmDir(dirPath.FullName); } + + RmDir(dirPath.FullName); } private static void RmDir(string dir) diff --git a/src/Lucene.Net.Tests/Support/Index/TestTaskMergeScheduler.cs b/src/Lucene.Net.Tests/Support/Index/TestTaskMergeScheduler.cs index 2ae7ddb6a7..de579b5cb1 100644 --- a/src/Lucene.Net.Tests/Support/Index/TestTaskMergeScheduler.cs +++ b/src/Lucene.Net.Tests/Support/Index/TestTaskMergeScheduler.cs @@ -146,37 +146,33 @@ public void TestCustomMergeScheduler() [Test, LuceneNetSpecific] public void TestExceptionOnBackgroundThreadIsPropagatedToCallingThread() { - using (MockDirectoryWrapper dir = NewMockDirectory()) - { - dir.FailOn(new FailOnlyOnMerge()); - - Document doc = new Document(); - Field idField = NewStringField("id", "", Field.Store.YES); - doc.Add(idField); + using MockDirectoryWrapper dir = NewMockDirectory(); + dir.FailOn(new FailOnlyOnMerge()); - var mergeScheduler = new TaskMergeScheduler(); - using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy()))) - { - LogMergePolicy logMP = 
(LogMergePolicy)writer.Config.MergePolicy; - logMP.MergeFactor = 10; - for (int i = 0; i < 20; i++) - { - writer.AddDocument(doc); - } + Document doc = new Document(); + Field idField = NewStringField("id", "", Field.Store.YES); + doc.Add(idField); - bool exceptionHit = false; - try - { - mergeScheduler.Sync(); - } - catch (MergePolicy.MergeException) - { - exceptionHit = true; - } + var mergeScheduler = new TaskMergeScheduler(); + using IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy())); + LogMergePolicy logMP = (LogMergePolicy)writer.Config.MergePolicy; + logMP.MergeFactor = 10; + for (int i = 0; i < 20; i++) + { + writer.AddDocument(doc); + } - assertTrue(exceptionHit); - } + bool exceptionHit = false; + try + { + mergeScheduler.Sync(); } + catch (MergePolicy.MergeException) + { + exceptionHit = true; + } + + assertTrue(exceptionHit); } } } \ No newline at end of file diff --git a/src/Lucene.Net.Tests/Support/TestApiConsistency.cs b/src/Lucene.Net.Tests/Support/TestApiConsistency.cs index 1bf1bdaa39..f10100f2dc 100644 --- a/src/Lucene.Net.Tests/Support/TestApiConsistency.cs +++ b/src/Lucene.Net.Tests/Support/TestApiConsistency.cs @@ -45,7 +45,7 @@ public override void TestPrivateFieldNames(Type typeFromTargetAssembly) [TestCase(typeof(Lucene.Net.Analysis.Analyzer))] public override void TestPublicFields(Type typeFromTargetAssembly) { - base.TestPublicFields(typeFromTargetAssembly, @"^Lucene\.Net\.Util\.(?:LightWeight|Disposable)ThreadLocal`1\+(?:LocalState|CurrentThreadState)"); + base.TestPublicFields(typeFromTargetAssembly, @"^Lucene\.Net\.Util\.(?:LightWeight|Disposable)ThreadLocal`1\+(?:LocalState|CurrentThreadState)|^System\.Runtime\.CompilerServices"); } [Test, LuceneNetSpecific] diff --git a/src/Lucene.Net.Tests/Support/TestCase.cs 
b/src/Lucene.Net.Tests/Support/TestCase.cs index c995fe7ddb..2ce1a8613f 100644 --- a/src/Lucene.Net.Tests/Support/TestCase.cs +++ b/src/Lucene.Net.Tests/Support/TestCase.cs @@ -21,7 +21,7 @@ namespace Lucene.Net /// Support for junit.framework.TestCase.getName(). /// {{Lucene.Net-2.9.1}} Move to another location after LUCENENET-266 /// - public class TestCase + public static class TestCase // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { public static string GetName() { diff --git a/src/Lucene.Net.Tests/Support/TestSerialization.cs b/src/Lucene.Net.Tests/Support/TestSerialization.cs index 41231a84d4..5630a5aca1 100644 --- a/src/Lucene.Net.Tests/Support/TestSerialization.cs +++ b/src/Lucene.Net.Tests/Support/TestSerialization.cs @@ -88,20 +88,18 @@ public void TestBooleanQuerySerialization() Assert.AreEqual(lucQuery, lucQuery2, "Error in serialization"); - using (var reader = DirectoryReader.Open(dir)) - { - //Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(dir, true); - Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(reader); + using var reader = DirectoryReader.Open(dir); + //Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(dir, true); + Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(reader); - int hitCount = searcher.Search(lucQuery, 20).TotalHits; + int hitCount = searcher.Search(lucQuery, 20).TotalHits; - //searcher.Close(); - searcher = new Lucene.Net.Search.IndexSearcher(reader); + //searcher.Close(); + searcher = new Lucene.Net.Search.IndexSearcher(reader); - int hitCount2 = searcher.Search(lucQuery2, 20).TotalHits; + int hitCount2 = searcher.Search(lucQuery2, 20).TotalHits; - Assert.AreEqual(hitCount, hitCount2, "Error in serialization - different hit counts"); - } + Assert.AreEqual(hitCount, hitCount2, "Error in serialization - different hit counts"); } } } diff --git 
a/src/Lucene.Net.Tests/TestDemo.cs b/src/Lucene.Net.Tests/TestDemo.cs index 1c2713a57c..e991346e5a 100644 --- a/src/Lucene.Net.Tests/TestDemo.cs +++ b/src/Lucene.Net.Tests/TestDemo.cs @@ -46,44 +46,39 @@ public virtual void TestDemo() Analyzer analyzer = new MockAnalyzer(Random); // Store the index in memory: - using (Directory directory = NewDirectory()) - { - string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; - string text = "this is the text to be indexed. " + longTerm; - - // To store an index on disk, use this instead: - // Directory directory = FSDirectory.open(new File("/tmp/testindex")); - using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(Random, TEST_VERSION_CURRENT, analyzer))) - { - Documents.Document doc = new Documents.Document(); - doc.Add(NewTextField("fieldname", text, Field.Store.YES)); - iwriter.AddDocument(doc); - } + using Directory directory = NewDirectory(); + string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; + string text = "this is the text to be indexed. 
" + longTerm; - // Now search the index: - using (IndexReader ireader = DirectoryReader.Open(directory)) // read-only=true - { - IndexSearcher isearcher = NewSearcher(ireader); - - Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); - Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.Search(query, null, 1); - Assert.AreEqual(1, hits.TotalHits); - // Iterate through the results: - for (int i = 0; i < hits.ScoreDocs.Length; i++) - { - Documents.Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); - Assert.AreEqual(text, hitDoc.Get("fieldname")); - } + // To store an index on disk, use this instead: + // Directory directory = FSDirectory.open(new File("/tmp/testindex")); + using (RandomIndexWriter iwriter = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(Random, TEST_VERSION_CURRENT, analyzer))) + { + Documents.Document doc = new Documents.Document(); + doc.Add(NewTextField("fieldname", text, Field.Store.YES)); + iwriter.AddDocument(doc); + } - // Test simple phrase query - PhraseQuery phraseQuery = new PhraseQuery(); - phraseQuery.Add(new Term("fieldname", "to")); - phraseQuery.Add(new Term("fieldname", "be")); - Assert.AreEqual(1, isearcher.Search(phraseQuery, null, 1).TotalHits); + // Now search the index: + using IndexReader ireader = DirectoryReader.Open(directory); + IndexSearcher isearcher = NewSearcher(ireader); - } + Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits); + Query query = new TermQuery(new Term("fieldname", "text")); + TopDocs hits = isearcher.Search(query, null, 1); + Assert.AreEqual(1, hits.TotalHits); + // Iterate through the results: + for (int i = 0; i < hits.ScoreDocs.Length; i++) + { + Documents.Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc); + Assert.AreEqual(text, hitDoc.Get("fieldname")); } + + // Test simple phrase query + PhraseQuery phraseQuery = new PhraseQuery(); + 
phraseQuery.Add(new Term("fieldname", "to")); + phraseQuery.Add(new Term("fieldname", "be")); + Assert.AreEqual(1, isearcher.Search(phraseQuery, null, 1).TotalHits); } } } \ No newline at end of file diff --git a/src/Lucene.Net.Tests/TestExternalCodecs.cs b/src/Lucene.Net.Tests/TestExternalCodecs.cs index d982644284..9e99e5de9a 100644 --- a/src/Lucene.Net.Tests/TestExternalCodecs.cs +++ b/src/Lucene.Net.Tests/TestExternalCodecs.cs @@ -80,75 +80,71 @@ public virtual void TestPerFieldCodec() Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS); } - using (BaseDirectoryWrapper dir = NewDirectory()) + using BaseDirectoryWrapper dir = NewDirectory(); + dir.CheckIndexOnDispose = false; // we use a custom codec provider + using IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetCodec(new CustomPerFieldCodec()).SetMergePolicy(NewLogMergePolicy(3))); + Documents.Document doc = new Documents.Document(); + // uses default codec: + doc.Add(NewTextField("field1", "this field uses the standard codec as the test", Field.Store.NO)); + // uses pulsing codec: + Field field2 = NewTextField("field2", "this field uses the pulsing codec as the test", Field.Store.NO); + doc.Add(field2); + + Field idField = NewStringField("id", "", Field.Store.NO); + + doc.Add(idField); + for (int i = 0; i < NUM_DOCS; i++) { - dir.CheckIndexOnDispose = false; // we use a custom codec provider - using (IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetCodec(new CustomPerFieldCodec()).SetMergePolicy(NewLogMergePolicy(3)))) + idField.SetStringValue("" + i); + w.AddDocument(doc); + if ((i + 1) % 10 == 0) { - Documents.Document doc = new Documents.Document(); - // uses default codec: - doc.Add(NewTextField("field1", "this field uses the standard codec as the test", Field.Store.NO)); - // uses pulsing codec: - Field field2 = NewTextField("field2", "this field uses the pulsing codec as the test", 
Field.Store.NO); - doc.Add(field2); - - Field idField = NewStringField("id", "", Field.Store.NO); - - doc.Add(idField); - for (int i = 0; i < NUM_DOCS; i++) - { - idField.SetStringValue("" + i); - w.AddDocument(doc); - if ((i + 1) % 10 == 0) - { - w.Commit(); - } - } - if (Verbose) - { - Console.WriteLine("TEST: now delete id=77"); - } - w.DeleteDocuments(new Term("id", "77")); - - using (IndexReader r = DirectoryReader.Open(w, true)) - { - Assert.AreEqual(NUM_DOCS - 1, r.NumDocs); - IndexSearcher s = NewSearcher(r); - Assert.AreEqual(NUM_DOCS - 1, s.Search(new TermQuery(new Term("field1", "standard")), 1).TotalHits); - Assert.AreEqual(NUM_DOCS - 1, s.Search(new TermQuery(new Term("field2", "pulsing")), 1).TotalHits); - } - - if (Verbose) - { - Console.WriteLine("\nTEST: now delete 2nd doc"); - } - w.DeleteDocuments(new Term("id", "44")); - - if (Verbose) - { - Console.WriteLine("\nTEST: now force merge"); - } - w.ForceMerge(1); - if (Verbose) - { - Console.WriteLine("\nTEST: now open reader"); - } - using (IndexReader r = DirectoryReader.Open(w, true)) - { - Assert.AreEqual(NUM_DOCS - 2, r.MaxDoc); - Assert.AreEqual(NUM_DOCS - 2, r.NumDocs); - IndexSearcher s = NewSearcher(r); - Assert.AreEqual(NUM_DOCS - 2, s.Search(new TermQuery(new Term("field1", "standard")), 1).TotalHits); - Assert.AreEqual(NUM_DOCS - 2, s.Search(new TermQuery(new Term("field2", "pulsing")), 1).TotalHits); - Assert.AreEqual(1, s.Search(new TermQuery(new Term("id", "76")), 1).TotalHits); - Assert.AreEqual(0, s.Search(new TermQuery(new Term("id", "77")), 1).TotalHits); - Assert.AreEqual(0, s.Search(new TermQuery(new Term("id", "44")), 1).TotalHits); - - if (Verbose) - { - Console.WriteLine("\nTEST: now close NRT reader"); - } - } + w.Commit(); + } + } + if (Verbose) + { + Console.WriteLine("TEST: now delete id=77"); + } + w.DeleteDocuments(new Term("id", "77")); + + using (IndexReader r = DirectoryReader.Open(w, true)) + { + Assert.AreEqual(NUM_DOCS - 1, r.NumDocs); + IndexSearcher s = 
NewSearcher(r); + Assert.AreEqual(NUM_DOCS - 1, s.Search(new TermQuery(new Term("field1", "standard")), 1).TotalHits); + Assert.AreEqual(NUM_DOCS - 1, s.Search(new TermQuery(new Term("field2", "pulsing")), 1).TotalHits); + } + + if (Verbose) + { + Console.WriteLine("\nTEST: now delete 2nd doc"); + } + w.DeleteDocuments(new Term("id", "44")); + + if (Verbose) + { + Console.WriteLine("\nTEST: now force merge"); + } + w.ForceMerge(1); + if (Verbose) + { + Console.WriteLine("\nTEST: now open reader"); + } + using (IndexReader r = DirectoryReader.Open(w, true)) + { + Assert.AreEqual(NUM_DOCS - 2, r.MaxDoc); + Assert.AreEqual(NUM_DOCS - 2, r.NumDocs); + IndexSearcher s = NewSearcher(r); + Assert.AreEqual(NUM_DOCS - 2, s.Search(new TermQuery(new Term("field1", "standard")), 1).TotalHits); + Assert.AreEqual(NUM_DOCS - 2, s.Search(new TermQuery(new Term("field2", "pulsing")), 1).TotalHits); + Assert.AreEqual(1, s.Search(new TermQuery(new Term("id", "76")), 1).TotalHits); + Assert.AreEqual(0, s.Search(new TermQuery(new Term("id", "77")), 1).TotalHits); + Assert.AreEqual(0, s.Search(new TermQuery(new Term("id", "44")), 1).TotalHits); + + if (Verbose) + { + Console.WriteLine("\nTEST: now close NRT reader"); } } } diff --git a/src/Lucene.Net.Tests/Util/TestMathUtil.cs b/src/Lucene.Net.Tests/Util/TestMathUtil.cs index 25cf6e5f65..eeb8c8b3d8 100644 --- a/src/Lucene.Net.Tests/Util/TestMathUtil.cs +++ b/src/Lucene.Net.Tests/Util/TestMathUtil.cs @@ -79,8 +79,7 @@ public virtual void TestGCD() long l1 = RandomLong(); long l2 = RandomLong(); long gcd = MathUtil.Gcd(l1, l2); - long actualGcd; - if (TryGetGcd(l1, l2, out actualGcd)) + if (TryGetGcd(l1, l2, out long actualGcd)) { Assert.AreEqual(actualGcd, gcd); if (gcd != 0) diff --git a/src/Lucene.Net.Tests/Util/TestNumericUtils.cs b/src/Lucene.Net.Tests/Util/TestNumericUtils.cs index 7db858a770..27caf2bea7 100644 --- a/src/Lucene.Net.Tests/Util/TestNumericUtils.cs +++ b/src/Lucene.Net.Tests/Util/TestNumericUtils.cs @@ -265,8 +265,8 @@ 
private void AssertLongRangeSplit(long lower, long upper, int precisionStep, boo { // Cannot use FixedBitSet since the range could be long: Int64BitSet bits = useBitSet ? new Int64BitSet(upper - lower + 1) : null; - IEnumerator neededBounds = (expectedBounds == null) ? null : expectedBounds.GetEnumerator(); - IEnumerator neededShifts = (expectedShifts == null) ? null : expectedShifts.GetEnumerator(); + using IEnumerator neededBounds = expectedBounds?.GetEnumerator(); + using IEnumerator neededShifts = expectedShifts?.GetEnumerator(); NumericUtils.SplitInt64Range(new LongRangeBuilderAnonymousInnerClassHelper(lower, upper, useBitSet, bits, neededBounds, neededShifts), precisionStep, lower, upper); diff --git a/src/Lucene.Net.Tests/Util/TestOfflineSorter.cs b/src/Lucene.Net.Tests/Util/TestOfflineSorter.cs index 3739ad31ba..d267658fdf 100644 --- a/src/Lucene.Net.Tests/Util/TestOfflineSorter.cs +++ b/src/Lucene.Net.Tests/Util/TestOfflineSorter.cs @@ -159,16 +159,14 @@ private void AssertFilesIdentical(FileInfo golden, FileInfo sorted) byte[] buf1 = new byte[64 * 1024]; byte[] buf2 = new byte[64 * 1024]; int len; - using (Stream is1 = golden.Open(FileMode.Open, FileAccess.Read, FileShare.Delete)) - using (Stream is2 = sorted.Open(FileMode.Open, FileAccess.Read, FileShare.Delete)) + using Stream is1 = golden.Open(FileMode.Open, FileAccess.Read, FileShare.Delete); + using Stream is2 = sorted.Open(FileMode.Open, FileAccess.Read, FileShare.Delete); + while ((len = is1.Read(buf1, 0, buf1.Length)) > 0) { - while ((len = is1.Read(buf1, 0, buf1.Length)) > 0) + is2.Read(buf2, 0, len); + for (int i = 0; i < len; i++) { - is2.Read(buf2, 0, len); - for (int i = 0; i < len; i++) - { - Assert.AreEqual(buf1[i], buf2[i]); - } + Assert.AreEqual(buf1[i], buf2[i]); } } } diff --git a/src/Lucene.Net.Tests/Util/TestPriorityQueue.cs b/src/Lucene.Net.Tests/Util/TestPriorityQueue.cs index 7f90d2f217..da598ae4b8 100644 --- a/src/Lucene.Net.Tests/Util/TestPriorityQueue.cs +++ 
b/src/Lucene.Net.Tests/Util/TestPriorityQueue.cs @@ -253,7 +253,7 @@ public static void TestPrepopulation() // Does not populate it pq = new IntegerQueue(maxSize, false); - Assert.AreEqual(pq.Top, default(int?)); + Assert.AreEqual(pq.Top, default); Assert.AreEqual(pq.Count, 0); } diff --git a/src/Lucene.Net/Analysis/Analyzer.cs b/src/Lucene.Net/Analysis/Analyzer.cs index 978658c705..077a3ecb7c 100644 --- a/src/Lucene.Net/Analysis/Analyzer.cs +++ b/src/Lucene.Net/Analysis/Analyzer.cs @@ -75,7 +75,7 @@ public abstract class Analyzer : IDisposable /// Create a new , reusing the same set of components per-thread /// across calls to . /// - public Analyzer() + protected Analyzer() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(GLOBAL_REUSE_STRATEGY) { } @@ -88,7 +88,7 @@ public Analyzer() /// Lucene.Net.Analysis.Common.Miscellaneous.PerFieldAnalyzerWrapper /// instead. /// - public Analyzer(ReuseStrategy reuseStrategy) + protected Analyzer(ReuseStrategy reuseStrategy) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.reuseStrategy = reuseStrategy; } @@ -447,8 +447,7 @@ public override TokenStreamComponents GetReusableComponents(Analyzer analyzer, s var componentsPerField = (IDictionary)GetStoredValue(analyzer); if (componentsPerField != null) { - TokenStreamComponents ret; - componentsPerField.TryGetValue(fieldName, out ret); + componentsPerField.TryGetValue(fieldName, out TokenStreamComponents ret); return ret; } return null; @@ -608,11 +607,11 @@ public abstract class ReuseStrategy /// /// Currently stored value or null if no value is stored /// if the is closed. 
- protected internal object GetStoredValue(Analyzer analyzer) + protected internal static object GetStoredValue(Analyzer analyzer) // LUCENENET: CA1822: Mark members as static { if (analyzer.storedValue == null) { - throw new ObjectDisposedException(this.GetType().FullName, "this Analyzer is closed"); + throw new ObjectDisposedException(analyzer.GetType().FullName, "this Analyzer is closed"); } return analyzer.storedValue.Value; } @@ -623,11 +622,11 @@ protected internal object GetStoredValue(Analyzer analyzer) /// Analyzer /// Value to store /// if the is closed. - protected internal void SetStoredValue(Analyzer analyzer, object storedValue) + protected internal static void SetStoredValue(Analyzer analyzer, object storedValue) // LUCENENET: CA1822: Mark members as static { if (analyzer.storedValue == null) { - throw new ObjectDisposedException("this Analyzer is closed"); + throw new ObjectDisposedException(analyzer.GetType().FullName, "this Analyzer is closed"); } analyzer.storedValue.Value = storedValue; } diff --git a/src/Lucene.Net/Analysis/CachingTokenFilter.cs b/src/Lucene.Net/Analysis/CachingTokenFilter.cs index aa3bb85a99..db474a2541 100644 --- a/src/Lucene.Net/Analysis/CachingTokenFilter.cs +++ b/src/Lucene.Net/Analysis/CachingTokenFilter.cs @@ -99,5 +99,29 @@ private void FillCache() m_input.End(); finalState = CaptureState(); } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ + // LUCENENET specific + protected override void Dispose(bool disposing) + { + try + { + if (disposing) + { + iterator?.Dispose(); + iterator = null; + } + } + finally + { + base.Dispose(disposing); + } + } } } \ No newline at end of file diff --git a/src/Lucene.Net/Analysis/CharFilter.cs b/src/Lucene.Net/Analysis/CharFilter.cs index ff4b8b8108..90d1cf9771 100644 --- a/src/Lucene.Net/Analysis/CharFilter.cs +++ b/src/Lucene.Net/Analysis/CharFilter.cs @@ -47,7 +47,7 @@ public abstract class CharFilter : TextReader /// /// Create a new wrapping the provided reader. /// a , can also be a for chaining. - public CharFilter(TextReader input) + protected CharFilter(TextReader input) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_input = input; } @@ -82,7 +82,7 @@ protected override void Dispose(bool disposing) public int CorrectOffset(int currentOff) { int corrected = Correct(currentOff); - return (m_input is CharFilter) ? ((CharFilter)m_input).CorrectOffset(corrected) : corrected; + return (m_input is CharFilter charFilter) ? charFilter.CorrectOffset(corrected) : corrected; } // LUCENENET specific - force subclasses to implement Read(char[] buffer, int index, int count), diff --git a/src/Lucene.Net/Analysis/Token.cs b/src/Lucene.Net/Analysis/Token.cs index a9d867cd5b..42da6f9cee 100644 --- a/src/Lucene.Net/Analysis/Token.cs +++ b/src/Lucene.Net/Analysis/Token.cs @@ -399,16 +399,15 @@ public override bool Equals(object obj) return true; } - - if (obj is Token) + + if (obj is Token other) { - var other = (Token)obj; - return (startOffset == other.startOffset && - endOffset == other.endOffset && - flags == other.flags && - positionIncrement == other.positionIncrement && - (type == null ? other.type == null : type.Equals(other.type, StringComparison.Ordinal)) && - (payload == null ? 
other.payload == null : payload.Equals(other.payload)) && + return (startOffset == other.startOffset && + endOffset == other.endOffset && + flags == other.flags && + positionIncrement == other.positionIncrement && + (type == null ? other.type == null : type.Equals(other.type, StringComparison.Ordinal)) && + (payload == null ? other.payload == null : payload.Equals(other.payload)) && base.Equals(obj) ); } @@ -598,8 +597,7 @@ public virtual void Reinit(Token prototype, char[] newTermBuffer, int offset, in public override void CopyTo(IAttribute target) { - var to = target as Token; - if (to != null) + if (target is Token to) { to.Reinit(this); // reinit shares the payload, so clone it: @@ -677,8 +675,7 @@ public override bool Equals(object other) return true; } - var af = other as TokenAttributeFactory; - if (af != null) + if (other is TokenAttributeFactory af) { return this.@delegate.Equals(af.@delegate); } diff --git a/src/Lucene.Net/Analysis/TokenAttributes/CharTermAttribute.cs b/src/Lucene.Net/Analysis/TokenAttributes/CharTermAttribute.cs index 5a57488b02..bb94ceaca3 100644 --- a/src/Lucene.Net/Analysis/TokenAttributes/CharTermAttribute.cs +++ b/src/Lucene.Net/Analysis/TokenAttributes/CharTermAttribute.cs @@ -126,15 +126,7 @@ public virtual void FillBytesRef() // *** CharSequence interface *** - // LUCENENET specific: Replaced with this[int] to .NETify - //public char CharAt(int index) - //{ - // if (index >= TermLength) - // { - // throw new IndexOutOfRangeException(); - // } - // return TermBuffer[index]; - //} + // LUCENENET specific: Replaced CharAt(int) with this[int] to .NETify char ICharSequence.this[int index] => this[index]; diff --git a/src/Lucene.Net/Analysis/TokenAttributes/FlagsAttribute.cs b/src/Lucene.Net/Analysis/TokenAttributes/FlagsAttribute.cs index 2f4e4c2be5..b1699bcd91 100644 --- a/src/Lucene.Net/Analysis/TokenAttributes/FlagsAttribute.cs +++ b/src/Lucene.Net/Analysis/TokenAttributes/FlagsAttribute.cs @@ -55,9 +55,9 @@ public override bool 
Equals(object other) return true; } - if (other is FlagsAttribute) + if (other is FlagsAttribute flagsAttribute) { - return ((FlagsAttribute)other).flags == flags; + return flagsAttribute.flags == flags; } return false; diff --git a/src/Lucene.Net/Analysis/TokenAttributes/ICharTermAttribute.cs b/src/Lucene.Net/Analysis/TokenAttributes/ICharTermAttribute.cs index 5803df0a96..05f449c0a8 100644 --- a/src/Lucene.Net/Analysis/TokenAttributes/ICharTermAttribute.cs +++ b/src/Lucene.Net/Analysis/TokenAttributes/ICharTermAttribute.cs @@ -1,6 +1,7 @@ using J2N.Text; using Lucene.Net.Util; using System; +using System.Diagnostics.CodeAnalysis; using System.Text; namespace Lucene.Net.Analysis.TokenAttributes @@ -35,6 +36,7 @@ public interface ICharTermAttribute : IAttribute, ICharSequence, IAppendable /// the number of characters to copy void CopyBuffer(char[] buffer, int offset, int length); + /// /// Returns the internal termBuffer character array which /// you can then directly alter. If the array is too @@ -48,6 +50,7 @@ public interface ICharTermAttribute : IAttribute, ICharSequence, IAppendable /// the valid . 
/// /// + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Lucene's design requires some array properties")] char[] Buffer { get; } /// diff --git a/src/Lucene.Net/Analysis/TokenAttributes/OffsetAttribute.cs b/src/Lucene.Net/Analysis/TokenAttributes/OffsetAttribute.cs index a1c1d126db..501101390d 100644 --- a/src/Lucene.Net/Analysis/TokenAttributes/OffsetAttribute.cs +++ b/src/Lucene.Net/Analysis/TokenAttributes/OffsetAttribute.cs @@ -74,9 +74,8 @@ public override bool Equals(object other) return true; } - if (other is OffsetAttribute) + if (other is OffsetAttribute o) { - OffsetAttribute o = (OffsetAttribute)other; return o.startOffset == startOffset && o.endOffset == endOffset; } diff --git a/src/Lucene.Net/Analysis/TokenAttributes/PayloadAttribute.cs b/src/Lucene.Net/Analysis/TokenAttributes/PayloadAttribute.cs index 25c5cb09b3..6de7693e19 100644 --- a/src/Lucene.Net/Analysis/TokenAttributes/PayloadAttribute.cs +++ b/src/Lucene.Net/Analysis/TokenAttributes/PayloadAttribute.cs @@ -73,9 +73,8 @@ public override bool Equals(object other) return true; } - if (other is PayloadAttribute) + if (other is PayloadAttribute o) { - PayloadAttribute o = (PayloadAttribute)other; if (o.payload == null || payload == null) { return o.payload == null && payload == null; diff --git a/src/Lucene.Net/Analysis/TokenAttributes/PositionIncrementAttribute.cs b/src/Lucene.Net/Analysis/TokenAttributes/PositionIncrementAttribute.cs index 90dfdbcec3..578b30606b 100644 --- a/src/Lucene.Net/Analysis/TokenAttributes/PositionIncrementAttribute.cs +++ b/src/Lucene.Net/Analysis/TokenAttributes/PositionIncrementAttribute.cs @@ -62,9 +62,8 @@ public override bool Equals(object other) return true; } - if (other is PositionIncrementAttribute) + if (other is PositionIncrementAttribute _other) { - PositionIncrementAttribute _other = (PositionIncrementAttribute)other; return positionIncrement == _other.positionIncrement; } diff --git 
a/src/Lucene.Net/Analysis/TokenAttributes/PositionLengthAttribute.cs b/src/Lucene.Net/Analysis/TokenAttributes/PositionLengthAttribute.cs index 171608a143..cccf078ef3 100644 --- a/src/Lucene.Net/Analysis/TokenAttributes/PositionLengthAttribute.cs +++ b/src/Lucene.Net/Analysis/TokenAttributes/PositionLengthAttribute.cs @@ -62,9 +62,8 @@ public override bool Equals(object other) return true; } - if (other is PositionLengthAttribute) + if (other is PositionLengthAttribute _other) { - PositionLengthAttribute _other = (PositionLengthAttribute)other; return positionLength == _other.positionLength; } diff --git a/src/Lucene.Net/Analysis/TokenAttributes/TypeAttribute.cs b/src/Lucene.Net/Analysis/TokenAttributes/TypeAttribute.cs index 6e5674b2be..8fb4c7408b 100644 --- a/src/Lucene.Net/Analysis/TokenAttributes/TypeAttribute.cs +++ b/src/Lucene.Net/Analysis/TokenAttributes/TypeAttribute.cs @@ -63,9 +63,8 @@ public override bool Equals(object other) return true; } - if (other is TypeAttribute) + if (other is TypeAttribute o) { - TypeAttribute o = (TypeAttribute)other; return (this.type == null ? 
o.type == null : this.type.Equals(o.type, StringComparison.Ordinal)); } diff --git a/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs b/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs index c015135e3d..17b8d9e436 100644 --- a/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs +++ b/src/Lucene.Net/Analysis/TokenStreamToAutomaton.cs @@ -225,10 +225,10 @@ public virtual Automaton ToAutomaton(TokenStream @in) termUnicode[j++] = cp = Character.CodePointAt(utf16, i); } } - else - { - termLen = termUTF8.Length; - } + //else + //{ + // termLen = termUTF8.Length; // LUCENENET: IDE0059: Remove unnecessary value assignment + //} for (int byteIDX = 0; byteIDX < termLen; byteIDX++) { diff --git a/src/Lucene.Net/Analysis/Tokenizer.cs b/src/Lucene.Net/Analysis/Tokenizer.cs index 29fc581203..195b8b9b2c 100644 --- a/src/Lucene.Net/Analysis/Tokenizer.cs +++ b/src/Lucene.Net/Analysis/Tokenizer.cs @@ -73,6 +73,7 @@ protected override void Dispose(bool disposing) if (disposing) { m_input.Dispose(); + inputPending.Dispose(); // LUCENENET specific: call dispose on input pending // LUCENE-2387: don't hold onto TextReader after close, so // GC can reclaim inputPending = ILLEGAL_STATE_READER; @@ -88,7 +89,7 @@ protected override void Dispose(bool disposing) /// protected internal int CorrectOffset(int currentOff) { - return (m_input is CharFilter) ? ((CharFilter)m_input).CorrectOffset(currentOff) : currentOff; + return (m_input is CharFilter charFilter) ? charFilter.CorrectOffset(currentOff) : currentOff; } /// @@ -131,7 +132,7 @@ public override int Read(char[] cbuf, int off, int len) { throw new InvalidOperationException("TokenStream contract violation: Reset()/Dispose() call missing, " + "Reset() called multiple times, or subclass does not call base.Reset(). 
" - + "Please see Javadocs of TokenStream class for more information about the correct consuming workflow."); + + "Please see the documentation of TokenStream class for more information about the correct consuming workflow."); } protected override void Dispose(bool disposing) diff --git a/src/Lucene.Net/ChangeNotes.txt b/src/Lucene.Net/ChangeNotes.txt index c61ef98b30..66b056c211 100644 --- a/src/Lucene.Net/ChangeNotes.txt +++ b/src/Lucene.Net/ChangeNotes.txt @@ -29,7 +29,7 @@ TODO: Comments need to be written for WeakDictionary TODO: Tests need to be written for IdentityDictionary -> Verify behavior -PriorityQueue in InsertWithOverflow, java returns null, I set it to return default(T). I don't think it's an issue. We should, at least, document +PriorityQueue in InsertWithOverflow, java returns null, I set it to return default. I don't think it's an issue. We should, at least, document that is may have unexpected results if used with a non-nullable type. BooleanClause.java - Can't override ToString on Enum or replace with Extension Method. Leave type-safe, override with extension method, or create static class? 
diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs index 0f45c18735..3d712ca882 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs @@ -6,6 +6,7 @@ using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; using System.Text; using JCG = J2N.Collections.Generic; @@ -86,13 +87,10 @@ namespace Lucene.Net.Codecs /// public class BlockTreeTermsReader : FieldsProducer { - private void InitializeInstanceFields() - { - NO_OUTPUT = fstOutputs.NoOutput; - } - // Open input to the main terms dict file (_X.tib) +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly IndexInput @in; +#pragma warning restore CA2213 // Disposable fields should be disposed //private static final boolean DEBUG = BlockTreeTermsWriter.DEBUG; @@ -111,7 +109,7 @@ private void InitializeInstanceFields() /// File offset where the directory starts in the index file. private long indexDirOffset; - private string segment; + private readonly string segment; // LUCENENET: marked readonly private readonly int version; @@ -119,8 +117,7 @@ private void InitializeInstanceFields() /// Sole constructor. 
public BlockTreeTermsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo info, PostingsReaderBase postingsReader, IOContext ioContext, string segmentSuffix, int indexDivisor) { - InitializeInstanceFields(); - + NO_OUTPUT = fstOutputs.NoOutput; this.postingsReader = postingsReader; this.segment = info.Name; @@ -293,8 +290,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { if (Debugging.AssertsEnabled) Debugging.Assert(field != null); - FieldReader ret; - fields.TryGetValue(field, out ret); + fields.TryGetValue(field, out FieldReader ret); return ret; } @@ -606,6 +602,7 @@ internal FieldReader(BlockTreeTermsReader outerInstance, FieldInfo fieldInfo, lo /// /// For debugging -- used by CheckIndex too // TODO: maybe push this into Terms? + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Stats ComputeStats() { return (new SegmentTermsEnum(this)).ComputeBlockStats(); @@ -1048,12 +1045,14 @@ public IntersectEnum(BlockTreeTermsReader.FieldReader outerInstance, CompiledAut } // only for assert: + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal bool SetSavedStartTerm(BytesRef startTerm) { savedStartTerm = startTerm == null ? 
null : BytesRef.DeepCopyOf(startTerm); return true; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermState GetTermState() { currentFrame.DecodeMetaData(); @@ -1149,12 +1148,14 @@ public override long TotalTermFreq } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocsEnum Docs(IBits skipDocs, DocsEnum reuse, DocsFlags flags) { currentFrame.DecodeMetaData(); return outerInstance.outerInstance.postingsReader.Docs(outerInstance.fieldInfo, currentFrame.termState, skipDocs, reuse, flags); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocsAndPositionsEnum DocsAndPositions(IBits skipDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { // LUCENENET specific - to avoid boxing, changed from CompareTo() to IndexOptionsComparer.Compare() @@ -1549,10 +1550,10 @@ public SegmentTermsEnum(BlockTreeTermsReader.FieldReader outerInstance) // Empty string prefix must have an output in the index! if (Debugging.AssertsEnabled) Debugging.Assert(arc.IsFinal); } - else - { - arc = null; - } + //else + //{ + // arc = null; // LUCENENET: IDE0059: Remove unnecessary value assignment + //} currentFrame = staticFrame; //currentFrame = pushFrame(arc, rootCode, 0); //currentFrame.loadBlock(); @@ -1567,6 +1568,7 @@ public SegmentTermsEnum(BlockTreeTermsReader.FieldReader outerInstance) } // Not private to avoid synthetic access$NNN methods + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void InitIndexInput() { if (this.@in == null) @@ -2365,71 +2367,7 @@ public override SeekStatus SeekCeil(BytesRef target) } } - // LUCENENET NOTE: Not in use - - //private void PrintSeekState(PrintStream @out) - //{ - // if (CurrentFrame == StaticFrame) - // { - // @out.println(" no prior seek"); - // } - // else - // { - // @out.println(" prior seek state:"); - // int ord = 0; - // bool isSeekFrame = true; - // while (true) - // { - // Frame f = GetFrame(ord); - // if (Debugging.AssertsEnabled) Debugging.Assert(f != 
null); - // BytesRef prefix = new BytesRef(term.Bytes, 0, f.Prefix); - // if (f.NextEnt == -1) - // { - // @out.println(" frame " + (isSeekFrame ? "(seek)" : "(next)") + " ord=" + ord + " fp=" + f.Fp + (f.IsFloor ? (" (fpOrig=" + f.FpOrig + ")") : "") + " prefixLen=" + f.Prefix + " prefix=" + prefix + (f.NextEnt == -1 ? "" : (" (of " + f.EntCount + ")")) + " hasTerms=" + f.HasTerms + " isFloor=" + f.IsFloor + " code=" + ((f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS : 0) + (f.IsFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR : 0)) + " isLastInFloor=" + f.IsLastInFloor + " mdUpto=" + f.MetaDataUpto + " tbOrd=" + f.TermBlockOrd); - // } - // else - // { - // @out.println(" frame " + (isSeekFrame ? "(seek, loaded)" : "(next, loaded)") + " ord=" + ord + " fp=" + f.Fp + (f.IsFloor ? (" (fpOrig=" + f.FpOrig + ")") : "") + " prefixLen=" + f.Prefix + " prefix=" + prefix + " nextEnt=" + f.NextEnt + (f.NextEnt == -1 ? "" : (" (of " + f.EntCount + ")")) + " hasTerms=" + f.HasTerms + " isFloor=" + f.IsFloor + " code=" + ((f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) + (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS : 0) + (f.IsFloor ? 
BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR : 0)) + " lastSubFP=" + f.LastSubFP + " isLastInFloor=" + f.IsLastInFloor + " mdUpto=" + f.MetaDataUpto + " tbOrd=" + f.TermBlockOrd); - // } - // if (OuterInstance.Index != null) - // { - // if (Debugging.AssertsEnabled) Debugging.Assert(!isSeekFrame || f.Arc != null, "isSeekFrame={0} f.arc={1}", isSeekFrame, f.Arc); - // if (f.Prefix > 0 && isSeekFrame && f.Arc.Label != (term.Bytes[f.Prefix - 1] & 0xFF)) - // { - // @out.println(" broken seek state: arc.label=" + (char)f.Arc.Label + " vs term byte=" + (char)(term.Bytes[f.Prefix - 1] & 0xFF)); - // throw new Exception("seek state is broken"); - // } - // BytesRef output = Util.Get(OuterInstance.Index, prefix); - // if (output == null) - // { - // @out.println(" broken seek state: prefix is not final in index"); - // throw new Exception("seek state is broken"); - // } - // else if (isSeekFrame && !f.IsFloor) - // { - // ByteArrayDataInput reader = new ByteArrayDataInput(output.Bytes, output.Offset, output.Length); - // long codeOrig = reader.ReadVLong(); - // long code = (f.Fp << BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS) | (f.HasTerms ? BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS : 0) | (f.IsFloor ? BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR : 0); - // if (codeOrig != code) - // { - // @out.println(" broken seek state: output code=" + codeOrig + " doesn't match frame code=" + code); - // throw new Exception("seek state is broken"); - // } - // } - // } - // if (f == CurrentFrame) - // { - // break; - // } - // if (f.Prefix == ValidIndexPrefix) - // { - // isSeekFrame = false; - // } - // ord++; - // } - // } - //} - + // LUCENENET specific - Removed private void PrintSeekState(PrintStream @out) because it is not referenced /* Decodes only the term bytes of the next term. 
If caller then asks for metadata, ie docFreq, totalTermFreq or pulls a D/&PEnum, we then (lazily) @@ -2914,6 +2852,7 @@ internal void Rewind() */ } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Next() { return isLeafBlock ? NextLeaf() : NextNonLeaf(); @@ -3002,7 +2941,7 @@ public void ScanToFloorFrame(BytesRef target) if (Debugging.AssertsEnabled) Debugging.Assert(numFollowFloorBlocks != 0); - long newFP = fpOrig; + long newFP/* = fpOrig*/; // LUCENENET: IDE0059: Remove unnecessary value assignment while (true) { long code = floorDataReader.ReadVInt64(); diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs index 2909c27e83..2b2e54d01a 100644 --- a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs +++ b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs @@ -4,8 +4,8 @@ using Lucene.Net.Util.Fst; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Codecs @@ -233,8 +233,10 @@ public class BlockTreeTermsWriter : FieldsConsumer internal const string TERMS_INDEX_CODEC_NAME = "BLOCK_TREE_TERMS_INDEX"; +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly IndexOutput @out; private readonly IndexOutput indexOut; +#pragma warning restore CA2213 // Disposable fields should be disposed internal readonly int minItemsInBlock; internal readonly int maxItemsInBlock; @@ -338,6 +340,7 @@ public BlockTreeTermsWriter(SegmentWriteState state, PostingsWriterBase postings /// /// Writes the terms file header. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal virtual void WriteHeader(IndexOutput @out) { CodecUtil.WriteHeader(@out, TERMS_CODEC_NAME, VERSION_CURRENT); @@ -345,6 +348,7 @@ protected internal virtual void WriteHeader(IndexOutput @out) /// /// Writes the index file header. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal virtual void WriteIndexHeader(IndexOutput @out) { CodecUtil.WriteHeader(@out, TERMS_INDEX_CODEC_NAME, VERSION_CURRENT); @@ -352,6 +356,7 @@ protected internal virtual void WriteIndexHeader(IndexOutput @out) /// /// Writes the terms file trailer. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal virtual void WriteTrailer(IndexOutput @out, long dirStart) { @out.WriteInt64(dirStart); @@ -359,6 +364,7 @@ protected internal virtual void WriteTrailer(IndexOutput @out, long dirStart) /// /// Writes the index file trailer. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal virtual void WriteIndexTrailer(IndexOutput indexOut, long dirStart) { indexOut.WriteInt64(dirStart); @@ -373,6 +379,7 @@ public override TermsConsumer AddField(FieldInfo field) return new TermsWriter(this, field); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static long EncodeOutput(long fp, bool hasTerms, bool isFloor) { if (Debugging.AssertsEnabled) Debugging.Assert(fp < (1L << 62)); @@ -459,31 +466,29 @@ public override string ToString() // For assert if (blocks.Count == 0) return "[]"; - using (var it = blocks.GetEnumerator()) + using var it = blocks.GetEnumerator(); + StringBuilder sb = new StringBuilder(); + sb.Append('['); + it.MoveNext(); + while (true) { - StringBuilder sb = new StringBuilder(); - sb.Append('['); - it.MoveNext(); - while (true) + var e = it.Current; + // There is a chance that the Prefix will contain invalid UTF8, + // so we catch that and use the alternative way of displaying it + try { - var e = it.Current; - // There is a chance that the Prefix will contain invalid UTF8, - // so we catch that and use the alternative way of displaying it - try - { - sb.Append(e.ToString()); - } - catch (IndexOutOfRangeException) - { - sb.Append("BLOCK: "); - sb.Append(e.Prefix.ToString()); - } - if (!it.MoveNext()) - { - return sb.Append(']').ToString(); - } - 
sb.Append(',').Append(' '); + sb.Append(e.ToString()); + } + catch (IndexOutOfRangeException) + { + sb.Append("BLOCK: "); + sb.Append(e.Prefix.ToString()); + } + if (!it.MoveNext()) + { + return sb.Append(']').ToString(); } + sb.Append(',').Append(' '); } } } @@ -580,7 +585,9 @@ private void Append(Builder builder, FST subIndex) } } +#pragma warning disable CA2213 // Disposable fields should be disposed internal readonly RAMOutputStream scratchBytes = new RAMOutputStream(); +#pragma warning restore CA2213 // Disposable fields should be disposed internal class TermsWriter : TermsConsumer { @@ -688,7 +695,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun // and we found 30 terms/sub-blocks starting w/ that // prefix, and minItemsInBlock <= 30 <= // maxItemsInBlock. - PendingBlock nonFloorBlock = WriteBlock(prevTerm, prefixLength, prefixLength, count, count, 0, false, -1, true); + PendingBlock nonFloorBlock = WriteBlock(prevTerm, prefixLength, prefixLength, count, count, /*0, LUCENENET: Never read */ false, -1, true); nonFloorBlock.CompileIndex(null, outerInstance.scratchBytes); pending.Add(nonFloorBlock); } @@ -864,7 +871,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun prevTerm.Int32s[prevTerm.Offset + prefixLength] = startLabel; } //System.out.println(" " + subCount + " subs"); - PendingBlock floorBlock = WriteBlock(prevTerm, prefixLength, curPrefixLength, curStart, pendingCount, subTermCountSums[1 + sub], true, startLabel, curStart == pendingCount); + PendingBlock floorBlock = WriteBlock(prevTerm, prefixLength, curPrefixLength, curStart, pendingCount, /*subTermCountSums[1 + sub], LUCENENET: Never read */ true, startLabel, curStart == pendingCount); if (firstBlock == null) { firstBlock = floorBlock; @@ -908,7 +915,7 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun System.out.println(" **"); } */ - floorBlocks.Add(WriteBlock(prevTerm, prefixLength, 
prefixLength + 1, curStart, curStart, 0, true, startLabel, true)); + floorBlocks.Add(WriteBlock(prevTerm, prefixLength, prefixLength + 1, curStart,curStart, /* 0, LUCENENET: Never read */ true, startLabel, true)); break; } } @@ -926,7 +933,9 @@ internal virtual void WriteBlocks(Int32sRef prevTerm, int prefixLength, int coun } // for debugging +#pragma warning disable IDE0051 // Remove unused private members private string ToString(BytesRef b) +#pragma warning restore IDE0051 // Remove unused private members { try { @@ -943,7 +952,9 @@ private string ToString(BytesRef b) // Writes all entries in the pending slice as a single // block: - private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexPrefixLength, int startBackwards, int length, int futureTermCount, bool isFloor, int floorLeadByte, bool isLastInFloor) + private PendingBlock WriteBlock(Int32sRef prevTerm, int prefixLength, int indexPrefixLength, + int startBackwards, int length, /*int futureTermCount, // LUCENENET: Not used*/ + bool isFloor, int floorLeadByte, bool isLastInFloor) { if (Debugging.AssertsEnabled) Debugging.Assert(length > 0); @@ -1180,6 +1191,7 @@ internal TermsWriter(BlockTreeTermsWriter outerInstance, FieldInfo fieldInfo) public override IComparer Comparer => BytesRef.UTF8SortedAsUnicodeComparer; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsConsumer StartTerm(BytesRef text) { //if (DEBUG) System.out.println("\nBTTW.startTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment); @@ -1306,7 +1318,7 @@ protected override void Dispose(bool disposing) } finally { - IOUtils.DisposeWhileHandlingException(ioe, @out, indexOut, postingsWriter); + IOUtils.DisposeWhileHandlingException(ioe, @out, indexOut, postingsWriter, scratchBytes); // LUCENENET: Added scratchBytes } } } diff --git a/src/Lucene.Net/Codecs/Codec.cs b/src/Lucene.Net/Codecs/Codec.cs index ea731f40d6..1a26fefee3 100644 --- a/src/Lucene.Net/Codecs/Codec.cs +++ 
b/src/Lucene.Net/Codecs/Codec.cs @@ -144,9 +144,9 @@ public static ICollection AvailableCodecs { get { - if (codecFactory is IServiceListable) + if (codecFactory is IServiceListable serviceListable) { - return ((IServiceListable)codecFactory).AvailableServices; + return serviceListable.AvailableServices; } else { diff --git a/src/Lucene.Net/Codecs/CodecUtil.cs b/src/Lucene.Net/Codecs/CodecUtil.cs index f78b5b9d6b..7bbdaa44b5 100644 --- a/src/Lucene.Net/Codecs/CodecUtil.cs +++ b/src/Lucene.Net/Codecs/CodecUtil.cs @@ -2,8 +2,8 @@ using Lucene.Net.Store; using Lucene.Net.Util; using System; -using System.Diagnostics; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs { @@ -32,12 +32,8 @@ namespace Lucene.Net.Codecs /// /// @lucene.experimental /// - public sealed class CodecUtil + public static class CodecUtil // LUCENENET specific - marked static because all members are static { - private CodecUtil() // no instance - { - } - /// /// Constant to identify the start of a codec header. /// @@ -73,6 +69,7 @@ private CodecUtil() // no instance /// less than 128 characters in length. /// Version number /// If there is an I/O error writing to the underlying medium. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void WriteHeader(DataOutput @out, string codec, int version) { BytesRef bytes = new BytesRef(codec); @@ -91,6 +88,7 @@ public static void WriteHeader(DataOutput @out, string codec, int version) /// Codec name. /// Length of the entire codec header. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int HeaderLength(string codec) { return 9 + codec.Length; @@ -122,6 +120,7 @@ public static int HeaderLength(string codec) /// than . /// If there is an I/O error reading from the underlying medium. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int CheckHeader(DataInput @in, string codec, int minVersion, int maxVersion) { // Safety to guard against reading a bogus string: @@ -180,6 +179,7 @@ public static int CheckHeaderNoMagic(DataInput @in, string codec, int minVersion /// /// Output stream /// If there is an I/O error writing to the underlying medium. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void WriteFooter(IndexOutput @out) { @out.WriteInt32(FOOTER_MAGIC); @@ -192,6 +192,7 @@ public static void WriteFooter(IndexOutput @out) /// /// Length of the entire codec footer. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int FooterLength() { return 16; @@ -223,6 +224,7 @@ public static long CheckFooter(ChecksumIndexInput @in) /// Returns (but does not validate) the checksum previously written by . /// actual checksum value /// If the footer is invalid. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long RetrieveChecksum(IndexInput @in) { @in.Seek(@in.Length - FooterLength()); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs index fafc1296f0..487cb5523a 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsFormat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Compressing { @@ -101,16 +102,19 @@ public CompressingStoredFieldsFormat(string formatName, string segmentSuffix, Co this.chunkSize = chunkSize; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override StoredFieldsReader FieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) { return new CompressingStoredFieldsReader(directory, si, segmentSuffix, fn, context, formatName, compressionMode); } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public override StoredFieldsWriter FieldsWriter(Directory directory, SegmentInfo si, IOContext context) { return new CompressingStoredFieldsWriter(directory, si, segmentSuffix, context, formatName, compressionMode, chunkSize); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return this.GetType().Name + "(compressionMode=" + compressionMode + ", chunkSize=" + chunkSize + ")"; diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs index 50c0cbdaf8..576e195438 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs @@ -1,5 +1,6 @@ using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; using ArrayUtil = Lucene.Net.Util.ArrayUtil; namespace Lucene.Net.Codecs.Compressing @@ -138,6 +139,7 @@ private int Block(int docID) return hi; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int RelativeDocBase(int block, int relativeChunk) { int expected = avgChunkDocs[block] * relativeChunk; @@ -145,6 +147,7 @@ private int RelativeDocBase(int block, int relativeChunk) return expected + (int)delta; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private long RelativeStartPointer(int block, int relativeChunk) { long expected = avgChunkSizes[block] * relativeChunk; @@ -186,6 +189,7 @@ internal long GetStartPointer(int docID) return startPointers[block] + RelativeStartPointer(block, relativeChunk); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { return this; diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs index 21b7f2511a..c57cf33f8f 100644 --- 
a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexWriter.cs @@ -1,6 +1,6 @@ using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Compressing { @@ -73,6 +73,7 @@ public sealed class CompressingStoredFieldsIndexWriter : IDisposable { internal const int BLOCK_SIZE = 1024; // number of chunks to serialize at once + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static long MoveSignToLowOrderBit(long n) { return (n >> 63) ^ (n << 1); @@ -97,6 +98,7 @@ internal CompressingStoredFieldsIndexWriter(IndexOutput indexOutput) fieldsIndexOut.WriteVInt32(PackedInt32s.VERSION_CURRENT); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void Reset() { blockChunks = 0; @@ -223,6 +225,7 @@ internal void Finish(int numDocs, long maxPointer) CodecUtil.WriteFooter(fieldsIndexOut); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Dispose() { fieldsIndexOut.Dispose(); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs index 99e38a1758..742169b80a 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs @@ -2,8 +2,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; -using System.Reflection; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Compressing { @@ -51,13 +50,15 @@ namespace Lucene.Net.Codecs.Compressing public sealed class CompressingStoredFieldsReader : StoredFieldsReader { // Do not reuse the decompression buffer when there is more than 32kb to decompress - private static readonly int BUFFER_REUSE_THRESHOLD = 1 << 15; + private const int BUFFER_REUSE_THRESHOLD = 1 << 15; private readonly int version; 
private readonly FieldInfos fieldInfos; private readonly CompressingStoredFieldsIndexReader indexReader; private readonly long maxPointer; +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly IndexInput fieldsStream; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly int chunkSize; private readonly int packedIntsVersion; private readonly CompressionMode compressionMode; @@ -166,6 +167,7 @@ public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segment } /// If this FieldsReader is disposed. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EnsureOpen() { if (closed) @@ -177,6 +179,7 @@ private void EnsureOpen() /// /// Dispose the underlying s. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (!closed) @@ -340,7 +343,7 @@ public override void VisitDocument(int docID, StoredFieldVisitor visitor) } decompressor.Decompress(fieldsStream, chunkSize, offset, Math.Min(length, chunkSize - offset), bytes); - documentInput = new DataInputAnonymousInnerClassHelper(this, offset, length); + documentInput = new DataInputAnonymousInnerClassHelper(this, length); } else { @@ -379,13 +382,11 @@ private class DataInputAnonymousInnerClassHelper : DataInput { private readonly CompressingStoredFieldsReader outerInstance; - private int offset; - private int length; + private readonly int length; - public DataInputAnonymousInnerClassHelper(CompressingStoredFieldsReader outerInstance, int offset, int length) + public DataInputAnonymousInnerClassHelper(CompressingStoredFieldsReader outerInstance, int length) { this.outerInstance = outerInstance; - this.offset = offset; this.length = length; decompressed = outerInstance.bytes.Length; } diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs index 3791f741f7..8cc1dd6dfa 100644 --- 
a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsWriter.cs @@ -8,8 +8,6 @@ using Lucene.Net.Util; using Lucene.Net.Util.Packed; using System; -using System.Diagnostics; -using System.Globalization; using System.Runtime.CompilerServices; using Document = Lucene.Net.Documents.Document; @@ -74,8 +72,10 @@ public sealed class CompressingStoredFieldsWriter : StoredFieldsWriter private readonly Directory directory; private readonly string segment; private readonly string segmentSuffix; +#pragma warning disable CA2213 // Disposable fields should be disposed private CompressingStoredFieldsIndexWriter indexWriter; private IndexOutput fieldsStream; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly CompressionMode compressionMode; private readonly Compressor compressor; @@ -234,6 +234,7 @@ private void WriteHeader(int docBase, int numBufferedDocs, int[] numStoredFields SaveInt32s(lengths, numBufferedDocs, fieldsStream); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool TriggerFlush() { return bufferedDocs.Length >= chunkSize || numBufferedDocs >= MAX_DOCUMENTS_PER_CHUNK; // chunks of at least chunkSize bytes @@ -275,7 +276,7 @@ private void Flush() public override void WriteField(FieldInfo info, IIndexableField field) { - int bits = 0; + int bits/* = 0*/; // LUCENENET: IDE0059: Remove unnecessary value assignment BytesRef bytes; string @string; @@ -402,9 +403,9 @@ public override int Merge(MergeState mergeState) { StoredFieldsReader fieldsReader = matchingSegmentReader.FieldsReader; // we can only bulk-copy if the matching reader is also a CompressingStoredFieldsReader - if (fieldsReader != null && fieldsReader is CompressingStoredFieldsReader) + if (fieldsReader != null && fieldsReader is CompressingStoredFieldsReader compressingStoredFieldsReader) { - matchingFieldsReader = (CompressingStoredFieldsReader)fieldsReader; + 
matchingFieldsReader = compressingStoredFieldsReader; } } @@ -429,7 +430,7 @@ public override int Merge(MergeState mergeState) { // not all docs were deleted CompressingStoredFieldsReader.ChunkIterator it = matchingFieldsReader.GetChunkIterator(docID); - int[] startOffsets = new int[0]; + int[] startOffsets = Arrays.Empty(); do { // go to the next chunk that contains docID @@ -486,6 +487,7 @@ public override int Merge(MergeState mergeState) return docCount; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int NextLiveDoc(int doc, IBits liveDocs, int maxDoc) { if (liveDocs == null) @@ -499,6 +501,7 @@ private static int NextLiveDoc(int doc, IBits liveDocs, int maxDoc) return doc; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int NextDeletedDoc(int doc, IBits liveDocs, int maxDoc) { if (liveDocs == null) diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs index 56c353a17a..e01c3f04ea 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsFormat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; using Directory = Lucene.Net.Store.Directory; namespace Lucene.Net.Codecs.Compressing @@ -74,11 +75,13 @@ public CompressingTermVectorsFormat(string formatName, string segmentSuffix, Com this.chunkSize = chunkSize; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context) { return new CompressingTermVectorsReader(directory, segmentInfo, segmentSuffix, fieldInfos, context, formatName, compressionMode); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context) { return new 
CompressingTermVectorsWriter(directory, segmentInfo, segmentSuffix, context, formatName, compressionMode, chunkSize); diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs index 182ea54d20..0e4f03f519 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs @@ -6,6 +6,7 @@ using System; using System.Collections.Generic; using System.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Compressing { @@ -31,11 +32,13 @@ namespace Lucene.Net.Codecs.Compressing /// /// @lucene.experimental /// - public sealed class CompressingTermVectorsReader : TermVectorsReader, IDisposable + public sealed class CompressingTermVectorsReader : TermVectorsReader // LUCENENET specific - removed IDisposable, it is already implemented in base class { private readonly FieldInfos fieldInfos; internal readonly CompressingStoredFieldsIndexReader indexReader; +#pragma warning disable CA2213 // Disposable fields should be disposed internal readonly IndexInput vectorsStream; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly int version; private readonly int packedIntsVersion; private readonly CompressionMode compressionMode; @@ -138,6 +141,7 @@ public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentS internal IndexInput VectorsStream => vectorsStream; /// if this is disposed. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EnsureOpen() { if (closed) @@ -146,6 +150,7 @@ private void EnsureOpen() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (!closed) @@ -155,6 +160,7 @@ protected override void Dispose(bool disposing) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Clone() { return new CompressingTermVectorsReader(this); @@ -313,7 +319,7 @@ public override Fields Get(int doc) reader.Reset(vectorsStream, totalTerms); // skip - toSkip = 0; + //toSkip = 0; // LUCENENET: IDE0059: Remove unnecessary value assignment for (int i = 0; i < skip; ++i) { for (int j = 0; j < numTerms.Get(i); ++j) @@ -681,6 +687,7 @@ public TVFields(CompressingTermVectorsReader outerInstance, int[] fieldNums, int this.suffixBytes = suffixBytes; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override IEnumerator GetEnumerator() { return GetFieldInfoNameEnumerable().GetEnumerator(); @@ -733,7 +740,7 @@ public override Terms GetTerms(string field) } } if (Debugging.AssertsEnabled) Debugging.Assert(fieldLen >= 0); - return new TVTerms(outerInstance, numTerms[idx], fieldFlags[idx], prefixLengths[idx], suffixLengths[idx], termFreqs[idx], positionIndex[idx], positions[idx], startOffsets[idx], lengths[idx], payloadIndex[idx], payloadBytes, new BytesRef(suffixBytes.Bytes, suffixBytes.Offset + fieldOff, fieldLen)); + return new TVTerms(numTerms[idx], fieldFlags[idx], prefixLengths[idx], suffixLengths[idx], termFreqs[idx], positionIndex[idx], positions[idx], startOffsets[idx], lengths[idx], payloadIndex[idx], payloadBytes, new BytesRef(suffixBytes.Bytes, suffixBytes.Offset + fieldOff, fieldLen)); } public override int Count => fieldNumOffs.Length; @@ -741,15 +748,12 @@ public override Terms GetTerms(string field) private class TVTerms : Terms { - private readonly CompressingTermVectorsReader outerInstance; - private readonly int numTerms, flags; 
private readonly int[] prefixLengths, suffixLengths, termFreqs, positionIndex, positions, startOffsets, lengths, payloadIndex; private readonly BytesRef termBytes, payloadBytes; - internal TVTerms(CompressingTermVectorsReader outerInstance, int numTerms, int flags, int[] prefixLengths, int[] suffixLengths, int[] termFreqs, int[] positionIndex, int[] positions, int[] startOffsets, int[] lengths, int[] payloadIndex, BytesRef payloadBytes, BytesRef termBytes) + internal TVTerms(int numTerms, int flags, int[] prefixLengths, int[] suffixLengths, int[] termFreqs, int[] positionIndex, int[] positions, int[] startOffsets, int[] lengths, int[] payloadIndex, BytesRef payloadBytes, BytesRef termBytes) { - this.outerInstance = outerInstance; this.numTerms = numTerms; this.flags = flags; this.prefixLengths = prefixLengths; @@ -764,6 +768,7 @@ internal TVTerms(CompressingTermVectorsReader outerInstance, int numTerms, int f this.termBytes = termBytes; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetEnumerator() { var termsEnum = new TVTermsEnum(); @@ -771,12 +776,10 @@ public override TermsEnum GetEnumerator() return termsEnum; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetEnumerator(TermsEnum reuse) { - TVTermsEnum termsEnum; - if (!(reuse is null) && reuse is TVTermsEnum) - termsEnum = (TVTermsEnum)reuse; - else + if (reuse is null || !(reuse is TVTermsEnum termsEnum)) termsEnum = new TVTermsEnum(); termsEnum.Reset(numTerms, flags, prefixLengths, suffixLengths, termFreqs, positionIndex, positions, startOffsets, lengths, payloadIndex, payloadBytes, new ByteArrayDataInput(termBytes.Bytes, termBytes.Offset, termBytes.Length)); @@ -832,6 +835,7 @@ internal virtual void Reset(int numTerms, int flags, int[] prefixLengths, int[] Reset(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void Reset() { term.Length = 0; @@ -916,22 +920,17 @@ public override void SeekExact(long ord) public 
override long TotalTermFreq => termFreqs[ord]; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { - TVDocsEnum docsEnum; - if (reuse != null && reuse is TVDocsEnum) - { - docsEnum = (TVDocsEnum)reuse; - } - else - { + if (reuse is null || !(reuse is TVDocsEnum docsEnum)) docsEnum = new TVDocsEnum(); - } docsEnum.Reset(liveDocs, termFreqs[ord], positionIndex[ord], positions, startOffsets, lengths, payloads, payloadIndex); return docsEnum; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { if (positions == null && startOffsets == null) @@ -978,6 +977,7 @@ public virtual void Reset(IBits liveDocs, int freq, int positionIndex, int[] pos doc = i = -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void CheckDoc() { if (doc == NO_MORE_DOCS) @@ -990,6 +990,7 @@ private void CheckDoc() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void CheckPosition() { CheckDoc(); @@ -1100,17 +1101,20 @@ public override int NextDoc() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Advance(int target) { return SlowAdvance(target); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return 1; } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int Sum(int[] arr) { int sum = 0; @@ -1119,11 +1123,13 @@ private static int Sum(int[] arr) return sum; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return indexReader.RamBytesUsed(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { if (version >= CompressingTermVectorsWriter.VERSION_CHECKSUM) diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs 
b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs index c30bc4591a..1faa1c1dbd 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs @@ -79,8 +79,10 @@ public sealed class CompressingTermVectorsWriter : TermVectorsWriter private readonly Directory directory; private readonly string segment; private readonly string segmentSuffix; +#pragma warning disable CA2213 // Disposable fields should be disposed private CompressingStoredFieldsIndexWriter indexWriter; private IndexOutput vectorsStream; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly CompressionMode compressionMode; private readonly Compressor compressor; @@ -190,6 +192,7 @@ internal FieldData(CompressingTermVectorsWriter outerInstance, int fieldNum, int ord = 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void AddTerm(int freq, int prefixLength, int suffixLength) { freqs[ord] = freq; @@ -299,6 +302,7 @@ public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, string } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -322,6 +326,7 @@ public override void Abort() IOUtils.DeleteFilesIgnoringExceptions(directory, IndexFileNames.SegmentFileName(segment, segmentSuffix, VECTORS_EXTENSION), IndexFileNames.SegmentFileName(segment, segmentSuffix, VECTORS_INDEX_EXTENSION)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void StartDocument(int numVectorFields) { curDoc = AddDocData(numVectorFields); @@ -341,12 +346,14 @@ public override void FinishDocument() curDoc = null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads) { curField = curDoc.AddField(info.Number, numTerms, positions, offsets, payloads); lastTerm.Length = 
0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void FinishField() { curField = null; @@ -378,6 +385,7 @@ public override void AddPosition(int position, int startOffset, int endOffset, B } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool TriggerFlush() { return termSuffixes.Length >= chunkSize || pendingDocs.Count >= MAX_DOCUMENTS_PER_CHUNK; @@ -886,9 +894,9 @@ public override int Merge(MergeState mergeState) { TermVectorsReader vectorsReader = matchingSegmentReader.TermVectorsReader; // we can only bulk-copy if the matching reader is also a CompressingTermVectorsReader - if (vectorsReader != null && vectorsReader is CompressingTermVectorsReader) + if (vectorsReader != null && vectorsReader is CompressingTermVectorsReader compressingTermVectorsReader) { - matchingVectorsReader = (CompressingTermVectorsReader)vectorsReader; + matchingVectorsReader = compressingTermVectorsReader; } } @@ -969,6 +977,7 @@ public override int Merge(MergeState mergeState) return docCount; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int NextLiveDoc(int doc, IBits liveDocs, int maxDoc) { if (liveDocs == null) @@ -982,6 +991,7 @@ private static int NextLiveDoc(int doc, IBits liveDocs, int maxDoc) return doc; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int NextDeletedDoc(int doc, IBits liveDocs, int maxDoc) { if (liveDocs == null) diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs index 5bed637e1c..3f489cbb1c 100644 --- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs +++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs @@ -1,7 +1,7 @@ using Lucene.Net.Diagnostics; -using System.Diagnostics; using System.IO; using System.IO.Compression; +using System.Runtime.CompilerServices; using ArrayUtil = Lucene.Net.Util.ArrayUtil; using BytesRef = Lucene.Net.Util.BytesRef; @@ -46,20 +46,19 @@ public abstract class 
CompressionMode private class CompressionModeAnonymousInnerClassHelper : CompressionMode { - public CompressionModeAnonymousInnerClassHelper() - { - } - + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Compressor NewCompressor() { return new LZ4FastCompressor(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Decompressor NewDecompressor() { return LZ4_DECOMPRESSOR; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return "FAST"; @@ -76,20 +75,19 @@ public override string ToString() private class CompressionModeAnonymousInnerClassHelper2 : CompressionMode { - public CompressionModeAnonymousInnerClassHelper2() - { - } - + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Compressor NewCompressor() { return new DeflateCompressor(CompressionLevel.Optimal); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Decompressor NewDecompressor() { return new DeflateDecompressor(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return "HIGH_COMPRESSION"; @@ -106,20 +104,19 @@ public override string ToString() private class CompressionModeAnonymousInnerClassHelper3 : CompressionMode { - public CompressionModeAnonymousInnerClassHelper3() - { - } - + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Compressor NewCompressor() { return new LZ4HighCompressor(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Decompressor NewDecompressor() { return LZ4_DECOMPRESSOR; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return "FAST_DECOMPRESSION"; @@ -146,10 +143,6 @@ protected internal CompressionMode() private class DecompressorAnonymousInnerClassHelper : Decompressor { - public DecompressorAnonymousInnerClassHelper() - { - } - public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes) { if 
(Debugging.AssertsEnabled) Debugging.Assert(offset + length <= originalLength); @@ -167,6 +160,7 @@ public override void Decompress(DataInput @in, int originalLength, int offset, i bytes.Length = length; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Clone() { return this; @@ -182,6 +176,7 @@ internal LZ4FastCompressor() ht = new LZ4.HashTable(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Compress(byte[] bytes, int off, int len, DataOutput @out) { LZ4.Compress(bytes, off, len, @out, ht); @@ -197,6 +192,7 @@ internal LZ4HighCompressor() ht = new LZ4.HCHashTable(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Compress(byte[] bytes, int off, int len, DataOutput @out) { LZ4.CompressHC(bytes, off, len, @out, ht); @@ -205,11 +201,6 @@ public override void Compress(byte[] bytes, int off, int len, DataOutput @out) private sealed class DeflateDecompressor : Decompressor { - - internal DeflateDecompressor() - { - } - public override void Decompress(DataInput input, int originalLength, int offset, int length, BytesRef bytes) { if (Debugging.AssertsEnabled) Debugging.Assert(offset + length <= originalLength); @@ -224,14 +215,10 @@ public override void Decompress(DataInput input, int originalLength, int offset, byte[] decompressedBytes = null; using (MemoryStream decompressedStream = new MemoryStream()) + using (MemoryStream compressedStream = new MemoryStream(compressedBytes)) { - using (MemoryStream compressedStream = new MemoryStream(compressedBytes)) - { - using (DeflateStream dStream = new DeflateStream(compressedStream, System.IO.Compression.CompressionMode.Decompress)) - { - dStream.CopyTo(decompressedStream); - } - } + using DeflateStream dStream = new DeflateStream(compressedStream, System.IO.Compression.CompressionMode.Decompress); + dStream.CopyTo(decompressedStream); decompressedBytes = decompressedStream.ToArray(); } @@ -242,9 +229,10 @@ public override void 
Decompress(DataInput input, int originalLength, int offset, bytes.Bytes = decompressedBytes; bytes.Offset = offset; - bytes.Length = length; + bytes.Length = length; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Clone() { return new DeflateDecompressor(); @@ -253,7 +241,7 @@ public override object Clone() private class DeflateCompressor : Compressor { - private CompressionLevel compressionLevel; + private readonly CompressionLevel compressionLevel; // LUCENENET: marked readonly internal DeflateCompressor(CompressionLevel level) { compressionLevel = level; diff --git a/src/Lucene.Net/Codecs/Compressing/LZ4.cs b/src/Lucene.Net/Codecs/Compressing/LZ4.cs index 8e2f61d30a..7df057543e 100644 --- a/src/Lucene.Net/Codecs/Compressing/LZ4.cs +++ b/src/Lucene.Net/Codecs/Compressing/LZ4.cs @@ -2,7 +2,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Compressing { @@ -33,25 +33,23 @@ namespace Lucene.Net.Codecs.Compressing /// http://code.google.com/p/lz4/ /// http://fastcompression.blogspot.fr/p/lz4.html /// - public sealed class LZ4 + public static class LZ4 // LUCENENET specific - made static { - private LZ4() - { - } - internal const int MEMORY_USAGE = 14; internal const int MIN_MATCH = 4; // minimum length of a match - internal static readonly int MAX_DISTANCE = 1 << 16; // maximum distance of a reference + internal const int MAX_DISTANCE = 1 << 16; // maximum distance of a reference internal const int LAST_LITERALS = 5; // the last 5 bytes must be encoded as literals internal const int HASH_LOG_HC = 15; // log size of the dictionary for compressHC - internal static readonly int HASH_TABLE_SIZE_HC = 1 << HASH_LOG_HC; - internal static readonly int OPTIMAL_ML = 0x0F + 4 - 1; // match length that doesn't require an additional byte + internal const int HASH_TABLE_SIZE_HC = 1 << HASH_LOG_HC; + internal const int OPTIMAL_ML = 
0x0F + 4 - 1; // match length that doesn't require an additional byte + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int Hash(int i, int hashBits) { return (i * -1640531535).TripleShift(32 - hashBits); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int HashHC(int i) { return Hash(i, HASH_LOG_HC); @@ -60,6 +58,7 @@ private static int HashHC(int i) /// /// NOTE: This was readInt() in Lucene. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int ReadInt32(byte[] buf, int i) { return ((((sbyte)buf[i]) & 0xFF) << 24) | ((((sbyte)buf[i + 1]) & 0xFF) << 16) | ((((sbyte)buf[i + 2]) & 0xFF) << 8) | @@ -69,11 +68,13 @@ private static int ReadInt32(byte[] buf, int i) /// /// NOTE: This was readIntEquals() in Lucene. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static bool ReadInt32Equals(byte[] buf, int i, int j) { return ReadInt32(buf, i) == ReadInt32(buf, j); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int CommonBytes(byte[] b, int o1, int o2, int limit) { if (Debugging.AssertsEnabled) Debugging.Assert(o1 < o2); @@ -85,6 +86,7 @@ private static int CommonBytes(byte[] b, int o1, int o2, int limit) return count; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int CommonBytesBackward(byte[] b, int o1, int o2, int l1, int l2) { int count = 0; @@ -170,6 +172,7 @@ public static int Decompress(DataInput compressed, int decompressedLen, byte[] d return dOff; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void EncodeLen(int l, DataOutput @out) { while (l >= 0xFF) @@ -180,6 +183,7 @@ private static void EncodeLen(int l, DataOutput @out) @out.WriteByte((byte)(sbyte)l); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void EncodeLiterals(byte[] bytes, int token, int anchor, int literalLen, DataOutput @out) { @out.WriteByte((byte)(sbyte)token); @@ -194,6 +198,7 @@ private static void EncodeLiterals(byte[] bytes, 
int token, int anchor, int lite @out.WriteBytes(bytes, anchor, literalLen); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void EncodeLastLiterals(byte[] bytes, int anchor, int literalLen, DataOutput @out) { int token = Math.Min(literalLen, 0x0F) << 4; @@ -305,6 +310,7 @@ public class Match { internal int start, @ref, len; + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void Fix(int correction) { start += correction; @@ -312,12 +318,14 @@ internal virtual void Fix(int correction) len -= correction; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int End() { return start + len; } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void CopyTo(Match m1, Match m2) { m2.len = m1.len; @@ -328,7 +336,7 @@ private static void CopyTo(Match m1, Match m2) public sealed class HCHashTable { internal const int MAX_ATTEMPTS = 256; - internal static readonly int MASK = MAX_DISTANCE - 1; + internal const int MASK = MAX_DISTANCE - 1; internal int nextToUpdate; private int @base; private readonly int[] hashTable; @@ -340,6 +348,7 @@ internal HCHashTable() chainTable = new short[MAX_DISTANCE]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void Reset(int @base) { this.@base = @base; @@ -348,6 +357,7 @@ internal void Reset(int @base) Arrays.Fill(chainTable, (short)0); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int HashPointer(byte[] bytes, int off) { int v = ReadInt32(bytes, off); @@ -355,6 +365,7 @@ private int HashPointer(byte[] bytes, int off) return hashTable[h]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int Next(int off) { return off - (chainTable[off & MASK] & 0xFFFF); @@ -374,6 +385,7 @@ private void AddHash(byte[] bytes, int off) hashTable[h] = off; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void Insert(int off, byte[] bytes) { for (; nextToUpdate < off; ++nextToUpdate) @@ -566,7 +578,7 @@ public static void 
CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou } // encode seq 1 EncodeSequence(src, anchor, match1.@ref, match1.start, match1.len, @out); - anchor = sOff = match1.End(); + anchor = /*sOff =*/ match1.End(); // LUCENENET: IDE0059: Remove unnecessary value assignment // encode seq 2 EncodeSequence(src, anchor, match2.@ref, match2.start, match2.len, @out); anchor = sOff = match2.End(); @@ -588,7 +600,7 @@ public static void CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou } EncodeSequence(src, anchor, match1.@ref, match1.start, match1.len, @out); - anchor = sOff = match1.End(); + anchor = /*sOff =*/ match1.End(); // LUCENENET: IDE0059: Remove unnecessary value assignment CopyTo(match3, match1); CopyTo(match2, match0); @@ -623,7 +635,7 @@ public static void CompressHC(byte[] src, int srcOff, int srcLen, DataOutput @ou } EncodeSequence(src, anchor, match1.@ref, match1.start, match1.len, @out); - anchor = sOff = match1.End(); + anchor = /*sOff =*/ match1.End(); // LUCENENET: IDE0059: Remove unnecessary value assignment CopyTo(match2, match1); CopyTo(match3, match2); diff --git a/src/Lucene.Net/Codecs/DocValuesConsumer.cs b/src/Lucene.Net/Codecs/DocValuesConsumer.cs index 9a0cbdd9ec..bc1c2513cd 100644 --- a/src/Lucene.Net/Codecs/DocValuesConsumer.cs +++ b/src/Lucene.Net/Codecs/DocValuesConsumer.cs @@ -2,7 +2,6 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; namespace Lucene.Net.Codecs @@ -27,10 +26,10 @@ namespace Lucene.Net.Codecs using ArrayUtil = Lucene.Net.Util.ArrayUtil; using AtomicReader = Lucene.Net.Index.AtomicReader; using BinaryDocValues = Lucene.Net.Index.BinaryDocValues; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using FieldInfo = Lucene.Net.Index.FieldInfo; using FilteredTermsEnum = Lucene.Net.Index.FilteredTermsEnum; + using IBits = Lucene.Net.Util.IBits; using Int64BitSet = Lucene.Net.Util.Int64BitSet; using 
MergeState = Lucene.Net.Index.MergeState; using NumericDocValues = Lucene.Net.Index.NumericDocValues; @@ -114,10 +113,10 @@ protected internal DocValuesConsumer() /// public virtual void MergeNumericField(FieldInfo fieldInfo, MergeState mergeState, IList toMerge, IList docsWithField) { - AddNumericField(fieldInfo, GetMergeNumericFieldEnumerable(fieldInfo, mergeState, toMerge, docsWithField)); + AddNumericField(fieldInfo, GetMergeNumericFieldEnumerable(/* fieldInfo, // LUCENENET: Never read */ mergeState, toMerge, docsWithField)); } - private IEnumerable GetMergeNumericFieldEnumerable(FieldInfo fieldinfo, MergeState mergeState, IList toMerge, IList docsWithField) + private IEnumerable GetMergeNumericFieldEnumerable(/*FieldInfo fieldinfo, // LUCENENET: Never read */ MergeState mergeState, IList toMerge, IList docsWithField) { int readerUpto = -1; int docIDUpto = 0; @@ -176,10 +175,10 @@ public virtual void MergeNumericField(FieldInfo fieldInfo, MergeState mergeState /// public virtual void MergeBinaryField(FieldInfo fieldInfo, MergeState mergeState, IList toMerge, IList docsWithField) { - AddBinaryField(fieldInfo, GetMergeBinaryFieldEnumerable(fieldInfo, mergeState, toMerge, docsWithField)); + AddBinaryField(fieldInfo, GetMergeBinaryFieldEnumerable(/*fieldInfo, // LUCENENET: Never read */ mergeState, toMerge, docsWithField)); } - private IEnumerable GetMergeBinaryFieldEnumerable(FieldInfo fieldInfo, MergeState mergeState, IList toMerge, IList docsWithField) + private IEnumerable GetMergeBinaryFieldEnumerable(/*FieldInfo fieldInfo, // LUCENENET: Never read */ MergeState mergeState, IList toMerge, IList docsWithField) { int readerUpto = -1; int docIDUpto = 0; diff --git a/src/Lucene.Net/Codecs/DocValuesFormat.cs b/src/Lucene.Net/Codecs/DocValuesFormat.cs index 91708bde6a..919855af4b 100644 --- a/src/Lucene.Net/Codecs/DocValuesFormat.cs +++ b/src/Lucene.Net/Codecs/DocValuesFormat.cs @@ -152,9 +152,9 @@ public static ICollection AvailableDocValuesFormats { get { - if 
(docValuesFormatFactory is IServiceListable) + if (docValuesFormatFactory is IServiceListable serviceListable) { - return ((IServiceListable)docValuesFormatFactory).AvailableServices; + return serviceListable.AvailableServices; } else { diff --git a/src/Lucene.Net/Codecs/DocValuesProducer.cs b/src/Lucene.Net/Codecs/DocValuesProducer.cs index 900d1b3b3f..f4da0f8553 100644 --- a/src/Lucene.Net/Codecs/DocValuesProducer.cs +++ b/src/Lucene.Net/Codecs/DocValuesProducer.cs @@ -95,7 +95,7 @@ protected internal DocValuesProducer() /// /// Disposes all resources used by this object. /// - public virtual void Dispose() + public void Dispose() { Dispose(true); GC.SuppressFinalize(this); diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs index a0fa9d4a75..dc0d5b3a0b 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xFields.cs @@ -1,12 +1,11 @@ using J2N.Text; using Lucene.Net.Diagnostics; using Lucene.Net.Index; +using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; -using JCG = J2N.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; -using Lucene.Net.Util; +using JCG = J2N.Collections.Generic; namespace Lucene.Net.Codecs.Lucene3x { @@ -27,20 +26,19 @@ namespace Lucene.Net.Codecs.Lucene3x * limitations under the License. 
*/ - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using Directory = Lucene.Net.Store.Directory; using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum; using DocsEnum = Lucene.Net.Index.DocsEnum; using FieldInfo = Lucene.Net.Index.FieldInfo; using FieldInfos = Lucene.Net.Index.FieldInfos; + using IBits = Lucene.Net.Util.IBits; using IndexFileNames = Lucene.Net.Index.IndexFileNames; using IndexInput = Lucene.Net.Store.IndexInput; using IndexOptions = Lucene.Net.Index.IndexOptions; using IOContext = Lucene.Net.Store.IOContext; using IOUtils = Lucene.Net.Util.IOUtils; using SegmentInfo = Lucene.Net.Index.SegmentInfo; - using StringHelper = Lucene.Net.Util.StringHelper; using Term = Lucene.Net.Index.Term; using Terms = Lucene.Net.Index.Terms; using TermsEnum = Lucene.Net.Index.TermsEnum; @@ -54,7 +52,9 @@ namespace Lucene.Net.Codecs.Lucene3x [Obsolete("(4.0)")] internal class Lucene3xFields : FieldsProducer { - private static bool DEBUG_SURROGATES = false; +#pragma warning disable CA1802 // Use literals where appropriate + private static readonly bool DEBUG_SURROGATES = false; +#pragma warning restore CA1802 // Use literals where appropriate public TermInfosReader Tis { get; set; } public TermInfosReader TisNoIndex { get; private set; } @@ -62,18 +62,18 @@ internal class Lucene3xFields : FieldsProducer public IndexInput FreqStream { get; private set; } public IndexInput ProxStream { get; private set; } private readonly FieldInfos fieldInfos; - private readonly SegmentInfo si; + //private readonly SegmentInfo si; // LUCENENET: Never read // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java internal readonly IDictionary fields = new JCG.SortedDictionary(StringComparer.Ordinal); internal readonly IDictionary preTerms = new Dictionary(); - private readonly Directory dir; - private readonly IOContext context; + //private readonly Directory dir; // LUCENENET: Never read + //private readonly 
IOContext context; // LUCENENET: Never read //private Directory cfsReader; // LUCENENET NOTE: cfsReader not used public Lucene3xFields(Directory dir, FieldInfos fieldInfos, SegmentInfo info, IOContext context, int indexDivisor) { - si = info; + //si = info; // LUCENENET: Never read // NOTE: we must always load terms index, even for // "sequential" scan during merging, because what is @@ -97,7 +97,7 @@ public Lucene3xFields(Directory dir, FieldInfos fieldInfos, SegmentInfo info, IO TisNoIndex = null; Tis = r; } - this.context = context; + //this.context = context; // LUCENENET: Never read this.fieldInfos = fieldInfos; // make sure that all index files have been read or are kept open @@ -139,7 +139,7 @@ public Lucene3xFields(Directory dir, FieldInfos fieldInfos, SegmentInfo info, IO Dispose(); } } - this.dir = dir; + //this.dir = dir; // LUCENENET: Never read } // If this returns, we do the surrogates dance so that the @@ -156,8 +156,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Terms result; - preTerms.TryGetValue(field, out result); + preTerms.TryGetValue(field, out Terms result); return result; } @@ -280,8 +279,8 @@ public PreTermsEnum(Lucene3xFields outerInstance) private SegmentTermEnum seekTermEnum; - private static readonly sbyte UTF8_NON_BMP_LEAD = unchecked((sbyte) 0xf0); - private static readonly sbyte UTF8_HIGH_BMP_LEAD = unchecked((sbyte) 0xee); + private const sbyte UTF8_NON_BMP_LEAD = unchecked((sbyte)0xf0); + private const sbyte UTF8_HIGH_BMP_LEAD = unchecked((sbyte)0xee); // Returns true if the unicode char is "after" the // surrogates in UTF16, ie >= U+E000 and <= U+FFFF: @@ -667,7 +666,7 @@ private void DoPushes() scratch[1] = (sbyte)scratchTerm.Bytes[upTo + 1]; scratch[2] = (sbyte)scratchTerm.Bytes[upTo + 2]; - scratchTerm.Bytes[upTo] = (byte)UTF8_HIGH_BMP_LEAD; + scratchTerm.Bytes[upTo] = unchecked((byte)UTF8_HIGH_BMP_LEAD); scratchTerm.Bytes[upTo + 1] = 0x80; scratchTerm.Bytes[upTo + 2] = 0x80; 
scratchTerm.Length = upTo + 3; @@ -697,7 +696,7 @@ private void DoPushes() } else { - Console.WriteLine(" hit term=" + UnicodeUtil.ToHexString(t2.Text()) + " " + (t2 == null ? null : t2.Bytes)); + Console.WriteLine($" hit term={UnicodeUtil.ToHexString(t2.Text())} {t2?.Bytes}"); } } @@ -1062,41 +1061,20 @@ public override BytesRef Next() public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { - PreDocsEnum docsEnum; - if (reuse == null || !(reuse is PreDocsEnum)) - { + if (reuse == null || !(reuse is PreDocsEnum docsEnum) || docsEnum.FreqStream != outerInstance.FreqStream) docsEnum = new PreDocsEnum(outerInstance); - } - else - { - docsEnum = (PreDocsEnum)reuse; - if (docsEnum.FreqStream != outerInstance.FreqStream) - { - docsEnum = new PreDocsEnum(outerInstance); - } - } + return docsEnum.Reset(termEnum, liveDocs); } public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { - PreDocsAndPositionsEnum docsPosEnum; if (fieldInfo.IndexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) - { return null; - } - else if (reuse == null || !(reuse is PreDocsAndPositionsEnum)) - { + + if (reuse is null || !(reuse is PreDocsAndPositionsEnum docsPosEnum) || docsPosEnum.FreqStream != outerInstance.FreqStream) docsPosEnum = new PreDocsAndPositionsEnum(outerInstance); - } - else - { - docsPosEnum = (PreDocsAndPositionsEnum)reuse; - if (docsPosEnum.FreqStream != outerInstance.FreqStream) - { - docsPosEnum = new PreDocsAndPositionsEnum(outerInstance); - } - } + return docsPosEnum.Reset(termEnum, liveDocs); } } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsFormat.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsFormat.cs index 58bdf695e6..58432c7777 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsFormat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace 
Lucene.Net.Codecs.Lucene3x { @@ -35,6 +36,7 @@ public override DocValuesConsumer NormsConsumer(SegmentWriteState state) throw new NotSupportedException("this codec can only be used for reading"); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesProducer NormsProducer(SegmentReadState state) { return new Lucene3xNormsProducer(state.Directory, state.SegmentInfo, state.FieldInfos, state.Context); diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs index a6548bf10d..607e00d7bb 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs @@ -1,10 +1,10 @@ -using J2N.Threading.Atomic; using J2N.Runtime.CompilerServices; +using J2N.Threading.Atomic; using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Codecs.Lucene3x @@ -27,10 +27,10 @@ namespace Lucene.Net.Codecs.Lucene3x */ using BinaryDocValues = Lucene.Net.Index.BinaryDocValues; - using IBits = Lucene.Net.Util.IBits; using Directory = Lucene.Net.Store.Directory; using FieldInfo = Lucene.Net.Index.FieldInfo; using FieldInfos = Lucene.Net.Index.FieldInfos; + using IBits = Lucene.Net.Util.IBits; using IndexFileNames = Lucene.Net.Index.IndexFileNames; using IndexInput = Lucene.Net.Store.IndexInput; using IOContext = Lucene.Net.Store.IOContext; @@ -81,7 +81,7 @@ public Lucene3xNormsProducer(Directory dir, SegmentInfo info, FieldInfos fields, { Directory separateNormsDir = info.Dir; // separate norms are never inside CFS maxdoc = info.DocCount; - string segmentName = info.Name; + //string segmentName = info.Name; // LUCENENET: IDE0059: Remove unnecessary value assignment bool success = false; try { @@ -161,6 +161,8 @@ protected override void Dispose(bool disposing) { 
norms.Clear(); openFiles.Clear(); + singleNormStream?.Dispose(); // LUCENENET: Dispose singleNormStream and set to null + singleNormStream = null; } } } @@ -179,6 +181,7 @@ private static string GetNormFilename(SegmentInfo info, int number) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static bool HasSeparateNorms(SegmentInfo info, int number) { string v = info.GetAttribute(Lucene3xSegmentInfoFormat.NORMGEN_PREFIX + number); @@ -232,7 +235,7 @@ internal NumericDocValues Instance file.Dispose(); } outerInstance.ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes)); - instance = new NumericDocValuesAnonymousInnerClassHelper(this, bytes); + instance = new NumericDocValuesAnonymousInnerClassHelper(bytes); } return instance; } @@ -243,11 +246,12 @@ private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues { private readonly byte[] bytes; - public NumericDocValuesAnonymousInnerClassHelper(NormsDocValues outerInstance, byte[] bytes) + public NumericDocValuesAnonymousInnerClassHelper(byte[] bytes) { this.bytes = bytes; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return bytes[docID]; @@ -255,6 +259,7 @@ public override long Get(int docID) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override NumericDocValues GetNumeric(FieldInfo field) { var dv = norms[field.Name]; @@ -282,6 +287,7 @@ public override IBits GetDocsWithField(FieldInfo field) throw new InvalidOperationException(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() => ramBytesUsed; public override void CheckIntegrity() { } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xPostingsFormat.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xPostingsFormat.cs index 1088fb3449..40c204b7b8 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xPostingsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xPostingsFormat.cs @@ -1,4 +1,5 @@ using System; +using 
System.Runtime.CompilerServices; using SegmentReadState = Lucene.Net.Index.SegmentReadState; using SegmentWriteState = Lucene.Net.Index.SegmentWriteState; @@ -58,6 +59,7 @@ public override FieldsConsumer FieldsConsumer(SegmentWriteState state) throw new NotSupportedException("this codec can only be used for reading"); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override FieldsProducer FieldsProducer(SegmentReadState state) { return new Lucene3xFields(state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.TermsIndexDivisor); diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoFormat.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoFormat.cs index c72a8a1d7b..b3dbf877cf 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoFormat.cs @@ -1,5 +1,6 @@ using System; using System.Globalization; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene3x { @@ -70,6 +71,7 @@ public class Lucene3xSegmentInfoFormat : SegmentInfoFormat /// If this segment shares stored fields & vectors, this /// offset is where in that file this segment's docs begin. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int GetDocStoreOffset(SegmentInfo si) { string v = si.GetAttribute(DS_OFFSET_KEY); @@ -77,13 +79,15 @@ public static int GetDocStoreOffset(SegmentInfo si) } /// Name used to derive fields/vectors file we share with other segments. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static string GetDocStoreSegment(SegmentInfo si) { string v = si.GetAttribute(DS_NAME_KEY); - return v == null ? si.Name : v; + return v ?? si.Name; } /// Whether doc store files are stored in compound file (*.cfx). 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool GetDocStoreIsCompoundFile(SegmentInfo si) { string v = si.GetAttribute(DS_COMPOUND_KEY); diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs index 9618b3a178..175b038503 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSegmentInfoReader.cs @@ -7,6 +7,7 @@ using JCG = J2N.Collections.Generic; using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory; using Directory = Lucene.Net.Store.Directory; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene3x { @@ -132,6 +133,7 @@ public override SegmentInfo Read(Directory directory, string segmentName, IOCont } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void AddIfExists(Directory dir, ISet files, string fileName) { if (dir.FileExists(fileName)) @@ -213,14 +215,16 @@ private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, Index int delCount = input.ReadInt32(); if (Debugging.AssertsEnabled) Debugging.Assert(delCount <= docCount); - bool hasProx = input.ReadByte() == 1; + //bool hasProx = input.ReadByte() == 1; + input.ReadByte(); // LUCENENET: IDE0059: Remove unnecessary value assignment IDictionary diagnostics = input.ReadStringStringMap(); if (format <= Lucene3xSegmentInfoFormat.FORMAT_HAS_VECTORS) { // NOTE: unused - int hasVectors = input.ReadByte(); + //int hasVectors = input.ReadByte(); + input.ReadByte(); // LUCENENET: IDE0059: Remove unnecessary value assignment } // Replicate logic from 3.x's SegmentInfo.files(): diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSkipListReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSkipListReader.cs index 95a6db9fd7..7d37cc483d 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSkipListReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSkipListReader.cs @@ -1,5 +1,6 @@ using 
Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene3x { @@ -26,9 +27,9 @@ namespace Lucene.Net.Codecs.Lucene3x internal sealed class Lucene3xSkipListReader : MultiLevelSkipListReader { private bool currentFieldStoresPayloads; - private long[] freqPointer; - private long[] proxPointer; - private int[] payloadLength; + private readonly long[] freqPointer; // LUCENENET: marked readonly + private readonly long[] proxPointer; // LUCENENET: marked readonly + private readonly int[] payloadLength; // LUCENENET: marked readonly private long lastFreqPointer; private long lastProxPointer; @@ -73,6 +74,7 @@ public void Init(long skipPointer, long freqBasePointer, long proxBasePointer, i /// public int PayloadLength => lastPayloadLength; + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void SeekChild(int level) { base.SeekChild(level); @@ -81,6 +83,7 @@ protected override void SeekChild(int level) payloadLength[level] = lastPayloadLength; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void SetLastSkipData(int level) { base.SetLastSkipData(level); diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsFormat.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsFormat.cs index 7b1eca98f8..ef3d6fc663 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsFormat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; using Directory = Lucene.Net.Store.Directory; namespace Lucene.Net.Codecs.Lucene3x @@ -27,6 +28,7 @@ namespace Lucene.Net.Codecs.Lucene3x [Obsolete("Only for reading existing 3.x indexes")] internal class Lucene3xStoredFieldsFormat : StoredFieldsFormat { + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override StoredFieldsReader FieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) { return new 
Lucene3xStoredFieldsReader(directory, si, fn, context); diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs index 4243824eef..1f1f9aff81 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xStoredFieldsReader.cs @@ -2,6 +2,7 @@ using System; using System.Diagnostics; using System.IO; +using System.Runtime.CompilerServices; using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory; namespace Lucene.Net.Codecs.Lucene3x @@ -72,12 +73,12 @@ internal sealed class Lucene3xStoredFieldsReader : StoredFieldsReader, IDisposab internal const int FORMAT_MINIMUM = FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS; // NOTE: bit 0 is free here! You can steal it! - public static readonly int FIELD_IS_BINARY = 1 << 1; + public const int FIELD_IS_BINARY = 1 << 1; // the old bit 1 << 2 was compressed, is now left out private const int _NUMERIC_BIT_SHIFT = 3; - internal static readonly int FIELD_IS_NUMERIC_MASK = 0x07 << _NUMERIC_BIT_SHIFT; + internal const int FIELD_IS_NUMERIC_MASK = 0x07 << _NUMERIC_BIT_SHIFT; public const int FIELD_IS_NUMERIC_INT = 1 << _NUMERIC_BIT_SHIFT; public const int FIELD_IS_NUMERIC_LONG = 2 << _NUMERIC_BIT_SHIFT; @@ -85,20 +86,24 @@ internal sealed class Lucene3xStoredFieldsReader : StoredFieldsReader, IDisposab public const int FIELD_IS_NUMERIC_DOUBLE = 4 << _NUMERIC_BIT_SHIFT; private readonly FieldInfos fieldInfos; +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly IndexInput fieldsStream; private readonly IndexInput indexStream; - private int numTotalDocs; - private int size; +#pragma warning restore CA2213 // Disposable fields should be disposed + private readonly int numTotalDocs; // LUCENENET: marked readonly + private readonly int size; // LUCENENET: marked readonly private bool closed; private readonly int format; // The docID offset where our docs begin in the 
index // file. this will be 0 if we have our own private file. - private int docStoreOffset; + private readonly int docStoreOffset; // LUCENENET: marked readonly // when we are inside a compound share doc store (CFX), // (lucene 3.0 indexes only), we privately open our own fd. +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly CompoundFileDirectory storeCFSReader; +#pragma warning restore CA2213 // Disposable fields should be disposed /// /// Returns a cloned FieldsReader that shares open @@ -107,6 +112,7 @@ internal sealed class Lucene3xStoredFieldsReader : StoredFieldsReader, IDisposab /// clones are called (eg, currently SegmentReader manages /// this logic). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Clone() { EnsureOpen(); @@ -221,16 +227,16 @@ public Lucene3xStoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IO { Dispose(); } // keep our original exception -#pragma warning disable 168 - catch (Exception t) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignored } } } } /// If this FieldsReader is disposed. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EnsureOpen() { if (closed) @@ -244,6 +250,7 @@ private void EnsureOpen() /// This means that the Fields values will not be accessible. /// /// If there is a low-level I/O error. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -256,6 +263,7 @@ protected override void Dispose(bool disposing) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SeekIndex(int docID) { indexStream.Seek(FORMAT_SIZE + (docID + docStoreOffset) * 8L); @@ -362,12 +370,14 @@ private void SkipField(int bits) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { // everything is stored on disk return 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { } diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs index edb13141dd..9d4be0394c 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs @@ -8,6 +8,7 @@ using BytesRef = Lucene.Net.Util.BytesRef; using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory; using Directory = Lucene.Net.Store.Directory; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene3x { @@ -80,21 +81,25 @@ internal class Lucene3xTermVectorsReader : TermVectorsReader private readonly FieldInfos fieldInfos; - private IndexInput tvx; - private IndexInput tvd; - private IndexInput tvf; - private int size; - private int numTotalDocs; +#pragma warning disable CA2213 // Disposable fields should be disposed + private readonly IndexInput tvx; // LUCENENET: marked readonly + private readonly IndexInput tvd; // LUCENENET: marked readonly + private readonly IndexInput tvf; // LUCENENET: marked readonly +#pragma warning restore CA2213 // Disposable fields should be disposed + private readonly int size; // LUCENENET: marked readonly + private readonly int numTotalDocs; // LUCENENET: marked readonly // The docID offset where our docs begin in the index // file. 
this will be 0 if we have our own private file. - private int docStoreOffset; + private readonly int docStoreOffset; // LUCENENET: marked readonly // when we are inside a compound share doc store (CFX), // (lucene 3.0 indexes only), we privately open our own fd. // TODO: if we are worried, maybe we could eliminate the // extra fd somehow when you also have vectors... +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly CompoundFileDirectory storeCFSReader; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly int format; @@ -187,11 +192,13 @@ public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn } // Not private to avoid synthetic access$NNN methods + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void SeekTvx(int docNum) { tvx.Seek((docNum + docStoreOffset) * 16L + FORMAT_SIZE); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int CheckValidFormat(IndexInput @in) { int format = @in.ReadInt32(); @@ -206,6 +213,7 @@ private int CheckValidFormat(IndexInput @in) return format; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -266,6 +274,7 @@ public TVFields(Lucene3xTermVectorsReader outerInstance, int docID) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override IEnumerator GetEnumerator() { return new IteratorAnonymousInnerClassHelper(this); @@ -275,7 +284,8 @@ private class IteratorAnonymousInnerClassHelper : IEnumerator { private readonly TVFields outerInstance; private string current; - private int i, upTo; + private int i; + private readonly int upTo; public IteratorAnonymousInnerClassHelper(TVFields outerInstance) { @@ -284,6 +294,7 @@ public IteratorAnonymousInnerClassHelper(TVFields outerInstance) i = 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool MoveNext() { if (outerInstance.fieldNumbers != null && i < upTo) @@ 
-317,8 +328,7 @@ public override Terms GetTerms(string field) return null; } - int fieldIndex; - if (!fieldNumberToIndex.TryGetValue(fieldInfo.Number, out fieldIndex)) + if (!fieldNumberToIndex.TryGetValue(fieldInfo.Number, out int fieldIndex)) { // Term vectors were not indexed for this field return null; @@ -365,6 +375,7 @@ public TVTerms(Lucene3xTermVectorsReader outerInstance, long tvfFP) unicodeSortOrder = outerInstance.SortTermsByUnicode(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetEnumerator() { var termsEnum = new TVTermsEnum(outerInstance); @@ -372,23 +383,11 @@ public override TermsEnum GetEnumerator() return termsEnum; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetEnumerator(TermsEnum reuse) { - TVTermsEnum termsEnum; -#pragma warning disable IDE0038 // Use pattern matching - if (reuse is null || !(reuse is TVTermsEnum)) -#pragma warning restore IDE0038 // Use pattern matching - { + if (reuse is null || !(reuse is TVTermsEnum termsEnum) || !termsEnum.CanReuse(outerInstance.tvf)) termsEnum = new TVTermsEnum(outerInstance); - } - else - { - var reusable = (TVTermsEnum)reuse; - if (reusable.CanReuse(outerInstance.tvf)) - termsEnum = reusable; - else - termsEnum = new TVTermsEnum(outerInstance); - } termsEnum.Reset(numTerms, tvfFPStart, storePositions, storeOffsets, unicodeSortOrder); return termsEnum; @@ -406,6 +405,7 @@ public override TermsEnum GetEnumerator(TermsEnum reuse) public override IComparer Comparer { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { if (unicodeSortOrder) @@ -456,6 +456,7 @@ public TVTermsEnum(Lucene3xTermVectorsReader outerInstance) tvf = (IndexInput)origTVF.Clone(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual bool CanReuse(IndexInput tvf) { return tvf == origTVF; @@ -556,6 +557,7 @@ public override void SeekExact(long ord) throw new NotSupportedException(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] 
public override bool MoveNext() { if (++currentTerm >= numTerms) @@ -581,21 +583,17 @@ public override BytesRef Next() public override long TotalTermFreq => termAndPostings[currentTerm].Freq; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) // ignored { - TVDocsEnum docsEnum; - if (reuse != null && reuse is TVDocsEnum) - { - docsEnum = (TVDocsEnum)reuse; - } - else - { + if (reuse is null || !(reuse is TVDocsEnum docsEnum)) docsEnum = new TVDocsEnum(); - } + docsEnum.Reset(liveDocs, termAndPostings[currentTerm]); return docsEnum; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { if (!storePositions && !storeOffsets) @@ -603,21 +601,16 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos return null; } - TVDocsAndPositionsEnum docsAndPositionsEnum; - if (reuse != null && reuse is TVDocsAndPositionsEnum) - { - docsAndPositionsEnum = (TVDocsAndPositionsEnum)reuse; - } - else - { + if (reuse is null || !(reuse is TVDocsAndPositionsEnum docsAndPositionsEnum)) docsAndPositionsEnum = new TVDocsAndPositionsEnum(); - } + docsAndPositionsEnum.Reset(liveDocs, termAndPostings[currentTerm]); return docsAndPositionsEnum; } public override IComparer Comparer { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { if (unicodeSortOrder) @@ -645,6 +638,7 @@ private class TVDocsEnum : DocsEnum public override int DocID => doc; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int NextDoc() { if (!didNext && (liveDocs == null || liveDocs.Get(0))) @@ -658,6 +652,7 @@ public override int NextDoc() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Advance(int target) { if (!didNext && target == 0) @@ -670,6 +665,7 @@ public override int Advance(int target) } } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Reset(IBits liveDocs, TermAndPostings termAndPostings) { this.liveDocs = liveDocs; @@ -678,6 +674,7 @@ public virtual void Reset(IBits liveDocs, TermAndPostings termAndPostings) didNext = false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return 1; @@ -696,6 +693,7 @@ private class TVDocsAndPositionsEnum : DocsAndPositionsEnum public override int Freq { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { if (positions != null) @@ -712,6 +710,7 @@ public override int Freq public override int DocID => doc; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int NextDoc() { if (!didNext && (liveDocs == null || liveDocs.Get(0))) @@ -725,6 +724,7 @@ public override int NextDoc() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Advance(int target) { if (!didNext && target == 0) @@ -737,6 +737,7 @@ public override int Advance(int target) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Reset(IBits liveDocs, TermAndPostings termAndPostings) { this.liveDocs = liveDocs; @@ -748,6 +749,7 @@ public virtual void Reset(IBits liveDocs, TermAndPostings termAndPostings) nextPos = 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BytesRef GetPayload() { return null; @@ -780,6 +782,7 @@ public override int NextPosition() public override int StartOffset { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { if (startOffsets != null) @@ -795,6 +798,7 @@ public override int StartOffset public override int EndOffset { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { if (endOffsets != null) @@ -808,12 +812,14 @@ public override int EndOffset } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return 1; } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Fields Get(int docID) { if (tvx != null) @@ 
-860,17 +866,20 @@ public override object Clone() // true when segments are used for "normal" searching; // it's only false during testing, to create a pre-flex // index, using the test-only PreFlexRW. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal virtual bool SortTermsByUnicode() { return true; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { // everything is disk-based return 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { } diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs index 1057564720..a24fa90bae 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using System; using System.Diagnostics; +using System.Runtime.CompilerServices; using IBits = Lucene.Net.Util.IBits; namespace Lucene.Net.Codecs.Lucene3x @@ -45,8 +46,8 @@ internal class SegmentTermDocs internal int doc = 0; internal int freq; - private int skipInterval; - private int maxSkipLevels; + private readonly int skipInterval; // LUCENENET: marked readonly + private readonly int maxSkipLevels; // LUCENENET: marked readonly private Lucene3xSkipListReader skipListReader; private long freqBasePointer; @@ -67,6 +68,7 @@ public SegmentTermDocs(IndexInput freqStream, TermInfosReader tis, FieldInfos fi maxSkipLevels = tis.MaxSkipLevels; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Seek(Term term) { TermInfo ti = tis.Get(term); @@ -121,12 +123,14 @@ internal virtual void Seek(TermInfo ti, Term term) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual void Dispose(bool disposing) { if (disposing) @@ -143,6 +147,7 @@ protected virtual void Dispose(bool 
disposing) public int Freq => freq; + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal virtual void SkippingDoc() { } @@ -247,6 +252,7 @@ private int ReadNoTf(int[] docs, int[] freqs, int length) /// /// Overridden by to skip in prox stream. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal virtual void SkipProx(long proxPointer, int payloadLength) { } diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs index d892162b26..32884fe263 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermEnum.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using System; using System.Diagnostics; +using System.Runtime.CompilerServices; using FieldInfos = Lucene.Net.Index.FieldInfos; using IndexFormatTooNewException = Lucene.Net.Index.IndexFormatTooNewException; using IndexFormatTooOldException = Lucene.Net.Index.IndexFormatTooOldException; @@ -58,8 +59,8 @@ internal sealed class SegmentTermEnum : IDisposable internal TermInfo termInfo = new TermInfo(); - private int format; - private bool isIndex = false; + private readonly int format; // LUCENENET: marked readonly + private readonly bool isIndex = false; // LUCENENET: marked readonly internal long indexPointer = 0; internal int indexInterval; // LUCENENET NOTE: Changed from public field to internal (class is internal anyway) internal int skipInterval; @@ -115,16 +116,8 @@ public SegmentTermEnum(IndexInput i, FieldInfos fis, bool isi) public object Clone() { - SegmentTermEnum clone = null; - try - { - clone = (SegmentTermEnum)base.MemberwiseClone(); - } -#pragma warning disable 168 - catch (InvalidOperationException e) -#pragma warning restore 168 - { - } + // LUCENENET: MemberwiseClone() doesn't throw in .NET + SegmentTermEnum clone = (SegmentTermEnum)base.MemberwiseClone(); clone.input = (IndexInput)input.Clone(); clone.termInfo = new TermInfo(termInfo); @@ -212,6 +205,7 
@@ internal int ScanTo(Term term) /// Returns the current Term in the enumeration. /// Initially invalid, valid after called for the first time. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Term Term() { return termBuffer.ToTerm(); @@ -219,6 +213,7 @@ public Term Term() /// /// Returns the previous Term enumerated. Initially null. + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal Term Prev() { return prevBuffer.ToTerm(); @@ -228,6 +223,7 @@ internal Term Prev() /// Returns the current in the enumeration. /// Initially invalid, valid after called for the first time. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal TermInfo TermInfo() { return new TermInfo(termInfo); @@ -237,6 +233,7 @@ internal TermInfo TermInfo() /// Sets the argument to the current in the enumeration. /// Initially invalid, valid after called for the first time. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void TermInfo(TermInfo ti) { ti.Set(termInfo); @@ -262,6 +259,7 @@ internal void TermInfo(TermInfo ti) /// /// Closes the enumeration to further activity, freeing resources. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Dispose() { input.Dispose(); diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs index f426967e2a..de24367581 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs @@ -1,7 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Index; using System; -using System.Diagnostics; +using System.Runtime.CompilerServices; using BytesRef = Lucene.Net.Util.BytesRef; namespace Lucene.Net.Codecs.Lucene3x @@ -34,7 +34,7 @@ namespace Lucene.Net.Codecs.Lucene3x internal sealed class SegmentTermPositions : SegmentTermDocs { private IndexInput proxStream; - private IndexInput proxStreamOrig; + private readonly IndexInput proxStreamOrig; // LUCENENET: marked readonly private int proxCount; private int position; @@ -80,17 +80,13 @@ internal override void Seek(TermInfo ti, Term term) needToLoadPayload = false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { base.Dispose(disposing); if (disposing) - { - if (proxStream != null) - { - proxStream.Dispose(); - } - } + proxStream?.Dispose(); } public int NextPosition() @@ -129,6 +125,7 @@ private int ReadDeltaPosition() return delta; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal sealed override void SkippingDoc() { // we remember to skip a document lazily @@ -157,6 +154,7 @@ public sealed override int Read(int[] docs, int[] freqs) /// /// Called by base.SkipTo(). 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal override void SkipProx(long proxPointer, int payloadLength) { // we save the pointer, we might have to skip there lazily @@ -167,6 +165,7 @@ protected internal override void SkipProx(long proxPointer, int payloadLength) needToLoadPayload = false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SkipPositions(int n) { if (Debugging.AssertsEnabled) Debugging.Assert(m_indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); @@ -177,6 +176,7 @@ private void SkipPositions(int n) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SkipPayload() { if (needToLoadPayload && payloadLength > 0) diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs b/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs index 3624709a54..3c2bd8b305 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermBuffer.cs @@ -3,7 +3,7 @@ using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; +using System.Runtime.CompilerServices; using BytesRef = Lucene.Net.Util.BytesRef; using FieldInfos = Lucene.Net.Index.FieldInfos; @@ -51,6 +51,7 @@ internal sealed class TermBuffer internal int newSuffixStart; // only valid right after .read is called + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int CompareTo(TermBuffer other) { if (field == other.field) // fields are interned @@ -99,6 +100,7 @@ public void Read(IndexInput input, FieldInfos fieldInfos) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Set(Term term) { if (term == null) @@ -113,6 +115,7 @@ public void Set(Term term) this.term = term; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Set(TermBuffer other) { field = other.field; @@ -123,6 +126,7 @@ public void Set(TermBuffer other) bytes.CopyBytes(other.bytes); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Reset() { field = null; @@ -130,6 
+134,7 @@ public void Reset() currentFieldNumber = -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Term ToTerm() { if (field == null) // unset @@ -142,16 +147,8 @@ public Term ToTerm() public object Clone() { - TermBuffer clone = null; - try - { - clone = (TermBuffer)base.MemberwiseClone(); - } -#pragma warning disable 168 - catch (InvalidOperationException e) -#pragma warning restore 168 - { - } + // LUCENENET: MemberwiseClone() doesn't throw in .NET + TermBuffer clone = (TermBuffer)base.MemberwiseClone(); clone.bytes = BytesRef.DeepCopyOf(bytes); return clone; } diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfo.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfo.cs index 09f3411387..d4e2c71e24 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermInfo.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfo.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene3x { @@ -55,6 +56,7 @@ public TermInfo(TermInfo ti) SkipOffset = ti.SkipOffset; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Set(int docFreq, long freqPointer, long proxPointer, int skipOffset) { this.DocFreq = docFreq; @@ -63,6 +65,7 @@ public void Set(int docFreq, long freqPointer, long proxPointer, int skipOffset) this.SkipOffset = skipOffset; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Set(TermInfo ti) { DocFreq = ti.DocFreq; diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs index 3b585716ce..fc0d201365 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReader.cs @@ -3,7 +3,7 @@ using Lucene.Net.Util; using System; using System.Collections.Generic; -using System.Diagnostics; +using System.Runtime.CompilerServices; using BytesRef = Lucene.Net.Util.BytesRef; using Directory = Lucene.Net.Store.Directory; @@ -46,8 +46,10 @@ internal sealed class TermInfosReader : IDisposable private 
readonly string segment; private readonly FieldInfos fieldInfos; +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly DisposableThreadLocal threadResources = new DisposableThreadLocal(); private readonly SegmentTermEnum origEnum; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly long size; private readonly TermInfosReaderIndex index; @@ -90,6 +92,7 @@ public override int GetHashCode() return term.GetHashCode(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Clone() { return new CloneableTerm(term); @@ -169,6 +172,7 @@ internal TermInfosReader(Directory dir, string seg, FieldInfos fis, IOContext co public int MaxSkipLevels => origEnum.maxSkipLevels; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Dispose() { IOUtils.Dispose(origEnum, threadResources); @@ -181,6 +185,7 @@ public void Dispose() /// internal long Count => size; + [MethodImpl(MethodImplOptions.AggressiveInlining)] private ThreadResources GetThreadResources() { ThreadResources resources = threadResources.Value; @@ -195,7 +200,8 @@ private ThreadResources GetThreadResources() private static readonly IComparer legacyComparer = BytesRef.UTF8SortedAsUTF16Comparer; - private int CompareAsUTF16(Term term1, Term term2) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static int CompareAsUTF16(Term term1, Term term2) // LUCENENET: CA1822: Mark members as static { if (term1.Field.Equals(term2.Field, StringComparison.Ordinal)) { @@ -209,6 +215,7 @@ private int CompareAsUTF16(Term term1, Term term2) /// /// Returns the for a in the set, or null. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] internal TermInfo Get(Term term) { return Get(term, false); @@ -235,16 +242,19 @@ private TermInfo Get(Term term, bool mustSeekEnum) return SeekEnum(resources.termEnum, term, tiOrd, true); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void CacheCurrentTerm(SegmentTermEnum enumerator) { termsCache.Put(new CloneableTerm(enumerator.Term()), new TermInfoAndOrd(enumerator.termInfo, enumerator.position)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static Term DeepCopyOf(Term other) { return new Term(other.Field, BytesRef.DeepCopyOf(other.Bytes)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, bool useCache) { if (useCache) @@ -347,7 +357,7 @@ internal TermInfo SeekEnum(SegmentTermEnum enumerator, Term term, TermInfoAndOrd } // called only from asserts - private bool SameTermInfo(TermInfo ti1, TermInfo ti2, SegmentTermEnum enumerator) + private static bool SameTermInfo(TermInfo ti1, TermInfo ti2, SegmentTermEnum enumerator) // LUCENENET: CA1822: Mark members as static { if (ti1.DocFreq != ti2.DocFreq) { @@ -369,6 +379,7 @@ private bool SameTermInfo(TermInfo ti1, TermInfo ti2, SegmentTermEnum enumerator return true; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EnsureIndexIsRead() { if (index == null) @@ -408,6 +419,7 @@ internal long GetPosition(Term term) /// /// Returns an enumeration of all the s and s in the set. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public SegmentTermEnum Terms() { return (SegmentTermEnum)origEnum.Clone(); @@ -415,12 +427,14 @@ public SegmentTermEnum Terms() /// /// Returns an enumeration of terms starting at or after the named term. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public SegmentTermEnum Terms(Term term) { Get(term, true); return (SegmentTermEnum)GetThreadResources().termEnum.Clone(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal long RamBytesUsed() { return index == null ? 0 : index.RamBytesUsed(); diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReaderIndex.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReaderIndex.cs index 728341e0c2..69c15d9723 100644 --- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReaderIndex.cs +++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReaderIndex.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene3x { @@ -42,9 +43,9 @@ namespace Lucene.Net.Codecs.Lucene3x internal class TermInfosReaderIndex { private const int MAX_PAGE_BITS = 18; // 256 KB block - private Term[] fields; - private int totalIndexInterval; - private IComparer comparer = BytesRef.UTF8SortedAsUTF16Comparer; + private readonly Term[] fields; // LUCENENET: marked readonly + private readonly int totalIndexInterval; // LUCENENET: marked readonly + private readonly IComparer comparer = BytesRef.UTF8SortedAsUTF16Comparer; // LUCENENET: marked readonly private readonly PagedBytesDataInput dataInput; private readonly PackedInt32s.Reader indexToDataOffset; private readonly int indexSize; @@ -122,6 +123,7 @@ public TermInfosReaderIndex(SegmentTermEnum indexEnum, int indexDivisor, long ti ramBytesUsed = fields.Length * (RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.ShallowSizeOfInstance(typeof(Term))) + dataPagedBytes.RamBytesUsed() + indexToDataOffset.RamBytesUsed(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int EstimatePageBits(long estSize) { return Math.Max(Math.Min(64 - estSize.LeadingZeroCount(), MAX_PAGE_BITS), 4); @@ -224,6 +226,7 @@ internal virtual Term GetTerm(int termIndex) /// The index of the of term to compare. 
/// int. /// If there is a low-level I/O error. + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int CompareTo(Term term, int termIndex) { return CompareTo(term, termIndex, (PagedBytesDataInput)dataInput.Clone(), new BytesRef()); @@ -267,12 +270,14 @@ private int CompareTo(Term term, int termIndex, PagedBytesDataInput input, Bytes /// The data block. /// int. /// If there is a low-level I/O error. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int CompareField(Term term, int termIndex, PagedBytesDataInput input) { input.SetPosition(indexToDataOffset.Get(termIndex)); return term.Field.CompareToOrdinal(fields[input.ReadVInt32()].Field); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual long RamBytesUsed() { return ramBytesUsed; diff --git a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs index a73f1e9697..e7ca8c1f60 100644 --- a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs +++ b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.Diagnostics; +using System.Runtime.CompilerServices; using BitUtil = Lucene.Net.Util.BitUtil; namespace Lucene.Net.Codecs.Lucene40 @@ -54,7 +55,7 @@ internal sealed class BitVector : IMutableBits private byte[] bits; private int size; private int count; - private int version; + private readonly int version; // LUCENENET: marked readonly /// /// Constructs a vector capable of holding bits. 
@@ -72,7 +73,8 @@ internal BitVector(byte[] bits, int size) count = -1; } - private int GetNumBytes(int size) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static int GetNumBytes(int size) // LUCENENET: CA1822: Mark members as static { int bytesLength = (int)((uint)size >> 3); if ((size & 7) != 0) @@ -82,6 +84,7 @@ private int GetNumBytes(int size) return bytesLength; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { byte[] copyBits = new byte[bits.Length]; @@ -93,6 +96,7 @@ public object Clone() /// /// Sets the value of to one. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Set(int bit) { if (bit >= size) @@ -134,6 +138,7 @@ public bool GetAndSet(int bit) /// /// Sets the value of to zero. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Clear(int bit) { if (bit >= size) @@ -173,6 +178,7 @@ public bool GetAndClear(int bit) /// Returns true if is one and /// false if it is zero. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Get(int bit) { if (Debugging.AssertsEnabled) Debugging.Assert(bit >= 0 && bit < size,"bit {0} is out of bounds 0..{1}", bit, (size - 1)); @@ -221,6 +227,7 @@ public int Count() // LUCENENET TODO: API - make into a property /// /// For testing + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int GetRecomputedCount() { int c = 0; @@ -232,23 +239,23 @@ public int GetRecomputedCount() return c; } - private static string CODEC = "BitVector"; + private const string CODEC = "BitVector"; // Version before version tracking was added: - public readonly static int VERSION_PRE = -1; + public const int VERSION_PRE = -1; // First version: - public readonly static int VERSION_START = 0; + public const int VERSION_START = 0; // Changed DGaps to encode gaps between cleared bits, not // set: - public readonly static int VERSION_DGAPS_CLEARED = 1; + public const int VERSION_DGAPS_CLEARED = 1; // added checksum - public readonly static int 
VERSION_CHECKSUM = 2; + public const int VERSION_CHECKSUM = 2; // Increment version to change it: - public readonly static int VERSION_CURRENT = VERSION_CHECKSUM; + public const int VERSION_CURRENT = VERSION_CHECKSUM; public int Version => version; @@ -318,6 +325,7 @@ private void ClearUnusedBits() /// /// Set all bits. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void SetAll() { Arrays.Fill(bits, (byte)0xff); @@ -327,6 +335,7 @@ public void SetAll() /// /// Write as a bit set. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void WriteBits(IndexOutput output) { output.WriteInt32(Length); // write size @@ -469,6 +478,7 @@ public BitVector(Directory d, string name, IOContext context) } // asserts only + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool VerifyCount() { if (Debugging.AssertsEnabled) Debugging.Assert(count != -1); @@ -480,6 +490,7 @@ private bool VerifyCount() /// /// Read as a bit set. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void ReadBits(IndexInput input) { count = input.ReadInt32(); // read count diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs index 59d5a5d2db..fc86de02bb 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -53,6 +54,7 @@ public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene40Codec outerInstan this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsFormat GetPostingsFormatForField(string field) { return outerInstance.GetPostingsFormatForField(field); @@ -93,6 +95,7 @@ public Lucene40Codec() /// /// The default implementation always returns "Lucene40". 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual PostingsFormat GetPostingsFormatForField(string field) { // LUCENENET specific - lazy initialize the codec to ensure we get the correct type if overridden. diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesFormat.cs index 5a29e4aeb9..a56cc199e9 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesFormat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -139,6 +140,7 @@ public override DocValuesConsumer FieldsConsumer(SegmentWriteState state) throw new NotSupportedException("this codec can only be used for reading"); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesProducer FieldsProducer(SegmentReadState state) { string filename = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "dv", IndexFileNames.COMPOUND_FILE_EXTENSION); diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesReader.cs index 58cfe21875..fa95ec4825 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesReader.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Globalization; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -52,7 +53,7 @@ internal sealed class Lucene40DocValuesReader : DocValuesProducer private readonly Directory dir; private readonly SegmentReadState state; private readonly string legacyKey; - private static readonly string segmentSuffix = "dv"; + private const string segmentSuffix = "dv"; // ram instances we have already loaded private readonly IDictionary numericInstances = new Dictionary(); @@ -74,8 +75,7 @@ public override NumericDocValues GetNumeric(FieldInfo field) { 
lock (this) { - NumericDocValues instance; - if (!numericInstances.TryGetValue(field.Number, out instance)) + if (!numericInstances.TryGetValue(field.Number, out NumericDocValues instance)) { string fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name + "_" + Convert.ToString(field.Number, CultureInfo.InvariantCulture), segmentSuffix, "dat"); IndexInput input = dir.OpenInput(fileName, state.Context); @@ -88,31 +88,31 @@ public override NumericDocValues GetNumeric(FieldInfo field) //{ if (type == LegacyDocValuesType.VAR_INTS) { - instance = LoadVarInt32sField(field, input); + instance = LoadVarInt32sField(/* field, // LUCENENET: Never read */ input); } else if (type == LegacyDocValuesType.FIXED_INTS_8) { - instance = LoadByteField(field, input); + instance = LoadByteField(/* field, // LUCENENET: Never read */ input); } else if (type == LegacyDocValuesType.FIXED_INTS_16) { - instance = LoadInt16Field(field, input); + instance = LoadInt16Field(/* field, // LUCENENET: Never read */ input); } else if (type == LegacyDocValuesType.FIXED_INTS_32) { - instance = LoadInt32Field(field, input); + instance = LoadInt32Field(/* field, // LUCENENET: Never read */ input); } else if (type == LegacyDocValuesType.FIXED_INTS_64) { - instance = LoadInt64Field(field, input); + instance = LoadInt64Field(/* field, // LUCENENET: Never read */ input); } else if (type == LegacyDocValuesType.FLOAT_32) { - instance = LoadSingleField(field, input); + instance = LoadSingleField(/* field, // LUCENENET: Never read */ input); } else if (type == LegacyDocValuesType.FLOAT_64) { - instance = LoadDoubleField(field, input); + instance = LoadDoubleField(/* field, // LUCENENET: Never read */ input); } else { @@ -142,7 +142,7 @@ public override NumericDocValues GetNumeric(FieldInfo field) /// /// NOTE: This was loadVarIntsField() in Lucene. 
/// - private NumericDocValues LoadVarInt32sField(FieldInfo field, IndexInput input) + private NumericDocValues LoadVarInt32sField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input) { CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.VAR_INTS_CODEC_NAME, Lucene40DocValuesFormat.VAR_INTS_VERSION_START, Lucene40DocValuesFormat.VAR_INTS_VERSION_CURRENT); var header = (sbyte)input.ReadByte(); @@ -180,6 +180,7 @@ public NumericDocValuesAnonymousInnerClassHelper(long[] values) this.values = values; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return values[docID]; @@ -199,6 +200,7 @@ public NumericDocValuesAnonymousInnerClassHelper2(long minValue, long defaultVal this.reader = reader; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { long value = reader.Get(docID); @@ -213,7 +215,7 @@ public override long Get(int docID) } } - private NumericDocValues LoadByteField(FieldInfo field, IndexInput input) + private NumericDocValues LoadByteField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input) { CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_START, Lucene40DocValuesFormat.INTS_VERSION_CURRENT); int valueSize = input.ReadInt32(); @@ -237,6 +239,7 @@ public NumericDocValuesAnonymousInnerClassHelper3(byte[] values) this.values = values; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return (sbyte)values[docID]; @@ -246,7 +249,7 @@ public override long Get(int docID) /// /// NOTE: This was loadShortField() in Lucene. 
/// - private NumericDocValues LoadInt16Field(FieldInfo field, IndexInput input) + private NumericDocValues LoadInt16Field(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input) { CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_START, Lucene40DocValuesFormat.INTS_VERSION_CURRENT); int valueSize = input.ReadInt32(); @@ -273,6 +276,7 @@ public NumericDocValuesAnonymousInnerClassHelper4(short[] values) this.values = values; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return values[docID]; @@ -282,7 +286,7 @@ public override long Get(int docID) /// /// NOTE: This was loadIntField() in Lucene. /// - private NumericDocValues LoadInt32Field(FieldInfo field, IndexInput input) + private NumericDocValues LoadInt32Field(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input) { CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_START, Lucene40DocValuesFormat.INTS_VERSION_CURRENT); int valueSize = input.ReadInt32(); @@ -309,6 +313,7 @@ public NumericDocValuesAnonymousInnerClassHelper5(int[] values) this.values = values; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return values[docID]; @@ -318,7 +323,7 @@ public override long Get(int docID) /// /// NOTE: This was loadLongField() in Lucene. 
/// - private NumericDocValues LoadInt64Field(FieldInfo field, IndexInput input) + private NumericDocValues LoadInt64Field(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input) { CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_START, Lucene40DocValuesFormat.INTS_VERSION_CURRENT); int valueSize = input.ReadInt32(); @@ -345,6 +350,7 @@ public NumericDocValuesAnonymousInnerClassHelper6(long[] values) this.values = values; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return values[docID]; @@ -354,7 +360,7 @@ public override long Get(int docID) /// /// NOTE: This was loadFloatField() in Lucene. /// - private NumericDocValues LoadSingleField(FieldInfo field, IndexInput input) + private NumericDocValues LoadSingleField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input) { CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.FLOATS_CODEC_NAME, Lucene40DocValuesFormat.FLOATS_VERSION_START, Lucene40DocValuesFormat.FLOATS_VERSION_CURRENT); int valueSize = input.ReadInt32(); @@ -381,13 +387,14 @@ public NumericDocValuesAnonymousInnerClassHelper7(int[] values) this.values = values; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return values[docID]; } } - private NumericDocValues LoadDoubleField(FieldInfo field, IndexInput input) + private NumericDocValues LoadDoubleField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input) { CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.FLOATS_CODEC_NAME, Lucene40DocValuesFormat.FLOATS_VERSION_START, Lucene40DocValuesFormat.FLOATS_VERSION_CURRENT); int valueSize = input.ReadInt32(); @@ -414,6 +421,7 @@ public NumericDocValuesAnonymousInnerClassHelper8(long[] values) this.values = values; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return values[docID]; @@ -424,8 +432,7 @@ public override BinaryDocValues 
GetBinary(FieldInfo field) { lock (this) { - BinaryDocValues instance; - if (!binaryInstances.TryGetValue(field.Number, out instance)) + if (!binaryInstances.TryGetValue(field.Number, out BinaryDocValues instance)) { var type = field.GetAttribute(legacyKey).ToLegacyDocValuesType(); @@ -496,6 +503,7 @@ public BinaryDocValuesAnonymousInnerClassHelper(int fixedLength, PagedBytes.Read this.bytesReader = bytesReader; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Get(int docID, BytesRef result) { bytesReader.FillSlice(result, fixedLength * (long)docID, fixedLength); @@ -550,6 +558,7 @@ public BinaryDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, this.reader = reader; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Get(int docID, BytesRef result) { long startAddress = reader.Get(docID); @@ -610,6 +619,7 @@ public BinaryDocValuesAnonymousInnerClassHelper3(int fixedLength, PagedBytes.Rea this.reader = reader; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Get(int docID, BytesRef result) { long offset = fixedLength * reader.Get(docID); @@ -690,8 +700,7 @@ public override SortedDocValues GetSorted(FieldInfo field) { lock (this) { - SortedDocValues instance; - if (!sortedInstances.TryGetValue(field.Number, out instance)) + if (!sortedInstances.TryGetValue(field.Number, out SortedDocValues instance)) { string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name + "_" + Convert.ToString(field.Number, CultureInfo.InvariantCulture), segmentSuffix, "dat"); string indexName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name + "_" + Convert.ToString(field.Number, CultureInfo.InvariantCulture), segmentSuffix, "idx"); @@ -707,11 +716,11 @@ public override SortedDocValues GetSorted(FieldInfo field) if (type == LegacyDocValuesType.BYTES_FIXED_SORTED) { - instance = LoadBytesFixedSorted(field, data, index); + instance = LoadBytesFixedSorted(/* field, // LUCENENET: 
Never read */ data, index); } else if (type == LegacyDocValuesType.BYTES_VAR_SORTED) { - instance = LoadBytesVarSorted(field, data, index); + instance = LoadBytesVarSorted(/* field, // LUCENENET: Never read */ data, index); } else { @@ -739,7 +748,7 @@ public override SortedDocValues GetSorted(FieldInfo field) } } - private SortedDocValues LoadBytesFixedSorted(FieldInfo field, IndexInput data, IndexInput index) + private SortedDocValues LoadBytesFixedSorted(/*FieldInfo field, // LUCENENET: Never read */ IndexInput data, IndexInput index) { CodecUtil.CheckHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_START, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT); CodecUtil.CheckHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_START, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT); @@ -771,11 +780,13 @@ public SortedDocValuesAnonymousInnerClassHelper(int fixedLength, int valueCount, this.reader = reader; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetOrd(int docID) { return (int)reader.Get(docID); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void LookupOrd(int ord, BytesRef result) { bytesReader.FillSlice(result, fixedLength * (long)ord, fixedLength); @@ -784,7 +795,7 @@ public override void LookupOrd(int ord, BytesRef result) public override int ValueCount => valueCount; } - private SortedDocValues LoadBytesVarSorted(FieldInfo field, IndexInput data, IndexInput index) + private SortedDocValues LoadBytesVarSorted(/*FieldInfo field, // LUCENENET: Never read */ IndexInput data, IndexInput index) { CodecUtil.CheckHeader(data, Lucene40DocValuesFormat.BYTES_VAR_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_START, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_CURRENT); CodecUtil.CheckHeader(index, 
Lucene40DocValuesFormat.BYTES_VAR_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_START, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_CURRENT); @@ -817,11 +828,13 @@ public SortedDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, this.valueCount = valueCount; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetOrd(int docID) { return (int)ordsReader.Get(docID); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void LookupOrd(int ord, BytesRef result) { long startAddress = addressReader.Get(ord); @@ -857,11 +870,13 @@ public SortedDocValuesAnonymousInnerClassHelper3(SortedDocValues @in) this.@in = @in; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetOrd(int docID) { return @in.GetOrd(docID) - 1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void LookupOrd(int ord, BytesRef result) { @in.LookupOrd(ord + 1, result); @@ -875,11 +890,13 @@ public override SortedSetDocValues GetSortedSet(FieldInfo field) throw new InvalidOperationException("Lucene 4.0 does not support SortedSet: how did you pull this off?"); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override IBits GetDocsWithField(FieldInfo field) { return new Lucene.Net.Util.Bits.MatchAllBits(state.SegmentInfo.DocCount); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -890,6 +907,7 @@ protected override void Dispose(bool disposing) public override long RamBytesUsed() => ramBytesUsed; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { } } } \ No newline at end of file diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40FieldInfosReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40FieldInfosReader.cs index 301947b004..488c0de2da 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40FieldInfosReader.cs +++ 
b/src/Lucene.Net/Codecs/Lucene40/Lucene40FieldInfosReader.cs @@ -1,6 +1,7 @@ using Lucene.Net.Index; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -186,11 +187,13 @@ internal enum LegacyDocValuesType : sbyte internal static class LegacyDocValuesTypeExtensions { + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static DocValuesType GetMapping(this LegacyDocValuesType legacyDocValuesType) { return mapping[legacyDocValuesType]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static LegacyDocValuesType ToLegacyDocValuesType(this string name) // Was ValueOf in Java { return (LegacyDocValuesType)Enum.Parse(typeof(LegacyDocValuesType), name); diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs index e954a6524d..38209fa31d 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40LiveDocsFormat.cs @@ -1,6 +1,5 @@ using Lucene.Net.Diagnostics; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 @@ -22,13 +21,12 @@ namespace Lucene.Net.Codecs.Lucene40 * limitations under the License. */ - using IBits = Lucene.Net.Util.IBits; - // javadocs using Directory = Lucene.Net.Store.Directory; + using IBits = Lucene.Net.Util.IBits; + using IMutableBits = Lucene.Net.Util.IMutableBits; using IndexFileNames = Lucene.Net.Index.IndexFileNames; using IOContext = Lucene.Net.Store.IOContext; - using IMutableBits = Lucene.Net.Util.IMutableBits; using SegmentCommitInfo = Lucene.Net.Index.SegmentCommitInfo; /// @@ -67,7 +65,7 @@ public class Lucene40LiveDocsFormat : LiveDocsFormat { /// /// Extension of deletes - internal static readonly string DELETES_EXTENSION = "del"; + internal const string DELETES_EXTENSION = "del"; /// /// Sole constructor. 
@@ -75,6 +73,7 @@ public Lucene40LiveDocsFormat() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override IMutableBits NewLiveDocs(int size) { BitVector bitVector = new BitVector(size); @@ -82,6 +81,7 @@ public override IMutableBits NewLiveDocs(int size) return bitVector; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override IMutableBits NewLiveDocs(IBits existing) { BitVector liveDocs = (BitVector)existing; @@ -113,6 +113,7 @@ public override void WriteLiveDocs(IMutableBits bits, Directory dir, SegmentComm liveDocs.Write(dir, filename, context); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Files(SegmentCommitInfo info, ICollection files) { if (info.HasDeletions) diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40NormsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40NormsFormat.cs index 73e9c385cf..2609d8abbf 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40NormsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40NormsFormat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -51,6 +52,7 @@ public override DocValuesConsumer NormsConsumer(SegmentWriteState state) throw new NotSupportedException("this codec can only be used for reading"); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesProducer NormsProducer(SegmentReadState state) { string filename = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "nrm", IndexFileNames.COMPOUND_FILE_EXTENSION); diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsBaseFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsBaseFormat.cs index 291d2c7bd1..e6b887340d 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsBaseFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsBaseFormat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -37,6 +38,7 @@ public 
Lucene40PostingsBaseFormat() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsReaderBase PostingsReaderBase(SegmentReadState state) { return new Lucene40PostingsReader(state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.SegmentSuffix); diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs index f374d976a3..804b7d7bd2 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsFormat.cs @@ -1,6 +1,5 @@ using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; namespace Lucene.Net.Codecs.Lucene40 { @@ -266,11 +265,11 @@ public override FieldsProducer FieldsProducer(SegmentReadState state) /// /// Extension of freq postings file. - internal static readonly string FREQ_EXTENSION = "frq"; + internal const string FREQ_EXTENSION = "frq"; /// /// Extension of prox postings file. - internal static readonly string PROX_EXTENSION = "prx"; + internal const string PROX_EXTENSION = "prx"; public override string ToString() { diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs index 309790fbb4..734ecb58c3 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs @@ -2,6 +2,7 @@ using Lucene.Net.Index; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -46,17 +47,17 @@ namespace Lucene.Net.Codecs.Lucene40 [Obsolete("Only for reading old 4.0 segments")] public class Lucene40PostingsReader : PostingsReaderBase { - internal static readonly string TERMS_CODEC = "Lucene40PostingsWriterTerms"; - internal static readonly string FRQ_CODEC = "Lucene40PostingsWriterFrq"; - internal static readonly string PRX_CODEC = "Lucene40PostingsWriterPrx"; + internal const string 
TERMS_CODEC = "Lucene40PostingsWriterTerms"; + internal const string FRQ_CODEC = "Lucene40PostingsWriterFrq"; + internal const string PRX_CODEC = "Lucene40PostingsWriterPrx"; //private static boolean DEBUG = BlockTreeTermsWriter.DEBUG; // Increment version to change it: - internal static readonly int VERSION_START = 0; + internal const int VERSION_START = 0; - internal static readonly int VERSION_LONG_SKIP = 1; - internal static readonly int VERSION_CURRENT = VERSION_LONG_SKIP; + internal const int VERSION_LONG_SKIP = 1; + internal const int VERSION_CURRENT = VERSION_LONG_SKIP; private readonly IndexInput freqIn; private readonly IndexInput proxIn; @@ -109,6 +110,7 @@ public Lucene40PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Init(IndexInput termsIn) { // Make sure we are talking to the matching past writer @@ -126,6 +128,7 @@ private sealed class StandardTermState : BlockTermState internal long proxOffset; internal long skipOffset; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Clone() { StandardTermState other = new StandardTermState(); @@ -133,6 +136,7 @@ public override object Clone() return other; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CopyFrom(TermState other) { base.CopyFrom(other); @@ -142,12 +146,14 @@ public override void CopyFrom(TermState other) skipOffset = other2.skipOffset; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return base.ToString() + " freqFP=" + freqOffset + " proxFP=" + proxOffset + " skipOffset=" + skipOffset; } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BlockTermState NewTermState() { return new StandardTermState(); @@ -178,7 +184,7 @@ public override void DecodeTerm(long[] longs, DataInput @in, FieldInfo fieldInfo { StandardTermState termState2 = (StandardTermState)termState; // if (DEBUG) 
System.out.println("SPR: nextTerm seg=" + segment + " tbOrd=" + termState2.termBlockOrd + " bytesReader.fp=" + termState.bytesReader.getPosition()); - bool isFirstTerm = termState2.TermBlockOrd == 0; + //bool isFirstTerm = termState2.TermBlockOrd == 0; // LUCENENET: IDE0059: Remove unnecessary value assignment if (absolute) { termState2.freqOffset = 0; @@ -223,23 +229,20 @@ public override DocsEnum Docs(FieldInfo fieldInfo, BlockTermState termState, IBi return NewDocsEnum(liveDocs, fieldInfo, (StandardTermState)termState); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool CanReuse(DocsEnum reuse, IBits liveDocs) { - if (reuse != null && (reuse is SegmentDocsEnumBase)) - { - SegmentDocsEnumBase docsEnum = (SegmentDocsEnumBase)reuse; - // If you are using ParellelReader, and pass in a - // reused DocsEnum, it could have come from another - // reader also using standard codec - if (docsEnum.startFreqIn == freqIn) - { - // we only reuse if the the actual the incoming enum has the same liveDocs as the given liveDocs - return liveDocs == docsEnum.m_liveDocs; - } - } + // If you are using ParellelReader, and pass in a + // reused DocsEnum, it could have come from another + // reader also using standard codec + if (reuse != null && (reuse is SegmentDocsEnumBase docsEnum) && docsEnum.startFreqIn == freqIn) + // we only reuse if the the actual the incoming enum has the same liveDocs as the given liveDocs + return liveDocs == docsEnum.m_liveDocs; + return false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private DocsEnum NewDocsEnum(IBits liveDocs, FieldInfo fieldInfo, StandardTermState termState) { if (liveDocs == null) @@ -263,47 +266,27 @@ public override DocsAndPositionsEnum DocsAndPositions(FieldInfo fieldInfo, Block // TODO: refactor if (fieldInfo.HasPayloads || hasOffsets) { - SegmentFullPositionsEnum docsEnum; - if (reuse == null || !(reuse is SegmentFullPositionsEnum)) - { + // If you are using ParellelReader, and pass in a + // reused 
DocsEnum, it could have come from another + // reader also using standard codec + if (reuse is null || !(reuse is SegmentFullPositionsEnum docsEnum) || docsEnum.startFreqIn != freqIn) docsEnum = new SegmentFullPositionsEnum(this, freqIn, proxIn); - } - else - { - docsEnum = (SegmentFullPositionsEnum)reuse; - if (docsEnum.startFreqIn != freqIn) - { - // If you are using ParellelReader, and pass in a - // reused DocsEnum, it could have come from another - // reader also using standard codec - docsEnum = new SegmentFullPositionsEnum(this, freqIn, proxIn); - } - } + return docsEnum.Reset(fieldInfo, (StandardTermState)termState, liveDocs); } else { - SegmentDocsAndPositionsEnum docsEnum; - if (reuse == null || !(reuse is SegmentDocsAndPositionsEnum)) - { + // If you are using ParellelReader, and pass in a + // reused DocsEnum, it could have come from another + // reader also using standard codec + if (reuse is null || !(reuse is SegmentDocsAndPositionsEnum docsEnum) || docsEnum.startFreqIn != freqIn) docsEnum = new SegmentDocsAndPositionsEnum(this, freqIn, proxIn); - } - else - { - docsEnum = (SegmentDocsAndPositionsEnum)reuse; - if (docsEnum.startFreqIn != freqIn) - { - // If you are using ParellelReader, and pass in a - // reused DocsEnum, it could have come from another - // reader also using standard codec - docsEnum = new SegmentDocsAndPositionsEnum(this, freqIn, proxIn); - } - } + return docsEnum.Reset(fieldInfo, (StandardTermState)termState, liveDocs); } } - internal static readonly int BUFFERSIZE = 64; + internal const int BUFFERSIZE = 64; private abstract class SegmentDocsEnumBase : DocsEnum { @@ -424,7 +407,8 @@ private int BinarySearch(int hi, int low, int target, int[] docs) return low - 1; } - internal int ReadFreq(IndexInput freqIn, int code) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static int ReadFreq(IndexInput freqIn, int code) // LUCENENET: CA1822: Mark members as static { if ((code & 1) != 0) // if low bit is set { @@ -465,6 
+449,7 @@ protected internal int Refill() protected internal abstract int NextUnreadDoc(); + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int FillDocs(int size) { IndexInput freqIn = this.freqIn; @@ -479,6 +464,7 @@ private int FillDocs(int size) return size; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int FillDocsAndFreqs(int size) { IndexInput freqIn = this.freqIn; @@ -534,6 +520,7 @@ private int SkipTo(int target) return ScanTo(target); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return m_limit; @@ -542,12 +529,9 @@ public override long GetCost() private sealed class AllDocsSegmentDocsEnum : SegmentDocsEnumBase { - private readonly Lucene40PostingsReader outerInstance; - internal AllDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInput startFreqIn) : base(outerInstance, startFreqIn, null) { - this.outerInstance = outerInstance; if (Debugging.AssertsEnabled) Debugging.Assert(m_liveDocs == null); } @@ -635,12 +619,9 @@ protected internal override int NextUnreadDoc() private sealed class LiveDocsSegmentDocsEnum : SegmentDocsEnumBase { - private readonly Lucene40PostingsReader outerInstance; - internal LiveDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInput startFreqIn, IBits liveDocs) : base(outerInstance, startFreqIn, liveDocs) { - this.outerInstance = outerInstance; if (Debugging.AssertsEnabled) Debugging.Assert(liveDocs != null); } @@ -948,11 +929,13 @@ public override int NextPosition() /// Returns the payload at this position, or null if no /// payload was indexed. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BytesRef GetPayload() { return null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return limit; @@ -1280,17 +1263,20 @@ public override BytesRef GetPayload() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return limit; } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40SegmentInfoFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40SegmentInfoFormat.cs index 1f97d4215f..96f149a1d5 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40SegmentInfoFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40SegmentInfoFormat.cs @@ -82,10 +82,10 @@ public Lucene40SegmentInfoFormat() /// /// File extension used to store . - public readonly static string SI_EXTENSION = "si"; + public const string SI_EXTENSION = "si"; - internal readonly static string CODEC_NAME = "Lucene40SegmentInfo"; - internal readonly static int VERSION_START = 0; - internal readonly static int VERSION_CURRENT = VERSION_START; + internal const string CODEC_NAME = "Lucene40SegmentInfo"; + internal const int VERSION_START = 0; + internal const int VERSION_CURRENT = VERSION_START; } } \ No newline at end of file diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40SkipListReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40SkipListReader.cs index cfc992eac9..f074e7d288 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40SkipListReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40SkipListReader.cs @@ -1,5 +1,6 @@ using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -32,10 +33,10 @@ public class Lucene40SkipListReader : MultiLevelSkipListReader { private bool currentFieldStoresPayloads; 
private bool currentFieldStoresOffsets; - private long[] freqPointer; - private long[] proxPointer; - private int[] payloadLength; - private int[] offsetLength; + private readonly long[] freqPointer; // LUCENENET: marked readonly + private readonly long[] proxPointer; // LUCENENET: marked readonly + private readonly int[] payloadLength; // LUCENENET: marked readonly + private readonly int[] offsetLength; // LUCENENET: marked readonly private long lastFreqPointer; private long lastProxPointer; @@ -95,6 +96,7 @@ public virtual void Init(long skipPointer, long freqBasePointer, long proxBasePo /// public virtual int OffsetLength => lastOffsetLength; + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void SeekChild(int level) { base.SeekChild(level); @@ -104,6 +106,7 @@ protected override void SeekChild(int level) offsetLength[level] = lastOffsetLength; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void SetLastSkipData(int level) { base.SetLastSkipData(level); diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsFormat.cs index cb1ad8b94a..884faeefdc 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsFormat.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Codecs.Lucene40 { /* @@ -82,11 +84,13 @@ public Lucene40StoredFieldsFormat() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override StoredFieldsReader FieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) { return new Lucene40StoredFieldsReader(directory, si, fn, context); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override StoredFieldsWriter FieldsWriter(Directory directory, SegmentInfo si, IOContext context) { return new Lucene40StoredFieldsWriter(directory, si.Name, context); diff --git 
a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs index f66567fb53..0621eb4e55 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsReader.cs @@ -2,6 +2,7 @@ using System; using System.Diagnostics; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -41,16 +42,18 @@ namespace Lucene.Net.Codecs.Lucene40 /// @lucene.internal /// /// - public sealed class Lucene40StoredFieldsReader : StoredFieldsReader, IDisposable + public sealed class Lucene40StoredFieldsReader : StoredFieldsReader // LUCENENET specific - removed IDisposable, it is already implemented in base class #if FEATURE_CLONEABLE , System.ICloneable #endif { private readonly FieldInfos fieldInfos; +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly IndexInput fieldsStream; private readonly IndexInput indexStream; - private int numTotalDocs; - private int size; +#pragma warning restore CA2213 // Disposable fields should be disposed + private readonly int numTotalDocs; // LUCENENET: marked readonly + private readonly int size; // LUCENENET: marked readonly private bool closed; /// @@ -60,6 +63,7 @@ public sealed class Lucene40StoredFieldsReader : StoredFieldsReader, IDisposable /// clones are called (eg, currently manages /// this logic). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Clone() { EnsureOpen(); @@ -128,6 +132,7 @@ public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IO } /// if this FieldsReader is disposed. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EnsureOpen() { if (closed) @@ -141,6 +146,7 @@ private void EnsureOpen() /// This means that the values will not be accessible. /// /// If an I/O error occurs. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -160,6 +166,7 @@ protected override void Dispose(bool disposing) /// public int Count => size; + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SeekIndex(int docID) { indexStream.Seek(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX + docID * 8L); @@ -302,11 +309,13 @@ public IndexInput RawDocs(int[] lengths, int startDocID, int numDocs) return fieldsStream; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs index 116afebf24..47bf7c84f5 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs @@ -2,7 +2,6 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Documents; using System; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 @@ -25,12 +24,12 @@ namespace Lucene.Net.Codecs.Lucene40 */ using AtomicReader = Lucene.Net.Index.AtomicReader; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using Directory = Lucene.Net.Store.Directory; using Document = Documents.Document; using FieldInfo = Lucene.Net.Index.FieldInfo; using FieldInfos = Lucene.Net.Index.FieldInfos; + using IBits = Lucene.Net.Util.IBits; using IIndexableField = Lucene.Net.Index.IIndexableField; using IndexFileNames = Lucene.Net.Index.IndexFileNames; using IndexInput = Lucene.Net.Store.IndexInput; @@ -51,12 +50,12 @@ namespace Lucene.Net.Codecs.Lucene40 public sealed class Lucene40StoredFieldsWriter : StoredFieldsWriter { // NOTE: bit 0 is free here! You can steal it! 
- internal static readonly int FIELD_IS_BINARY = 1 << 1; + internal const int FIELD_IS_BINARY = 1 << 1; // the old bit 1 << 2 was compressed, is now left out private const int _NUMERIC_BIT_SHIFT = 3; - internal static readonly int FIELD_IS_NUMERIC_MASK = 0x07 << _NUMERIC_BIT_SHIFT; + internal const int FIELD_IS_NUMERIC_MASK = 0x07 << _NUMERIC_BIT_SHIFT; internal const int FIELD_IS_NUMERIC_INT = 1 << _NUMERIC_BIT_SHIFT; internal const int FIELD_IS_NUMERIC_LONG = 2 << _NUMERIC_BIT_SHIFT; @@ -84,8 +83,10 @@ public sealed class Lucene40StoredFieldsWriter : StoredFieldsWriter private readonly Directory directory; private readonly string segment; +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput fieldsStream; private IndexOutput indexStream; +#pragma warning restore CA2213 // Disposable fields should be disposed /// /// Sole constructor. @@ -123,12 +124,14 @@ public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext // and adds a new entry for this document into the index // stream. this assumes the buffer was already written // in the correct fields format. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void StartDocument(int numStoredFields) { indexStream.WriteInt64(fieldsStream.GetFilePointer()); fieldsStream.WriteVInt32(numStoredFields); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -269,6 +272,7 @@ public void AddRawDocuments(IndexInput stream, int[] lengths, int numDocs) if (Debugging.AssertsEnabled) Debugging.Assert(fieldsStream.GetFilePointer() == position); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Finish(FieldInfos fis, int numDocs) { if (HEADER_LENGTH_IDX + ((long)numDocs) * 8 != indexStream.GetFilePointer()) @@ -298,9 +302,9 @@ public override int Merge(MergeState mergeState) { StoredFieldsReader fieldsReader = matchingSegmentReader.FieldsReader; // we can only bulk-copy if the matching reader is also a Lucene40FieldsReader - if (fieldsReader != null && fieldsReader is Lucene40StoredFieldsReader) + if (fieldsReader != null && fieldsReader is Lucene40StoredFieldsReader lucene40StoredFieldsReader) { - matchingFieldsReader = (Lucene40StoredFieldsReader)fieldsReader; + matchingFieldsReader = lucene40StoredFieldsReader; } } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsFormat.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsFormat.cs index 269e0f004e..88e0521259 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsFormat.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Codecs.Lucene40 { /* @@ -114,11 +116,13 @@ public Lucene40TermVectorsFormat() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context) { return new Lucene40TermVectorsReader(directory, segmentInfo, fieldInfos, context); } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context) { return new Lucene40TermVectorsWriter(directory, segmentInfo.Name, context); diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs index 210ce3293c..4ebb197925 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs @@ -3,7 +3,7 @@ using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 { @@ -24,7 +24,6 @@ namespace Lucene.Net.Codecs.Lucene40 * limitations under the License. */ - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using Directory = Lucene.Net.Store.Directory; using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum; @@ -32,6 +31,7 @@ namespace Lucene.Net.Codecs.Lucene40 using FieldInfo = Lucene.Net.Index.FieldInfo; using FieldInfos = Lucene.Net.Index.FieldInfos; using Fields = Lucene.Net.Index.Fields; + using IBits = Lucene.Net.Util.IBits; using IndexFileNames = Lucene.Net.Index.IndexFileNames; using IndexInput = Lucene.Net.Store.IndexInput; using IOContext = Lucene.Net.Store.IOContext; @@ -46,7 +46,7 @@ namespace Lucene.Net.Codecs.Lucene40 /// It reads .tvd, .tvf, and .tvx files. 
/// /// - public class Lucene40TermVectorsReader : TermVectorsReader, IDisposable + public class Lucene40TermVectorsReader : TermVectorsReader // LUCENENET specific - removed IDisposable, it is already implemented in base class { internal const sbyte STORE_POSITIONS_WITH_TERMVECTOR = 0x1; @@ -79,13 +79,15 @@ public class Lucene40TermVectorsReader : TermVectorsReader, IDisposable internal static readonly long HEADER_LENGTH_DOCS = CodecUtil.HeaderLength(CODEC_NAME_DOCS); internal static readonly long HEADER_LENGTH_INDEX = CodecUtil.HeaderLength(CODEC_NAME_INDEX); - private FieldInfos fieldInfos; + private readonly FieldInfos fieldInfos; // LUCENENET: marked readonly - private IndexInput tvx; - private IndexInput tvd; - private IndexInput tvf; - private int size; - private int numTotalDocs; +#pragma warning disable CA2213 // Disposable fields should be disposed + private readonly IndexInput tvx; // LUCENENET: marked readonly + private readonly IndexInput tvd; // LUCENENET: marked readonly + private readonly IndexInput tvf; // LUCENENET: marked readonly +#pragma warning restore CA2213 // Disposable fields should be disposed + private readonly int size; // LUCENENET: marked readonly + private readonly int numTotalDocs; // LUCENENET: marked readonly /// /// Used by clone. 
@@ -150,10 +152,9 @@ public Lucene40TermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldIn { Dispose(); } // ensure we throw our original exception -#pragma warning disable 168 - catch (Exception t) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignored } } } @@ -286,6 +287,7 @@ public override IEnumerator GetEnumerator() return GetFieldInfoEnumerable().GetEnumerator(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private IEnumerable GetFieldInfoEnumerable() { int fieldUpto = 0; @@ -295,6 +297,7 @@ private IEnumerable GetFieldInfoEnumerable() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Terms GetTerms(string field) { FieldInfo fieldInfo = outerInstance.fieldInfos.FieldInfo(field); @@ -304,8 +307,7 @@ public override Terms GetTerms(string field) return null; } - int fieldIndex; - if (!fieldNumberToIndex.TryGetValue(fieldInfo.Number, out fieldIndex)) + if (!fieldNumberToIndex.TryGetValue(fieldInfo.Number, out int fieldIndex)) { // Term vectors were not indexed for this field return null; @@ -314,20 +316,7 @@ public override Terms GetTerms(string field) return new TVTerms(outerInstance, fieldFPs[fieldIndex]); } - public override int Count - { - get - { - if (fieldNumbers == null) - { - return 0; - } - else - { - return fieldNumbers.Length; - } - } - } + public override int Count => fieldNumbers is null ? 
0 : fieldNumbers.Length; } private class TVTerms : Terms @@ -352,6 +341,7 @@ public TVTerms(Lucene40TermVectorsReader outerInstance, long tvfFP) tvfFPStart = outerInstance.tvf.GetFilePointer(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetEnumerator() { var termsEnum = new TVTermsEnum(outerInstance); @@ -359,23 +349,11 @@ public override TermsEnum GetEnumerator() return termsEnum; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetEnumerator(TermsEnum reuse) { - TVTermsEnum termsEnum; -#pragma warning disable IDE0038 // Use pattern matching - if (reuse is null || !(reuse is TVTermsEnum)) -#pragma warning restore IDE0038 // Use pattern matching - { + if (reuse is null || !(reuse is TVTermsEnum termsEnum) || !termsEnum.CanReuse(outerInstance.tvf)) termsEnum = new TVTermsEnum(outerInstance); - } - else - { - var reusable = (TVTermsEnum)reuse; - if (reusable.CanReuse(outerInstance.tvf)) - termsEnum = reusable; - else - termsEnum = new TVTermsEnum(outerInstance); - } termsEnum.Reset(numTerms, tvfFPStart, storePositions, storeOffsets, storePayloads); return termsEnum; @@ -436,6 +414,7 @@ public TVTermsEnum(Lucene40TermVectorsReader outerInstance) tvf = (IndexInput)origTVF.Clone(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual bool CanReuse(IndexInput tvf) { return tvf == origTVF; @@ -580,21 +559,17 @@ public override BytesRef Next() public override long TotalTermFreq => freq; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) // ignored { - TVDocsEnum docsEnum; - if (reuse != null && reuse is TVDocsEnum) - { - docsEnum = (TVDocsEnum)reuse; - } - else - { + if (reuse is null || !(reuse is TVDocsEnum docsEnum)) docsEnum = new TVDocsEnum(); - } + docsEnum.Reset(liveDocs, freq); return docsEnum; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocsAndPositionsEnum 
DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { if (!storePositions && !storeOffsets) @@ -602,15 +577,9 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos return null; } - TVDocsAndPositionsEnum docsAndPositionsEnum; - if (reuse != null && reuse is TVDocsAndPositionsEnum) - { - docsAndPositionsEnum = (TVDocsAndPositionsEnum)reuse; - } - else - { + if (reuse is null || !(reuse is TVDocsAndPositionsEnum docsAndPositionsEnum)) docsAndPositionsEnum = new TVDocsAndPositionsEnum(); - } + docsAndPositionsEnum.Reset(liveDocs, positions, startOffsets, endOffsets, payloadOffsets, payloadData); return docsAndPositionsEnum; } @@ -644,11 +613,13 @@ public override int NextDoc() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Advance(int target) { return SlowAdvance(target); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Reset(IBits liveDocs, int freq) { this.liveDocs = liveDocs; @@ -657,6 +628,7 @@ public virtual void Reset(IBits liveDocs, int freq) didNext = false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return 1; @@ -707,6 +679,7 @@ public override int NextDoc() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Advance(int target) { return SlowAdvance(target); @@ -801,6 +774,7 @@ public override int EndOffset } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return 1; @@ -848,11 +822,13 @@ public override object Clone() return new Lucene40TermVectorsReader(fieldInfos, cloneTvx, cloneTvd, cloneTvf, size, numTotalDocs); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { } diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs 
b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs index 4263f39a02..6055bf1fa8 100644 --- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs +++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs @@ -2,7 +2,6 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene40 @@ -26,13 +25,13 @@ namespace Lucene.Net.Codecs.Lucene40 using ArrayUtil = Lucene.Net.Util.ArrayUtil; using AtomicReader = Lucene.Net.Index.AtomicReader; - using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; using DataInput = Lucene.Net.Store.DataInput; using Directory = Lucene.Net.Store.Directory; using FieldInfo = Lucene.Net.Index.FieldInfo; using FieldInfos = Lucene.Net.Index.FieldInfos; using Fields = Lucene.Net.Index.Fields; + using IBits = Lucene.Net.Util.IBits; using IndexFileNames = Lucene.Net.Index.IndexFileNames; using IndexOutput = Lucene.Net.Store.IndexOutput; using IOContext = Lucene.Net.Store.IOContext; @@ -60,7 +59,9 @@ public sealed class Lucene40TermVectorsWriter : TermVectorsWriter { private readonly Directory directory; private readonly string segment; +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput tvx = null, tvd = null, tvf = null; +#pragma warning restore CA2213 // Disposable fields should be disposed /// /// Sole constructor. 
@@ -95,6 +96,7 @@ public Lucene40TermVectorsWriter(Directory directory, string segment, IOContext } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void StartDocument(int numVectorFields) { lastFieldName = null; @@ -156,7 +158,7 @@ public override void FinishDocument() private int[] offsetStartBuffer = new int[10]; private int[] offsetEndBuffer = new int[10]; - private BytesRef payloadData = new BytesRef(10); + private readonly BytesRef payloadData = new BytesRef(10); // LUCENENET: marked readonly private int bufferedIndex = 0; private int bufferedFreq = 0; private bool positions = false; @@ -332,10 +334,9 @@ public override void Abort() { Dispose(); } -#pragma warning disable 168 - catch (Exception ignored) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignored } IOUtils.DeleteFilesIgnoringExceptions(directory, IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION), @@ -389,9 +390,9 @@ public override int Merge(MergeState mergeState) { TermVectorsReader vectorsReader = matchingSegmentReader.TermVectorsReader; - if (vectorsReader != null && vectorsReader is Lucene40TermVectorsReader) + if (vectorsReader != null && vectorsReader is Lucene40TermVectorsReader lucene40TermVectorsReader) { - matchingVectorsReader = (Lucene40TermVectorsReader)vectorsReader; + matchingVectorsReader = lucene40TermVectorsReader; } } if (reader.LiveDocs != null) @@ -504,6 +505,7 @@ private int CopyVectorsNoDeletions(MergeState mergeState, Lucene40TermVectorsRea return maxDoc; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Finish(FieldInfos fis, int numDocs) { if (Lucene40TermVectorsReader.HEADER_LENGTH_INDEX + ((long)numDocs) * 16 != tvx.GetFilePointer()) @@ -519,6 +521,7 @@ public override void Finish(FieldInfos fis, int numDocs) /// /// Close all streams. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) diff --git a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs index 8cc37cde29..4406d54d28 100644 --- a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs +++ b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs @@ -3,8 +3,8 @@ using Lucene.Net.Support; using Lucene.Net.Util.Packed; using System; -using System.Diagnostics; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene41 { @@ -34,7 +34,7 @@ internal sealed class ForUtil /// /// Special number of bits per value used whenever all values to encode are equal. /// - private static readonly int ALL_VALUES_EQUAL = 0; + private const int ALL_VALUES_EQUAL = 0; /// /// Upper limit of the number of bytes that might be required to stored @@ -76,6 +76,7 @@ private static int LoadMaxDataSize() // LUCENENET: Avoid static constructors (se /// Compute the number of iterations required to decode /// values with the provided . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int ComputeIterations(PackedInt32s.IDecoder decoder) { return (int)Math.Ceiling((float)Lucene41PostingsFormat.BLOCK_SIZE / decoder.ByteValueCount); @@ -85,6 +86,7 @@ private static int ComputeIterations(PackedInt32s.IDecoder decoder) /// Compute the number of bytes required to encode a block of values that require /// bits per value with format . 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int EncodedSize(PackedInt32s.Format format, int packedIntsVersion, int bitsPerValue) { long byteCount = format.ByteCount(packedIntsVersion, Lucene41PostingsFormat.BLOCK_SIZE, bitsPerValue); @@ -229,6 +231,7 @@ internal void SkipBlock(IndexInput @in) @in.Seek(@in.GetFilePointer() + encodedSize); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static bool IsAllEqual(int[] data) { int v = data[0]; @@ -246,6 +249,7 @@ private static bool IsAllEqual(int[] data) /// Compute the number of bits required to serialize any of the longs in /// . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int BitsRequired(int[] data) { long or = 0; diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs index 8557ee34e5..e206944f6a 100644 --- a/src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs +++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene41 { @@ -78,6 +79,7 @@ public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene41Codec outerInstan this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsFormat GetPostingsFormatForField(string field) { return outerInstance.GetPostingsFormatForField(field); @@ -111,6 +113,7 @@ public Lucene41Codec() /// /// The default implementation always returns "Lucene41" /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual PostingsFormat GetPostingsFormatForField(string field) { // LUCENENET specific - lazy initialize the codec to ensure we get the correct type if overridden. 
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsBaseFormat.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsBaseFormat.cs index 94c4d54f2a..aeb0d252c1 100644 --- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsBaseFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsBaseFormat.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Codecs.Lucene41 { /* @@ -37,11 +39,13 @@ public Lucene41PostingsBaseFormat() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsReaderBase PostingsReaderBase(SegmentReadState state) { return new Lucene41PostingsReader(state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.SegmentSuffix); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsWriterBase PostingsWriterBase(SegmentWriteState state) { return new Lucene41PostingsWriter(state); diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs index f3b3305ecb..7683ecfed2 100644 --- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs +++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs @@ -4,6 +4,7 @@ using Lucene.Net.Support; using Lucene.Net.Util; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene41 { @@ -33,12 +34,14 @@ namespace Lucene.Net.Codecs.Lucene41 /// public sealed class Lucene41PostingsReader : PostingsReaderBase { +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly IndexInput docIn; private readonly IndexInput posIn; private readonly IndexInput payIn; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly ForUtil forUtil; - private int version; + private readonly int version; // LUCENENET: marked readonly // public static boolean DEBUG = false; @@ -82,6 +85,7 @@ public Lucene41PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo } } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Init(IndexInput termsIn) { // Make sure we are talking to the matching postings writer @@ -125,11 +129,13 @@ internal static void ReadVInt32Block(IndexInput docIn, int[] docBuffer, int[] fr } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BlockTermState NewTermState() { return new Lucene41PostingsWriter.Int32BlockTermState(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -234,21 +240,12 @@ private void DecodeTerm(DataInput @in, FieldInfo fieldInfo, Lucene41PostingsWrit } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocsEnum Docs(FieldInfo fieldInfo, BlockTermState termState, IBits liveDocs, DocsEnum reuse, DocsFlags flags) { - BlockDocsEnum docsEnum; - if (reuse is BlockDocsEnum) - { - docsEnum = (BlockDocsEnum)reuse; - if (!docsEnum.CanReuse(docIn, fieldInfo)) - { - docsEnum = new BlockDocsEnum(this, fieldInfo); - } - } - else - { + if (reuse is null || !(reuse is BlockDocsEnum docsEnum) || !docsEnum.CanReuse(docIn, fieldInfo)) docsEnum = new BlockDocsEnum(this, fieldInfo); - } + return docsEnum.Reset(liveDocs, (Lucene41PostingsWriter.Int32BlockTermState)termState, flags); } @@ -262,36 +259,16 @@ public override DocsAndPositionsEnum DocsAndPositions(FieldInfo fieldInfo, Block if ((!indexHasOffsets || (flags & DocsAndPositionsFlags.OFFSETS) == 0) && (!indexHasPayloads || (flags & DocsAndPositionsFlags.PAYLOADS) == 0)) { - BlockDocsAndPositionsEnum docsAndPositionsEnum; - if (reuse is BlockDocsAndPositionsEnum) - { - docsAndPositionsEnum = (BlockDocsAndPositionsEnum)reuse; - if (!docsAndPositionsEnum.CanReuse(docIn, fieldInfo)) - { - docsAndPositionsEnum = new BlockDocsAndPositionsEnum(this, fieldInfo); - } - } - else - { + if (reuse is null || !(reuse is BlockDocsAndPositionsEnum docsAndPositionsEnum) || !docsAndPositionsEnum.CanReuse(docIn, fieldInfo)) 
docsAndPositionsEnum = new BlockDocsAndPositionsEnum(this, fieldInfo); - } + return docsAndPositionsEnum.Reset(liveDocs, (Lucene41PostingsWriter.Int32BlockTermState)termState); } else { - EverythingEnum everythingEnum; - if (reuse is EverythingEnum) - { - everythingEnum = (EverythingEnum)reuse; - if (!everythingEnum.CanReuse(docIn, fieldInfo)) - { - everythingEnum = new EverythingEnum(this, fieldInfo); - } - } - else - { + if (reuse is null || !(reuse is EverythingEnum everythingEnum) || !everythingEnum.CanReuse(docIn, fieldInfo)) everythingEnum = new EverythingEnum(this, fieldInfo); - } + return everythingEnum.Reset(liveDocs, (Lucene41PostingsWriter.Int32BlockTermState)termState, flags); } } @@ -355,6 +332,7 @@ public BlockDocsEnum(Lucene41PostingsReader outerInstance, FieldInfo fieldInfo) encoded = new byte[ForUtil.MAX_ENCODED_SIZE]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool CanReuse(IndexInput docIn, FieldInfo fieldInfo) { // LUCENENET specific - to avoid boxing, changed from CompareTo() to IndexOptionsComparer.Compare() @@ -593,6 +571,7 @@ public override int Advance(int target) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return docFreq; @@ -676,6 +655,7 @@ public BlockDocsAndPositionsEnum(Lucene41PostingsReader outerInstance, FieldInfo indexHasPayloads = fieldInfo.HasPayloads; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool CanReuse(IndexInput docIn, FieldInfo fieldInfo) { return docIn == startDocIn && @@ -1042,11 +1022,13 @@ public override int NextPosition() public override int EndOffset => -1; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BytesRef GetPayload() { return null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return docFreq; @@ -1178,6 +1160,7 @@ public EverythingEnum(Lucene41PostingsReader outerInstance, FieldInfo fieldInfo) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool 
CanReuse(IndexInput docIn, FieldInfo fieldInfo) { // LUCENENET specific - to avoid boxing, changed from CompareTo() to IndexOptionsComparer.Compare() @@ -1682,6 +1665,7 @@ public override int NextPosition() public override int EndOffset => endOffset; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BytesRef GetPayload() { // if (DEBUG) { @@ -1697,12 +1681,14 @@ public override BytesRef GetPayload() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return docFreq; } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return 0; diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs index f47cb45177..a7ef3ab3b5 100644 --- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs +++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsWriter.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Index; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene41 { @@ -62,9 +63,11 @@ public sealed class Lucene41PostingsWriter : PostingsWriterBase internal const int VERSION_CHECKSUM = 2; internal const int VERSION_CURRENT = VERSION_CHECKSUM; +#pragma warning disable CA2213 // Disposable fields should be disposed internal IndexOutput docOut; internal IndexOutput posOut; internal IndexOutput payOut; +#pragma warning restore CA2213 // Disposable fields should be disposed internal static readonly Int32BlockTermState emptyState = new Int32BlockTermState(); internal Int32BlockTermState lastState; @@ -210,6 +213,7 @@ public sealed class Int32BlockTermState : BlockTermState // freq is always implicitly totalTermFreq in this case. 
internal int singletonDocID = -1; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Clone() { Int32BlockTermState other = new Int32BlockTermState(); @@ -235,11 +239,13 @@ public override string ToString() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BlockTermState NewTermState() { return new Int32BlockTermState(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Init(IndexOutput termsOut) { CodecUtil.WriteHeader(termsOut, TERMS_CODEC, VERSION_CURRENT); diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs index 81d23b5437..5e0168c99b 100644 --- a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs +++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipReader.cs @@ -1,6 +1,6 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; -using System.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene41 { @@ -54,11 +54,11 @@ internal sealed class Lucene41SkipReader : MultiLevelSkipListReader // private boolean DEBUG = Lucene41PostingsReader.DEBUG; private readonly int blockSize; - private long[] docPointer; - private long[] posPointer; - private long[] payPointer; - private int[] posBufferUpto; - private int[] payloadByteUpto; + private readonly long[] docPointer; // LUCENENET: marked readonly + private readonly long[] posPointer; // LUCENENET: marked readonly + private readonly long[] payPointer; // LUCENENET: marked readonly + private readonly int[] posBufferUpto; // LUCENENET: marked readonly + private readonly int[] payloadByteUpto; // LUCENENET: marked readonly private long lastPosPointer; private long lastPayPointer; @@ -106,6 +106,7 @@ public Lucene41SkipReader(IndexInput skipStream, int maxSkipLevels, int blockSiz /// 1. silly reading a non-existed skip point after the last block boundary /// 2. 
moving into the vInt block /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal int Trim(int df) { return df % blockSize == 0 ? df - 1 : df; diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipWriter.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipWriter.cs index 4201c683f1..2febb8b333 100644 --- a/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipWriter.cs +++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41SkipWriter.cs @@ -1,4 +1,5 @@ using Lucene.Net.Support; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene41 { @@ -44,11 +45,11 @@ internal sealed class Lucene41SkipWriter : MultiLevelSkipListWriter { // private boolean DEBUG = Lucene41PostingsReader.DEBUG; - private int[] lastSkipDoc; - private long[] lastSkipDocPointer; - private long[] lastSkipPosPointer; - private long[] lastSkipPayPointer; - private int[] lastPayloadByteUpto; + private readonly int[] lastSkipDoc; // LUCENENET: marked readonly + private readonly long[] lastSkipDocPointer; // LUCENENET: marked readonly + private readonly long[] lastSkipPosPointer; // LUCENENET: marked readonly + private readonly long[] lastSkipPayPointer; // LUCENENET: marked readonly + private readonly int[] lastPayloadByteUpto; // LUCENENET: marked readonly private readonly IndexOutput docOut; private readonly IndexOutput posOut; @@ -84,6 +85,7 @@ public Lucene41SkipWriter(int maxSkipLevels, int blockSize, int docCount, IndexO } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void SetField(bool fieldHasPositions, bool fieldHasOffsets, bool fieldHasPayloads) { this.fieldHasPositions = fieldHasPositions; diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs index 3ffeea979c..3e9126b453 100644 --- a/src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs +++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene42 { @@ -61,6 +62,7 @@ 
public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene42Codec outerInstan this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsFormat GetPostingsFormatForField(string field) { return outerInstance.GetPostingsFormatForField(field); @@ -78,6 +80,7 @@ public PerFieldDocValuesFormatAnonymousInnerClassHelper(Lucene42Codec outerInsta this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesFormat GetDocValuesFormatForField(string field) { return outerInstance.GetDocValuesFormatForField(field); @@ -111,6 +114,7 @@ public Lucene42Codec() /// /// The default implementation always returns "Lucene41" /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual PostingsFormat GetPostingsFormatForField(string field) { // LUCENENET specific - lazy initialize the codec to ensure we get the correct type if overridden. @@ -127,6 +131,7 @@ public virtual PostingsFormat GetPostingsFormatForField(string field) /// /// The default implementation always returns "Lucene42" /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual DocValuesFormat GetDocValuesFormatForField(string field) { // LUCENENET specific - lazy initialize the codec to ensure we get the correct type if overridden. 
@@ -147,10 +152,6 @@ public virtual DocValuesFormat GetDocValuesFormatForField(string field) private class Lucene42NormsFormatAnonymousInnerClassHelper : Lucene42NormsFormat { - public Lucene42NormsFormatAnonymousInnerClassHelper() - { - } - public override DocValuesConsumer NormsConsumer(SegmentWriteState state) { throw new NotSupportedException("this codec can only be used for reading"); diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesFormat.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesFormat.cs index 913e7b90d5..005eb00f8d 100644 --- a/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesFormat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene42 { @@ -155,6 +156,7 @@ public override DocValuesConsumer FieldsConsumer(SegmentWriteState state) throw new NotSupportedException("this codec can only be used for reading"); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesProducer FieldsProducer(SegmentReadState state) { return new Lucene42DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION); diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesProducer.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesProducer.cs index 401458d399..91ed273b05 100644 --- a/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesProducer.cs +++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesProducer.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene42 { @@ -105,7 +106,7 @@ internal Lucene42DocValuesProducer(SegmentReadState state, string dataCodec, str numerics = new Dictionary(); binaries = new Dictionary(); fsts = new Dictionary(); - ReadFields(@in, state.FieldInfos); + ReadFields(@in /*, state.FieldInfos // LUCENENET: Never read */); if (version >= VERSION_CHECKSUM) { @@ 
-154,7 +155,7 @@ internal Lucene42DocValuesProducer(SegmentReadState state, string dataCodec, str } } - private void ReadFields(IndexInput meta, FieldInfos infos) + private void ReadFields(IndexInput meta /*, FieldInfos infos // LUCENENET: Never read */) { int fieldNumber = meta.ReadVInt32(); while (fieldNumber != -1) @@ -223,8 +224,7 @@ public override NumericDocValues GetNumeric(FieldInfo field) { lock (this) { - NumericDocValues instance; - if (!numericInstances.TryGetValue(field.Number, out instance) || instance == null) + if (!numericInstances.TryGetValue(field.Number, out NumericDocValues instance) || instance == null) { instance = LoadNumeric(field); numericInstances[field.Number] = instance; @@ -235,6 +235,7 @@ public override NumericDocValues GetNumeric(FieldInfo field) public override long RamBytesUsed() => ramBytesUsed; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { if (version >= VERSION_CHECKSUM) @@ -276,7 +277,7 @@ private NumericDocValues LoadNumeric(FieldInfo field) byte[] bytes = new byte[maxDoc]; data.ReadBytes(bytes, 0, bytes.Length); ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes)); - return new NumericDocValuesAnonymousInnerClassHelper2(this, bytes); + return new NumericDocValuesAnonymousInnerClassHelper2(bytes); case GCD_COMPRESSED: long min = data.ReadInt64(); @@ -302,6 +303,7 @@ public NumericDocValuesAnonymousInnerClassHelper(long[] decode, PackedInt32s.Rea this.ordsReader = ordsReader; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return decode[(int)ordsReader.Get(docID)]; @@ -312,11 +314,12 @@ private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues { private readonly byte[] bytes; - public NumericDocValuesAnonymousInnerClassHelper2(Lucene42DocValuesProducer outerInstance, byte[] bytes) + public NumericDocValuesAnonymousInnerClassHelper2(byte[] bytes) { this.bytes = bytes; } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return (sbyte)bytes[docID]; @@ -336,6 +339,7 @@ public NumericDocValuesAnonymousInnerClassHelper3(long min, long mult, BlockPack this.quotientReader = quotientReader; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int docID) { return min + mult * quotientReader.Get(docID); @@ -346,8 +350,7 @@ public override BinaryDocValues GetBinary(FieldInfo field) { lock (this) { - BinaryDocValues instance; - if (!binaryInstances.TryGetValue(field.Number, out instance) || instance == null) + if (!binaryInstances.TryGetValue(field.Number, out BinaryDocValues instance) || instance == null) { instance = LoadBinary(field); binaryInstances[field.Number] = instance; @@ -388,6 +391,7 @@ public BinaryDocValuesAnonymousInnerClassHelper(PagedBytes.Reader bytesReader, i this.fixedLength = fixedLength; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Get(int docID, BytesRef result) { bytesReader.FillSlice(result, fixedLength * (long)docID, fixedLength); @@ -405,6 +409,7 @@ public BinaryDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, this.addresses = addresses; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Get(int docID, BytesRef result) { long startAddress = docID == 0 ? 
0 : addresses.Get(docID - 1); @@ -463,6 +468,7 @@ public SortedDocValuesAnonymousInnerClassHelper(FSTEntry entry, NumericDocValues this.fstEnum = fstEnum; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetOrd(int docID) { return (int)docToOrd.Get(docID); @@ -512,6 +518,7 @@ public override int LookupTerm(BytesRef key) public override int ValueCount => (int)entry.NumOrds; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetTermsEnum() { return new FSTTermsEnum(fst); @@ -579,6 +586,7 @@ public SortedSetDocValuesAnonymousInnerClassHelper(FSTEntry entry, BinaryDocValu private long currentOrd; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long NextOrd() { if (input.Eof) @@ -592,6 +600,7 @@ public override long NextOrd() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void SetDocument(int docID) { docToOrds.Get(docID, @ref); @@ -643,12 +652,14 @@ public override long LookupTerm(BytesRef key) public override long ValueCount => entry.NumOrds; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetTermsEnum() { return new FSTTermsEnum(fst); } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override IBits GetDocsWithField(FieldInfo field) { if (field.DocValuesType == DocValuesType.SORTED_SET) @@ -661,6 +672,7 @@ public override IBits GetDocsWithField(FieldInfo field) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -752,6 +764,7 @@ public override SeekStatus SeekCeil(BytesRef text) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool SeekExact(BytesRef text) { if (@in.SeekExact(text) == null) diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs index 4eda6730fa..b949414580 100644 --- a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs +++ 
b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsConsumer.cs @@ -1,7 +1,6 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Codecs.Lucene42 @@ -48,7 +47,9 @@ internal class Lucene42NormsConsumer : DocValuesConsumer internal const sbyte UNCOMPRESSED = 2; internal const sbyte GCD_COMPRESSED = 3; +#pragma warning disable CA2213 // Disposable fields should be disposed internal IndexOutput data, meta; +#pragma warning restore CA2213 // Disposable fields should be disposed internal readonly int maxDoc; internal readonly float acceptableOverheadRatio; @@ -85,7 +86,7 @@ public override void AddNumericField(FieldInfo field, IEnumerable values) long maxValue = long.MinValue; long gcd = 0; // TODO: more efficient? - JCG.HashSet uniqueValues = null; + JCG.HashSet uniqueValues/* = null*/; // LUCENENET: IDE0059: Remove unnecessary value assignment if (true) { uniqueValues = new JCG.HashSet(); diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsFormat.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsFormat.cs index 7884efe478..56bf2bb77a 100644 --- a/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42NormsFormat.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Codecs.Lucene42 { /* @@ -62,11 +64,13 @@ public Lucene42NormsFormat(float acceptableOverheadRatio) this.acceptableOverheadRatio = acceptableOverheadRatio; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesConsumer NormsConsumer(SegmentWriteState state) { return new Lucene42NormsConsumer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION, acceptableOverheadRatio); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesProducer NormsProducer(SegmentReadState state) { return new Lucene42DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, 
METADATA_EXTENSION); diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs index 7a6db7f5ad..c14dee13d1 100644 --- a/src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs +++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene45 { @@ -63,6 +64,7 @@ public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene45Codec outerInstan this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsFormat GetPostingsFormatForField(string field) { return outerInstance.GetPostingsFormatForField(field); @@ -80,6 +82,7 @@ public PerFieldDocValuesFormatAnonymousInnerClassHelper(Lucene45Codec outerInsta this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesFormat GetDocValuesFormatForField(string field) { return outerInstance.GetDocValuesFormatForField(field); @@ -113,6 +116,7 @@ public Lucene45Codec() /// /// The default implementation always returns "Lucene41" /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual PostingsFormat GetPostingsFormatForField(string field) { // LUCENENET specific - lazy initialize the codec to ensure we get the correct type if overridden. @@ -129,6 +133,7 @@ public virtual PostingsFormat GetPostingsFormatForField(string field) /// /// The default implementation always returns "Lucene45" /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual DocValuesFormat GetDocValuesFormatForField(string field) { // LUCENENET specific - lazy initialize the codec to ensure we get the correct type if overridden. 
diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs index 5b32e5c18f..d26013f22c 100644 --- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs +++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesConsumer.cs @@ -2,7 +2,7 @@ using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Codecs.Lucene45 @@ -39,11 +39,11 @@ namespace Lucene.Net.Codecs.Lucene45 /// /// Writer for - public class Lucene45DocValuesConsumer : DocValuesConsumer, IDisposable + public class Lucene45DocValuesConsumer : DocValuesConsumer // LUCENENET specific - removed IDisposable, it is already implemented in base class { - internal static readonly int BLOCK_SIZE = 16384; - internal static readonly int ADDRESS_INTERVAL = 16; - internal static readonly long MISSING_ORD = -1L; + internal const int BLOCK_SIZE = 16384; + internal const int ADDRESS_INTERVAL = 16; + internal const long MISSING_ORD = -1L; /// /// Compressed using packed blocks of s. 
@@ -81,7 +81,9 @@ public class Lucene45DocValuesConsumer : DocValuesConsumer, IDisposable /// public static readonly int SORTED_SET_SINGLE_VALUED_SORTED = 1; +#pragma warning disable CA2213 // Disposable fields should be disposed internal IndexOutput data, meta; +#pragma warning restore CA2213 // Disposable fields should be disposed internal readonly int maxDoc; /// @@ -109,6 +111,7 @@ public Lucene45DocValuesConsumer(SegmentWriteState state, string dataCodec, stri } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void AddNumericField(FieldInfo field, IEnumerable values) { AddNumericField(field, values, true); @@ -406,8 +409,8 @@ protected virtual void AddTermsDict(FieldInfo field, IEnumerable value // write addresses of indexed terms termAddresses.Finish(); addressBuffer.WriteTo(data); - addressBuffer = null; - termAddresses = null; + //addressBuffer = null; // LUCENENET: IDE0059: Remove unnecessary value assignment + //termAddresses = null; // LUCENENET: IDE0059: Remove unnecessary value assignment meta.WriteVInt32(minLength); meta.WriteVInt32(maxLength); meta.WriteVInt64(count); diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesFormat.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesFormat.cs index 780a2a1ae1..473f7c2230 100644 --- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesFormat.cs +++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesFormat.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Codecs.Lucene45 { /* @@ -159,11 +161,13 @@ public Lucene45DocValuesFormat() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesConsumer FieldsConsumer(SegmentWriteState state) { return new Lucene45DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesProducer FieldsProducer(SegmentReadState state) { return new Lucene45DocValuesProducer(state, DATA_CODEC, 
DATA_EXTENSION, META_CODEC, META_EXTENSION); diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs index 65ac00dd56..fd29a7a245 100644 --- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs +++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs.Lucene45 { @@ -51,7 +52,7 @@ namespace Lucene.Net.Codecs.Lucene45 /// /// Reader for . - public class Lucene45DocValuesProducer : DocValuesProducer, IDisposable + public class Lucene45DocValuesProducer : DocValuesProducer // LUCENENET specific - removed IDisposable, it is already implemented in base class { private readonly IDictionary numerics; private readonly IDictionary binaries; @@ -85,7 +86,7 @@ protected internal Lucene45DocValuesProducer(SegmentReadState state, string data ordIndexes = new Dictionary(); binaries = new Dictionary(); sortedSets = new Dictionary(); - ReadFields(@in, state.FieldInfos); + ReadFields(@in /*, state.FieldInfos // LUCENENET: Not read */); if (version >= Lucene45DocValuesFormat.VERSION_CHECKSUM) { @@ -136,7 +137,7 @@ protected internal Lucene45DocValuesProducer(SegmentReadState state, string data ramBytesUsed = new AtomicInt64(RamUsageEstimator.ShallowSizeOfInstance(this.GetType())); } - private void ReadSortedField(int fieldNumber, IndexInput meta, FieldInfos infos) + private void ReadSortedField(int fieldNumber, IndexInput meta /*, FieldInfos infos // LUCENENET: Never read */) { // sorted = binary + numeric if (meta.ReadVInt32() != fieldNumber) @@ -162,7 +163,7 @@ private void ReadSortedField(int fieldNumber, IndexInput meta, FieldInfos infos) ords[fieldNumber] = n; } - private void ReadSortedSetFieldWithAddresses(int fieldNumber, IndexInput meta, FieldInfos infos) + private void ReadSortedSetFieldWithAddresses(int fieldNumber, IndexInput meta 
/*, FieldInfos infos // LUCENENET: Never read */) { // sortedset = binary + numeric (addresses) + ordIndex if (meta.ReadVInt32() != fieldNumber) @@ -199,7 +200,7 @@ private void ReadSortedSetFieldWithAddresses(int fieldNumber, IndexInput meta, F ordIndexes[fieldNumber] = n2; } - private void ReadFields(IndexInput meta, FieldInfos infos) + private void ReadFields(IndexInput meta /*, FieldInfos infos // LUCENENET: Not read */) { int fieldNumber = meta.ReadVInt32(); while (fieldNumber != -1) @@ -224,7 +225,7 @@ private void ReadFields(IndexInput meta, FieldInfos infos) } else if (type == Lucene45DocValuesFormat.SORTED) { - ReadSortedField(fieldNumber, meta, infos); + ReadSortedField(fieldNumber, meta /*, infos // LUCENENET: Never read */); } else if (type == Lucene45DocValuesFormat.SORTED_SET) { @@ -232,7 +233,7 @@ private void ReadFields(IndexInput meta, FieldInfos infos) sortedSets[fieldNumber] = ss; if (ss.Format == Lucene45DocValuesConsumer.SORTED_SET_WITH_ADDRESSES) { - ReadSortedSetFieldWithAddresses(fieldNumber, meta, infos); + ReadSortedSetFieldWithAddresses(fieldNumber, meta/*, infos // LUCENENET: Never read */); } else if (ss.Format == Lucene45DocValuesConsumer.SORTED_SET_SINGLE_VALUED_SORTED) { @@ -244,7 +245,7 @@ private void ReadFields(IndexInput meta, FieldInfos infos) { throw new Exception("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")"); } - ReadSortedField(fieldNumber, meta, infos); + ReadSortedField(fieldNumber, meta/*, infos // LUCENENET: Never read */); } else { @@ -352,14 +353,17 @@ internal virtual SortedSetEntry ReadSortedSetEntry(IndexInput meta) return entry; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override NumericDocValues GetNumeric(FieldInfo field) { NumericEntry entry = numerics[field.Number]; return GetNumeric(entry); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() => ramBytesUsed; + [MethodImpl(MethodImplOptions.AggressiveInlining)] 
public override void CheckIntegrity() { if (version >= Lucene45DocValuesFormat.VERSION_CHECKSUM) @@ -383,13 +387,13 @@ internal virtual Int64Values GetNumeric(NumericEntry entry) long min = entry.minValue; long mult = entry.gcd; BlockPackedReader quotientReader = new BlockPackedReader(data, entry.PackedInt32sVersion, entry.BlockSize, entry.Count, true); - return new Int64ValuesAnonymousInnerClassHelper(this, min, mult, quotientReader); + return new Int64ValuesAnonymousInnerClassHelper(min, mult, quotientReader); case Lucene45DocValuesConsumer.TABLE_COMPRESSED: long[] table = entry.table; int bitsRequired = PackedInt32s.BitsRequired(table.Length - 1); PackedInt32s.Reader ords = PackedInt32s.GetDirectReaderNoHeader(data, PackedInt32s.Format.PACKED, entry.PackedInt32sVersion, (int)entry.Count, bitsRequired); - return new Int64ValuesAnonymousInnerClassHelper2(this, table, ords); + return new Int64ValuesAnonymousInnerClassHelper2(table, ords); default: throw new Exception(); @@ -398,20 +402,18 @@ internal virtual Int64Values GetNumeric(NumericEntry entry) private class Int64ValuesAnonymousInnerClassHelper : Int64Values { - private readonly Lucene45DocValuesProducer outerInstance; - - private long min; - private long mult; - private BlockPackedReader quotientReader; + private readonly long min; + private readonly long mult; + private readonly BlockPackedReader quotientReader; - public Int64ValuesAnonymousInnerClassHelper(Lucene45DocValuesProducer outerInstance, long min, long mult, BlockPackedReader quotientReader) + public Int64ValuesAnonymousInnerClassHelper(long min, long mult, BlockPackedReader quotientReader) { - this.outerInstance = outerInstance; this.min = min; this.mult = mult; this.quotientReader = quotientReader; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(long id) { return min + mult * quotientReader.Get(id); @@ -420,18 +422,16 @@ public override long Get(long id) private class Int64ValuesAnonymousInnerClassHelper2 : 
Int64Values { - private readonly Lucene45DocValuesProducer outerInstance; - - private long[] table; - private PackedInt32s.Reader ords; + private readonly long[] table; + private readonly PackedInt32s.Reader ords; - public Int64ValuesAnonymousInnerClassHelper2(Lucene45DocValuesProducer outerInstance, long[] table, PackedInt32s.Reader ords) + public Int64ValuesAnonymousInnerClassHelper2(long[] table, PackedInt32s.Reader ords) { - this.outerInstance = outerInstance; this.table = table; this.ords = ords; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(long id) { return table[(int)ords.Get((int)id)]; @@ -444,7 +444,7 @@ public override BinaryDocValues GetBinary(FieldInfo field) switch (bytes.format) { case Lucene45DocValuesConsumer.BINARY_FIXED_UNCOMPRESSED: - return GetFixedBinary(field, bytes); + return GetFixedBinary(/*field, LUCENENET: Never read */ bytes); case Lucene45DocValuesConsumer.BINARY_VARIABLE_UNCOMPRESSED: return GetVariableBinary(field, bytes); @@ -457,23 +457,21 @@ public override BinaryDocValues GetBinary(FieldInfo field) } } - private BinaryDocValues GetFixedBinary(FieldInfo field, BinaryEntry bytes) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private BinaryDocValues GetFixedBinary(/* FieldInfo field, // LUCENENET: Never read */ BinaryEntry bytes) { IndexInput data = (IndexInput)this.data.Clone(); - return new Int64BinaryDocValuesAnonymousInnerClassHelper(this, bytes, data); + return new Int64BinaryDocValuesAnonymousInnerClassHelper(bytes, data); } private class Int64BinaryDocValuesAnonymousInnerClassHelper : Int64BinaryDocValues { - private readonly Lucene45DocValuesProducer outerInstance; + private readonly Lucene45DocValuesProducer.BinaryEntry bytes; + private readonly IndexInput data; - private Lucene45DocValuesProducer.BinaryEntry bytes; - private IndexInput data; - - public Int64BinaryDocValuesAnonymousInnerClassHelper(Lucene45DocValuesProducer outerInstance, Lucene45DocValuesProducer.BinaryEntry 
bytes, IndexInput data) + public Int64BinaryDocValuesAnonymousInnerClassHelper(Lucene45DocValuesProducer.BinaryEntry bytes, IndexInput data) { - this.outerInstance = outerInstance; this.bytes = bytes; this.data = data; } @@ -509,8 +507,7 @@ protected virtual MonotonicBlockPackedReader GetAddressInstance(IndexInput data, MonotonicBlockPackedReader addresses; lock (addressInstances) { - MonotonicBlockPackedReader addrInstance; - if (!addressInstances.TryGetValue(field.Number, out addrInstance) || addrInstance == null) + if (!addressInstances.TryGetValue(field.Number, out MonotonicBlockPackedReader addrInstance) || addrInstance == null) { data.Seek(bytes.AddressesOffset); addrInstance = new MonotonicBlockPackedReader(data, bytes.PackedInt32sVersion, bytes.BlockSize, bytes.Count, false); @@ -522,26 +519,24 @@ protected virtual MonotonicBlockPackedReader GetAddressInstance(IndexInput data, return addresses; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private BinaryDocValues GetVariableBinary(FieldInfo field, BinaryEntry bytes) { IndexInput data = (IndexInput)this.data.Clone(); MonotonicBlockPackedReader addresses = GetAddressInstance(data, field, bytes); - return new Int64BinaryDocValuesAnonymousInnerClassHelper2(this, bytes, data, addresses); + return new Int64BinaryDocValuesAnonymousInnerClassHelper2(bytes, data, addresses); } private class Int64BinaryDocValuesAnonymousInnerClassHelper2 : Int64BinaryDocValues { - private readonly Lucene45DocValuesProducer outerInstance; - - private Lucene45DocValuesProducer.BinaryEntry bytes; - private IndexInput data; - private MonotonicBlockPackedReader addresses; + private readonly Lucene45DocValuesProducer.BinaryEntry bytes; + private readonly IndexInput data; + private readonly MonotonicBlockPackedReader addresses; - public Int64BinaryDocValuesAnonymousInnerClassHelper2(Lucene45DocValuesProducer outerInstance, Lucene45DocValuesProducer.BinaryEntry bytes, IndexInput data, MonotonicBlockPackedReader addresses) + public 
Int64BinaryDocValuesAnonymousInnerClassHelper2(Lucene45DocValuesProducer.BinaryEntry bytes, IndexInput data, MonotonicBlockPackedReader addresses) { - this.outerInstance = outerInstance; this.bytes = bytes; this.data = data; this.addresses = addresses; @@ -581,8 +576,7 @@ protected virtual MonotonicBlockPackedReader GetIntervalInstance(IndexInput data long interval = bytes.AddressInterval; lock (addressInstances) { - MonotonicBlockPackedReader addrInstance; - if (!addressInstances.TryGetValue(field.Number, out addrInstance)) + if (!addressInstances.TryGetValue(field.Number, out MonotonicBlockPackedReader addrInstance)) { data.Seek(bytes.AddressesOffset); long size; @@ -603,6 +597,7 @@ protected virtual MonotonicBlockPackedReader GetIntervalInstance(IndexInput data return addresses; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private BinaryDocValues GetCompressedBinary(FieldInfo field, BinaryEntry bytes) { IndexInput data = (IndexInput)this.data.Clone(); @@ -621,30 +616,29 @@ public override SortedDocValues GetSorted(FieldInfo field) data.Seek(entry.Offset); BlockPackedReader ordinals = new BlockPackedReader(data, entry.PackedInt32sVersion, entry.BlockSize, entry.Count, true); - return new SortedDocValuesAnonymousInnerClassHelper(this, valueCount, binary, ordinals); + return new SortedDocValuesAnonymousInnerClassHelper(valueCount, binary, ordinals); } private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues { - private readonly Lucene45DocValuesProducer outerInstance; - - private int valueCount; - private BinaryDocValues binary; - private BlockPackedReader ordinals; + private readonly int valueCount; + private readonly BinaryDocValues binary; + private readonly BlockPackedReader ordinals; - public SortedDocValuesAnonymousInnerClassHelper(Lucene45DocValuesProducer outerInstance, int valueCount, BinaryDocValues binary, BlockPackedReader ordinals) + public SortedDocValuesAnonymousInnerClassHelper(int valueCount, BinaryDocValues binary, 
BlockPackedReader ordinals) { - this.outerInstance = outerInstance; this.valueCount = valueCount; this.binary = binary; this.ordinals = ordinals; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetOrd(int docID) { return (int)ordinals.Get(docID); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void LookupOrd(int ord, BytesRef result) { binary.Get(ord, result); @@ -652,11 +646,12 @@ public override void LookupOrd(int ord, BytesRef result) public override int ValueCount => valueCount; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int LookupTerm(BytesRef key) { - if (binary is CompressedBinaryDocValues) + if (binary is CompressedBinaryDocValues compressedBinaryDocValues) { - return (int)((CompressedBinaryDocValues)binary).LookupTerm(key); + return (int)compressedBinaryDocValues.LookupTerm(key); } else { @@ -664,11 +659,12 @@ public override int LookupTerm(BytesRef key) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetTermsEnum() { - if (binary is CompressedBinaryDocValues) + if (binary is CompressedBinaryDocValues compressedBinaryDocValues) { - return ((CompressedBinaryDocValues)binary).GetTermsEnum(); + return compressedBinaryDocValues.GetTermsEnum(); } else { @@ -687,8 +683,7 @@ protected virtual MonotonicBlockPackedReader GetOrdIndexInstance(IndexInput data MonotonicBlockPackedReader ordIndex; lock (ordIndexInstances) { - MonotonicBlockPackedReader ordIndexInstance; - if (!ordIndexInstances.TryGetValue(field.Number, out ordIndexInstance)) + if (!ordIndexInstances.TryGetValue(field.Number, out MonotonicBlockPackedReader ordIndexInstance)) { data.Seek(entry.Offset); ordIndexInstance = new MonotonicBlockPackedReader(data, entry.PackedInt32sVersion, entry.BlockSize, entry.Count, false); @@ -721,21 +716,18 @@ public override SortedSetDocValues GetSortedSet(FieldInfo field) // but the addresses to the ord stream are in RAM MonotonicBlockPackedReader ordIndex = 
GetOrdIndexInstance(data, field, ordIndexes[field.Number]); - return new RandomAccessOrdsAnonymousInnerClassHelper(this, valueCount, binary, ordinals, ordIndex); + return new RandomAccessOrdsAnonymousInnerClassHelper(valueCount, binary, ordinals, ordIndex); } private class RandomAccessOrdsAnonymousInnerClassHelper : RandomAccessOrds { - private readonly Lucene45DocValuesProducer outerInstance; + private readonly long valueCount; + private readonly Lucene45DocValuesProducer.Int64BinaryDocValues binary; + private readonly Int64Values ordinals; + private readonly MonotonicBlockPackedReader ordIndex; - private long valueCount; - private Lucene45DocValuesProducer.Int64BinaryDocValues binary; - private Int64Values ordinals; - private MonotonicBlockPackedReader ordIndex; - - public RandomAccessOrdsAnonymousInnerClassHelper(Lucene45DocValuesProducer outerInstance, long valueCount, Lucene45DocValuesProducer.Int64BinaryDocValues binary, Int64Values ordinals, MonotonicBlockPackedReader ordIndex) + public RandomAccessOrdsAnonymousInnerClassHelper(long valueCount, Lucene45DocValuesProducer.Int64BinaryDocValues binary, Int64Values ordinals, MonotonicBlockPackedReader ordIndex) { - this.outerInstance = outerInstance; this.valueCount = valueCount; this.binary = binary; this.ordinals = ordinals; @@ -746,6 +738,7 @@ public RandomAccessOrdsAnonymousInnerClassHelper(Lucene45DocValuesProducer outer internal long offset; internal long endOffset; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long NextOrd() { if (offset == endOffset) @@ -760,12 +753,14 @@ public override long NextOrd() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void SetDocument(int docID) { startOffset = offset = (docID == 0 ? 
0 : ordIndex.Get(docID - 1)); endOffset = ordIndex.Get(docID); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void LookupOrd(long ord, BytesRef result) { binary.Get(ord, result); @@ -773,11 +768,12 @@ public override void LookupOrd(long ord, BytesRef result) public override long ValueCount => valueCount; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long LookupTerm(BytesRef key) { - if (binary is CompressedBinaryDocValues) + if (binary is CompressedBinaryDocValues compressedBinaryDocValues) { - return ((CompressedBinaryDocValues)binary).LookupTerm(key); + return compressedBinaryDocValues.LookupTerm(key); } else { @@ -785,11 +781,12 @@ public override long LookupTerm(BytesRef key) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override TermsEnum GetTermsEnum() { - if (binary is CompressedBinaryDocValues) + if (binary is CompressedBinaryDocValues compressedBinaryDocValues) { - return ((CompressedBinaryDocValues)binary).GetTermsEnum(); + return compressedBinaryDocValues.GetTermsEnum(); } else { @@ -797,17 +794,20 @@ public override TermsEnum GetTermsEnum() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long OrdAt(int index) { return ordinals.Get(startOffset + index); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Cardinality() { return (int)(endOffset - startOffset); } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private IBits GetMissingBits(long offset) { if (offset == -1) @@ -825,8 +825,8 @@ private class BitsAnonymousInnerClassHelper : IBits { private readonly Lucene45DocValuesProducer outerInstance; - private long offset; - private IndexInput @in; + private readonly long offset; + private readonly IndexInput @in; public BitsAnonymousInnerClassHelper(Lucene45DocValuesProducer outerInstance, long offset, IndexInput @in) { @@ -874,6 +874,7 @@ public override IBits GetDocsWithField(FieldInfo field) } } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -1051,6 +1052,7 @@ internal virtual long LookupTerm(BytesRef key) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual TermsEnum GetTermsEnum() { try @@ -1063,6 +1065,7 @@ internal virtual TermsEnum GetTermsEnum() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual TermsEnum GetTermsEnum(IndexInput input) { input.Seek(bytes.offset); @@ -1174,6 +1177,7 @@ public override TermsEnum.SeekStatus SeekCeil(BytesRef text) return TermsEnum.SeekStatus.END; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void SeekExact(long ord) { DoSeek(ord); @@ -1201,6 +1205,7 @@ private void DoSeek(long ord) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SetTerm() { // TODO: is there a cleaner way diff --git a/src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs b/src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs index 1abc49fb3d..ce48a90e4a 100644 --- a/src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs +++ b/src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Codecs.Lucene46 { /* @@ -58,6 +60,7 @@ public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene46Codec outerInstan this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override PostingsFormat GetPostingsFormatForField(string field) { return outerInstance.GetPostingsFormatForField(field); @@ -75,6 +78,7 @@ public PerFieldDocValuesFormatAnonymousInnerClassHelper(Lucene46Codec outerInsta this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override DocValuesFormat GetDocValuesFormatForField(string field) { return outerInstance.GetDocValuesFormatForField(field); @@ -108,6 +112,7 @@ public Lucene46Codec() /// /// The default implementation always returns "Lucene41" /// + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual PostingsFormat GetPostingsFormatForField(string field) { // LUCENENET specific - lazy initialize the codec to ensure we get the correct type if overridden. @@ -124,6 +129,7 @@ public virtual PostingsFormat GetPostingsFormatForField(string field) /// /// The default implementation always returns "Lucene45" /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual DocValuesFormat GetDocValuesFormatForField(string field) { // LUCENENET specific - lazy initialize the codec to ensure we get the correct type if overridden. diff --git a/src/Lucene.Net/Codecs/MappingMultiDocsAndPositionsEnum.cs b/src/Lucene.Net/Codecs/MappingMultiDocsAndPositionsEnum.cs index aea72dce18..fc5622c655 100644 --- a/src/Lucene.Net/Codecs/MappingMultiDocsAndPositionsEnum.cs +++ b/src/Lucene.Net/Codecs/MappingMultiDocsAndPositionsEnum.cs @@ -1,6 +1,7 @@ using Lucene.Net.Support; using System; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs { @@ -49,6 +50,7 @@ public MappingMultiDocsAndPositionsEnum() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal MappingMultiDocsAndPositionsEnum Reset(MultiDocsAndPositionsEnum postingsEnum) { this.numSubs = postingsEnum.NumSubs; @@ -126,6 +128,7 @@ public override int NextDoc() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int NextPosition() { return current.NextPosition(); @@ -135,11 +138,13 @@ public override int NextPosition() public override int EndOffset => current.EndOffset; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BytesRef GetPayload() { return current.GetPayload(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { long cost = 0; diff --git a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs index 32e1fe4002..7a61290e2b 100644 --- 
a/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs +++ b/src/Lucene.Net/Codecs/MappingMultiDocsEnum.cs @@ -1,8 +1,8 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; -using System.Diagnostics; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs { @@ -128,6 +128,7 @@ public override int NextDoc() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { long cost = 0; diff --git a/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs b/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs index 1329c7d330..943b0589ea 100644 --- a/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs +++ b/src/Lucene.Net/Codecs/MultiLevelSkipListReader.cs @@ -1,6 +1,6 @@ using Lucene.Net.Diagnostics; using System; -using System.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs { @@ -52,26 +52,26 @@ public abstract class MultiLevelSkipListReader : IDisposable // the skipInterval. The top level can not contain more than // skipLevel entries, the second top level can not contain more // than skipLevel^2 entries and so forth. - private int numberOfLevelsToBuffer = 1; + private readonly int numberOfLevelsToBuffer = 1; // LUCENENET: marked readonly private int docCount; private bool haveSkipped; /// /// SkipStream for each level. - private IndexInput[] skipStream; + private readonly IndexInput[] skipStream; /// /// The start pointer of each skip level. - private long[] skipPointer; + private readonly long[] skipPointer; /// /// SkipInterval of each level. - private int[] skipInterval; + private readonly int[] skipInterval; /// /// Number of docs skipped per level. - private int[] numSkipped; + private readonly int[] numSkipped; /// /// Doc id of current skip entry per level. @@ -83,7 +83,7 @@ public abstract class MultiLevelSkipListReader : IDisposable /// /// Child pointer of current skip entry per level. 
- private long[] childPointer; + private readonly long[] childPointer; /// /// childPointer of last read skip entry with docId <= @@ -91,7 +91,7 @@ public abstract class MultiLevelSkipListReader : IDisposable /// private long lastChildPointer; - private bool inputIsBuffered; + private readonly bool inputIsBuffered; private readonly int skipMultiplier; /// @@ -327,6 +327,7 @@ private void LoadSkipLevels() /// /// Copies the values of the last read skip entry on this . + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual void SetLastSkipData(int level) { lastDoc = m_skipDoc[level]; @@ -338,7 +339,7 @@ protected virtual void SetLastSkipData(int level) private sealed class SkipBuffer : IndexInput { private byte[] data; - private long pointer; + private readonly long pointer; private int pos; internal SkipBuffer(IndexInput input, int length) @@ -357,6 +358,7 @@ protected override void Dispose(bool disposing) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetFilePointer() { return pointer + pos; @@ -364,17 +366,20 @@ public override long GetFilePointer() public override long Length => data.Length; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override byte ReadByte() { return data[pos++]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void ReadBytes(byte[] b, int offset, int len) { Array.Copy(data, pos, b, offset, len); pos += len; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Seek(long pos) { this.pos = (int)(pos - pointer); diff --git a/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs b/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs index b234a5b2f2..c65f598ef9 100644 --- a/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs +++ b/src/Lucene.Net/Codecs/MultiLevelSkipListWriter.cs @@ -1,6 +1,6 @@ using Lucene.Net.Diagnostics; -using System.Diagnostics; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Codecs { @@ -61,11 
+61,11 @@ public abstract class MultiLevelSkipListWriter /// /// The skip interval in the list with level = 0. - private int skipInterval; + private readonly int skipInterval; // LUCENENET: marked readonly /// /// SkipInterval used for level > 0. - private int skipMultiplier; + private readonly int skipMultiplier; // LUCENENET: marked readonly /// /// For every skip level a different buffer is used. @@ -107,6 +107,7 @@ protected MultiLevelSkipListWriter(int skipInterval, int maxSkipLevels, int df) /// /// Allocates internal skip buffers. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual void Init() { skipBuffer = new RAMOutputStream[m_numberOfSkipLevels]; @@ -118,6 +119,7 @@ protected virtual void Init() /// /// Creates new buffers or empties the existing ones. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void ResetSkip() { if (skipBuffer == null) diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs index 8d1eb9d68d..ddc3102173 100644 --- a/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs +++ b/src/Lucene.Net/Codecs/PerField/PerFieldDocValuesFormat.cs @@ -2,8 +2,8 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Codecs.PerField @@ -76,11 +76,12 @@ public abstract class PerFieldDocValuesFormat : DocValuesFormat /// /// Sole constructor. 
- public PerFieldDocValuesFormat() + protected PerFieldDocValuesFormat() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed DocValuesConsumer FieldsConsumer(SegmentWriteState state) { return new FieldsWriter(this, state); @@ -112,21 +113,25 @@ public FieldsWriter(PerFieldDocValuesFormat outerInstance, SegmentWriteState sta segmentWriteState = state; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void AddNumericField(FieldInfo field, IEnumerable values) { GetInstance(field).AddNumericField(field, values); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void AddBinaryField(FieldInfo field, IEnumerable values) { GetInstance(field).AddBinaryField(field, values); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void AddSortedField(FieldInfo field, IEnumerable values, IEnumerable docToOrd) { GetInstance(field).AddSortedField(field, values, docToOrd); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void AddSortedSetField(FieldInfo field, IEnumerable values, IEnumerable docToOrdCount, IEnumerable ords) { GetInstance(field).AddSortedSetField(field, values, docToOrdCount, ords); @@ -159,8 +164,7 @@ internal virtual DocValuesConsumer GetInstance(FieldInfo field) int? 
suffix = null; - ConsumerAndSuffix consumer; - if (!formats.TryGetValue(format, out consumer) || consumer == null) + if (!formats.TryGetValue(format, out ConsumerAndSuffix consumer) || consumer == null) { // First time we are seeing this format; create a new instance @@ -211,6 +215,7 @@ internal virtual DocValuesConsumer GetInstance(FieldInfo field) return consumer.Consumer; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -221,11 +226,13 @@ protected override void Dispose(bool disposing) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static string GetSuffix(string formatName, string suffix) { return formatName + "_" + suffix; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static string GetFullSegmentSuffix(string outerSegmentSuffix, string segmentSuffix) { if (outerSegmentSuffix.Length == 0) @@ -304,57 +311,56 @@ internal FieldsReader(PerFieldDocValuesFormat outerInstance, FieldsReader other) // Then rebuild fields: foreach (KeyValuePair ent in other.fields) { - DocValuesProducer producer; - oldToNew.TryGetValue(ent.Value, out producer); + oldToNew.TryGetValue(ent.Value, out DocValuesProducer producer); if (Debugging.AssertsEnabled) Debugging.Assert(producer != null); fields[ent.Key] = producer; } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override NumericDocValues GetNumeric(FieldInfo field) { - DocValuesProducer producer; - if (fields.TryGetValue(field.Name, out producer) && producer != null) + if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null) { return producer.GetNumeric(field); } return null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override BinaryDocValues GetBinary(FieldInfo field) { - DocValuesProducer producer; - if (fields.TryGetValue(field.Name, out producer) && producer != null) + if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null) { 
return producer.GetBinary(field); } return null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override SortedDocValues GetSorted(FieldInfo field) { - DocValuesProducer producer; - if (fields.TryGetValue(field.Name, out producer) && producer != null) + if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null) { return producer.GetSorted(field); } return null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override SortedSetDocValues GetSortedSet(FieldInfo field) { - DocValuesProducer producer; - if (fields.TryGetValue(field.Name, out producer) && producer != null) + if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null) { return producer.GetSortedSet(field); } return null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override IBits GetDocsWithField(FieldInfo field) { - DocValuesProducer producer; - if (fields.TryGetValue(field.Name, out producer) && producer != null) + if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null) { return producer.GetDocsWithField(field); } @@ -369,11 +375,13 @@ protected override void Dispose(bool disposing) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { return new FieldsReader(outerInstance, this); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { long size = 0; @@ -385,6 +393,7 @@ public override long RamBytesUsed() return size; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { foreach (DocValuesProducer format in formats.Values) @@ -394,6 +403,7 @@ public override void CheckIntegrity() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed DocValuesProducer FieldsProducer(SegmentReadState state) { return new FieldsReader(this, state); diff --git a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs 
b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs index 64c046a482..def4773904 100644 --- a/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs +++ b/src/Lucene.Net/Codecs/PerField/PerFieldPostingsFormat.cs @@ -1,8 +1,8 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; -using System.Diagnostics; using System.Globalization; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Codecs.PerField @@ -70,11 +70,12 @@ public abstract class PerFieldPostingsFormat : PostingsFormat /// /// Sole constructor. - public PerFieldPostingsFormat() + protected PerFieldPostingsFormat() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base() { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed FieldsConsumer FieldsConsumer(SegmentWriteState state) { return new FieldsWriter(this, state); @@ -85,6 +86,7 @@ internal class FieldsConsumerAndSuffix : IDisposable internal FieldsConsumer Consumer { get; set; } internal int Suffix { get; set; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Dispose() { Consumer.Dispose(); @@ -120,8 +122,7 @@ public override TermsConsumer AddField(FieldInfo field) int? 
suffix; - FieldsConsumerAndSuffix consumer; - if (!formats.TryGetValue(format, out consumer) || consumer == null) + if (!formats.TryGetValue(format, out FieldsConsumerAndSuffix consumer) || consumer == null) { // First time we are seeing this format; create a new instance @@ -136,8 +137,8 @@ public override TermsConsumer AddField(FieldInfo field) } suffixes[formatName] = suffix; - string segmentSuffix = GetFullSegmentSuffix(field.Name, - segmentWriteState.SegmentSuffix, + string segmentSuffix = GetFullSegmentSuffix(field.Name, + segmentWriteState.SegmentSuffix, GetSuffix(formatName, Convert.ToString(suffix, CultureInfo.InvariantCulture))); consumer = new FieldsConsumerAndSuffix(); consumer.Consumer = format.FieldsConsumer(new SegmentWriteState(segmentWriteState, segmentSuffix)); @@ -162,6 +163,7 @@ public override TermsConsumer AddField(FieldInfo field) return consumer.Consumer.AddField(field); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -172,11 +174,13 @@ protected override void Dispose(bool disposing) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static string GetSuffix(string formatName, string suffix) { return formatName + "_" + suffix; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static string GetFullSegmentSuffix(string fieldName, string outerSegmentSuffix, string segmentSuffix) { if (outerSegmentSuffix.Length == 0) @@ -194,16 +198,12 @@ internal static string GetFullSegmentSuffix(string fieldName, string outerSegmen private class FieldsReader : FieldsProducer { - private readonly PerFieldPostingsFormat outerInstance; - // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java internal readonly IDictionary fields = new JCG.SortedDictionary(StringComparer.Ordinal); internal readonly IDictionary formats = new Dictionary(); - public FieldsReader(PerFieldPostingsFormat outerInstance, SegmentReadState readState) + public 
FieldsReader(SegmentReadState readState) { - this.outerInstance = outerInstance; - // Read _X.per and init each format: bool success = false; try @@ -242,24 +242,26 @@ public FieldsReader(PerFieldPostingsFormat outerInstance, SegmentReadState readS } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override IEnumerator GetEnumerator() { return fields.Keys.GetEnumerator(); // LUCENENET NOTE: enumerators are not writable in .NET } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Terms GetTerms(string field) { - FieldsProducer fieldsProducer; - if (fields.TryGetValue(field, out fieldsProducer) && fieldsProducer != null) + if (fields.TryGetValue(field, out FieldsProducer fieldsProducer) && fieldsProducer != null) { return fieldsProducer.GetTerms(field); } - + return null; } public override int Count => fields.Count; + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Dispose(bool disposing) { if (disposing) @@ -268,6 +270,7 @@ protected override void Dispose(bool disposing) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { long sizeInBytes = 0; @@ -279,6 +282,7 @@ public override long RamBytesUsed() return sizeInBytes; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void CheckIntegrity() { foreach (FieldsProducer producer in formats.Values) @@ -288,9 +292,10 @@ public override void CheckIntegrity() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed FieldsProducer FieldsProducer(SegmentReadState state) { - return new FieldsReader(this, state); + return new FieldsReader(state); } /// diff --git a/src/Lucene.Net/Codecs/PostingsFormat.cs b/src/Lucene.Net/Codecs/PostingsFormat.cs index 2b4f352151..a315a2e133 100644 --- a/src/Lucene.Net/Codecs/PostingsFormat.cs +++ b/src/Lucene.Net/Codecs/PostingsFormat.cs @@ -149,9 +149,9 @@ public static ICollection AvailablePostingsFormats { get { - if (postingsFormatFactory is 
IServiceListable) + if (postingsFormatFactory is IServiceListable serviceListable) { - return ((IServiceListable)postingsFormatFactory).AvailableServices; + return serviceListable.AvailableServices; } else { diff --git a/src/Lucene.Net/Document/CompressionTools.cs b/src/Lucene.Net/Document/CompressionTools.cs index 5199ab7e72..87d10ada9b 100644 --- a/src/Lucene.Net/Document/CompressionTools.cs +++ b/src/Lucene.Net/Document/CompressionTools.cs @@ -27,12 +27,9 @@ namespace Lucene.Net.Documents /// this class uses the /// class to compress and decompress. /// - public class CompressionTools + public static class CompressionTools // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { // Export only static methods - private CompressionTools() - { - } /// /// Compresses the specified range using the @@ -45,7 +42,6 @@ public static byte[] Compress(byte[] value, int offset, int length, CompressionL { using (DeflateStream deflateStream = new DeflateStream(compressionMemoryStream, compressionLevel)) { - deflateStream.Write(value, offset, length); } resultArray = compressionMemoryStream.ToArray(); @@ -114,12 +110,10 @@ public static byte[] Decompress(byte[] value, int offset, int length) using (MemoryStream decompressedStream = new MemoryStream()) { - using (MemoryStream compressedStream = new MemoryStream(value)) + using (MemoryStream compressedStream = new MemoryStream(value, offset, length)) { - using (DeflateStream dStream = new DeflateStream(compressedStream, CompressionMode.Decompress)) - { - dStream.CopyTo(decompressedStream); - } + using DeflateStream dStream = new DeflateStream(compressedStream, CompressionMode.Decompress); + dStream.CopyTo(decompressedStream); } decompressedBytes = decompressedStream.ToArray(); } diff --git a/src/Lucene.Net/Document/DateTools.cs b/src/Lucene.Net/Document/DateTools.cs index 0851e4feb8..7dca3a8d7d 100644 --- a/src/Lucene.Net/Document/DateTools.cs +++ b/src/Lucene.Net/Document/DateTools.cs @@ -41,13 
+41,13 @@ namespace Lucene.Net.Documents /// public static class DateTools { - private static readonly string YEAR_FORMAT = "yyyy"; - private static readonly string MONTH_FORMAT = "yyyyMM"; - private static readonly string DAY_FORMAT = "yyyyMMdd"; - private static readonly string HOUR_FORMAT = "yyyyMMddHH"; - private static readonly string MINUTE_FORMAT = "yyyyMMddHHmm"; - private static readonly string SECOND_FORMAT = "yyyyMMddHHmmss"; - private static readonly string MILLISECOND_FORMAT = "yyyyMMddHHmmssfff"; + private const string YEAR_FORMAT = "yyyy"; + private const string MONTH_FORMAT = "yyyyMM"; + private const string DAY_FORMAT = "yyyyMMdd"; + private const string HOUR_FORMAT = "yyyyMMddHH"; + private const string MINUTE_FORMAT = "yyyyMMddHHmm"; + private const string SECOND_FORMAT = "yyyyMMddHHmmss"; + private const string MILLISECOND_FORMAT = "yyyyMMddHHmmssfff"; // LUCENENET - not used //private static readonly System.Globalization.Calendar calInstance = new System.Globalization.GregorianCalendar(); diff --git a/src/Lucene.Net/Document/DerefBytesDocValuesField.cs b/src/Lucene.Net/Document/DerefBytesDocValuesField.cs index 4032ce9e72..c6c37448a8 100644 --- a/src/Lucene.Net/Document/DerefBytesDocValuesField.cs +++ b/src/Lucene.Net/Document/DerefBytesDocValuesField.cs @@ -63,7 +63,9 @@ public DerefBytesDocValuesField(string name, BytesRef bytes) /// binary content /// (ignored) /// if the field name is null +#pragma warning disable IDE0060 // Remove unused parameter public DerefBytesDocValuesField(string name, BytesRef bytes, bool isFixedLength) +#pragma warning restore IDE0060 // Remove unused parameter : base(name, bytes) { } diff --git a/src/Lucene.Net/Document/Field.cs b/src/Lucene.Net/Document/Field.cs index e1c015a011..a620ebc823 100644 --- a/src/Lucene.Net/Document/Field.cs +++ b/src/Lucene.Net/Document/Field.cs @@ -887,12 +887,6 @@ public virtual TokenStream GetTokenStream(Analyzer analyzer) internal sealed class StringTokenStream : TokenStream { - 
internal void InitializeInstanceFields() - { - termAttribute = AddAttribute(); - offsetAttribute = AddAttribute(); - } - internal ICharTermAttribute termAttribute; internal IOffsetAttribute offsetAttribute; internal bool used = false; @@ -905,7 +899,8 @@ internal void InitializeInstanceFields() /// internal StringTokenStream() { - InitializeInstanceFields(); + termAttribute = AddAttribute(); + offsetAttribute = AddAttribute(); } /// diff --git a/src/Lucene.Net/Document/SortedBytesDocValuesField.cs b/src/Lucene.Net/Document/SortedBytesDocValuesField.cs index 2252adad99..f6a199035e 100644 --- a/src/Lucene.Net/Document/SortedBytesDocValuesField.cs +++ b/src/Lucene.Net/Document/SortedBytesDocValuesField.cs @@ -66,7 +66,9 @@ public SortedBytesDocValuesField(string name, BytesRef bytes) /// binary content /// (ignored) /// if the field is null +#pragma warning disable IDE0060 // Remove unused parameter public SortedBytesDocValuesField(string name, BytesRef bytes, bool isFixedLength) +#pragma warning restore IDE0060 // Remove unused parameter : base(name, bytes) { } diff --git a/src/Lucene.Net/Document/StraightBytesDocValuesField.cs b/src/Lucene.Net/Document/StraightBytesDocValuesField.cs index a1db114477..9c61cdfeef 100644 --- a/src/Lucene.Net/Document/StraightBytesDocValuesField.cs +++ b/src/Lucene.Net/Document/StraightBytesDocValuesField.cs @@ -64,7 +64,9 @@ public StraightBytesDocValuesField(string name, BytesRef bytes) /// binary content /// (ignored) /// if the field is null +#pragma warning disable IDE0060 // Remove unused parameter public StraightBytesDocValuesField(string name, BytesRef bytes, bool isFixedLength) +#pragma warning restore IDE0060 // Remove unused parameter : base(name, bytes) { } diff --git a/src/Lucene.Net/Index/AtomicReader.cs b/src/Lucene.Net/Index/AtomicReader.cs index 1ecd1192b2..f0d5cbb681 100644 --- a/src/Lucene.Net/Index/AtomicReader.cs +++ b/src/Lucene.Net/Index/AtomicReader.cs @@ -49,12 +49,7 @@ namespace Lucene.Net.Index /// public 
abstract class AtomicReader : IndexReader { - private void InitializeInstanceFields() - { - readerContext = new AtomicReaderContext(this); - } - - private AtomicReaderContext readerContext; + private readonly AtomicReaderContext readerContext; // LUCENENET: marked readonly /// /// Sole constructor. (For invocation by subclass @@ -63,7 +58,7 @@ private void InitializeInstanceFields() protected AtomicReader() : base() { - InitializeInstanceFields(); + readerContext = new AtomicReaderContext(this); } public sealed override IndexReaderContext Context diff --git a/src/Lucene.Net/Index/BinaryDocValuesFieldUpdates.cs b/src/Lucene.Net/Index/BinaryDocValuesFieldUpdates.cs index 8e49c26242..28a8d8198f 100644 --- a/src/Lucene.Net/Index/BinaryDocValuesFieldUpdates.cs +++ b/src/Lucene.Net/Index/BinaryDocValuesFieldUpdates.cs @@ -121,7 +121,7 @@ public override void Reset() private FixedBitSet docsWithField; private PagedMutable docs; private PagedGrowableWriter offsets, lengths; - private BytesRef values; + private readonly BytesRef values; // LUCENENET: marked readonly private int size; public BinaryDocValuesFieldUpdates(string field, int maxDoc) @@ -178,23 +178,20 @@ public override DocValuesFieldUpdates.Iterator GetIterator() PagedGrowableWriter lengths = this.lengths; BytesRef values = this.values; FixedBitSet docsWithField = this.docsWithField; - new InPlaceMergeSorterAnonymousInnerClassHelper(this, docs, offsets, lengths, docsWithField).Sort(0, size); + new InPlaceMergeSorterAnonymousInnerClassHelper(docs, offsets, lengths, docsWithField).Sort(0, size); return new Iterator(size, offsets, lengths, docs, values, docsWithField); } private class InPlaceMergeSorterAnonymousInnerClassHelper : InPlaceMergeSorter { - private readonly BinaryDocValuesFieldUpdates outerInstance; - - private PagedMutable docs; - private PagedGrowableWriter offsets; - private PagedGrowableWriter lengths; - private FixedBitSet docsWithField; + private readonly PagedMutable docs; + private readonly 
PagedGrowableWriter offsets; + private readonly PagedGrowableWriter lengths; + private readonly FixedBitSet docsWithField; - public InPlaceMergeSorterAnonymousInnerClassHelper(BinaryDocValuesFieldUpdates outerInstance, PagedMutable docs, PagedGrowableWriter offsets, PagedGrowableWriter lengths, FixedBitSet docsWithField) + public InPlaceMergeSorterAnonymousInnerClassHelper(PagedMutable docs, PagedGrowableWriter offsets, PagedGrowableWriter lengths, FixedBitSet docsWithField) { - this.outerInstance = outerInstance; this.docs = docs; this.offsets = offsets; this.lengths = lengths; diff --git a/src/Lucene.Net/Index/BufferedUpdates.cs b/src/Lucene.Net/Index/BufferedUpdates.cs index 659c40ac05..191ddfdc49 100644 --- a/src/Lucene.Net/Index/BufferedUpdates.cs +++ b/src/Lucene.Net/Index/BufferedUpdates.cs @@ -151,7 +151,9 @@ load factor (say 2 * POINTER). Entry is object w/ internal readonly AtomicInt64 bytesUsed; - private static bool VERBOSE_DELETES = false; +#pragma warning disable CA1802 // Use literals where appropriate + private static readonly bool VERBOSE_DELETES = false; +#pragma warning restore CA1802 // Use literals where appropriate internal long gen; @@ -203,8 +205,7 @@ public override string ToString() public virtual void AddQuery(Query query, int docIDUpto) { - int? prev; - queries.TryGetValue(query, out prev); + queries.TryGetValue(query, out int? prev); queries[query] = docIDUpto; // increment bytes used only if the query wasn't added so far. if (prev == null) @@ -221,8 +222,7 @@ public virtual void AddDocID(int docID) public virtual void AddTerm(Term term, int docIDUpto) { - int? current; - terms.TryGetValue(term, out current); + terms.TryGetValue(term, out int? 
current); if (current != null && docIDUpto < current) { // Only record the new number if it's greater than the diff --git a/src/Lucene.Net/Index/CheckIndex.cs b/src/Lucene.Net/Index/CheckIndex.cs index 87da6d812f..29722fcf06 100644 --- a/src/Lucene.Net/Index/CheckIndex.cs +++ b/src/Lucene.Net/Index/CheckIndex.cs @@ -64,7 +64,7 @@ namespace Lucene.Net.Index public class CheckIndex { private TextWriter infoStream; - private Directory dir; + private readonly Directory dir; // LUCENENET: marked readonly /// /// Returned from detailing the health and status of the index. @@ -563,7 +563,7 @@ public virtual Status DoCheckIndex(IList onlySegments) int numSegments = sis.Count; string segmentsFileName = sis.GetSegmentsFileName(); // note: we only read the format byte (required preamble) here! - IndexInput input = null; + IndexInput input/* = null*/; // LUCENENET: IDE0059: Remove unnecessary value assignment try { input = dir.OpenInput(segmentsFileName, IOContext.READ_ONCE); @@ -582,7 +582,7 @@ public virtual Status DoCheckIndex(IList onlySegments) result.CantOpenSegments = true; return result; } - int format = 0; + int format/* = 0*/; // LUCENENET: IDE0059: Remove unnecessary value assignment try { format = input.ReadInt32(); @@ -625,7 +625,7 @@ public virtual Status DoCheckIndex(IList onlySegments) userDataString = ""; } - string versionString = null; + string versionString/* = null*/; // LUCENENET: IDE0059: Remove unnecessary value assignment if (oldSegs != null) { if (foundNonNullVersion) @@ -951,7 +951,7 @@ public static Status.FieldNormStatus TestFieldNorms(AtomicReader reader, TextWri #pragma warning disable 612, 618 if (Debugging.AssertsEnabled) Debugging.Assert(reader.HasNorms(info.Name)); // deprecated path #pragma warning restore 612, 618 - CheckNorms(info, reader, infoStream); + CheckNorms(info, reader /*, infoStream // LUCENENET: Not used */); ++status.TotFields; } else @@ -1452,9 +1452,9 @@ private static Status.TermIndexStatus CheckFields(Fields fields, IBits 
liveDocs, } else { - if (fieldTerms is BlockTreeTermsReader.FieldReader) + if (fieldTerms is BlockTreeTermsReader.FieldReader fieldReader) { - BlockTreeTermsReader.Stats stats = ((BlockTreeTermsReader.FieldReader)fieldTerms).ComputeStats(); + BlockTreeTermsReader.Stats stats = fieldReader.ComputeStats(); if (Debugging.AssertsEnabled) Debugging.Assert(stats != null); if (status.BlockTreeStats == null) { @@ -1896,9 +1896,9 @@ private static void CheckSortedSetDocValues(string fieldName, AtomicReader reade { throw new Exception("ord out of bounds: " + ord); } - if (dv is RandomAccessOrds) + if (dv is RandomAccessOrds randomAccessOrds2) { - long ord2 = ((RandomAccessOrds)dv).OrdAt(ordCount); + long ord2 = randomAccessOrds2.OrdAt(ordCount); if (ord != ord2) { throw new Exception("ordAt(" + ordCount + ") inconsistent, expected=" + ord + ",got=" + ord2 + " for doc: " + i); @@ -1913,9 +1913,9 @@ private static void CheckSortedSetDocValues(string fieldName, AtomicReader reade { throw new Exception("dv for field: " + fieldName + " has no ordinals but is not marked missing for doc: " + i); } - if (dv is RandomAccessOrds) + if (dv is RandomAccessOrds randomAccessOrds) { - long ordCount2 = ((RandomAccessOrds)dv).Cardinality(); + long ordCount2 = randomAccessOrds.Cardinality(); if (ordCount != ordCount2) { throw new Exception("cardinality inconsistent, expected=" + ordCount + ",got=" + ordCount2 + " for doc: " + i); @@ -1929,9 +1929,9 @@ private static void CheckSortedSetDocValues(string fieldName, AtomicReader reade { throw new Exception("dv for field: " + fieldName + " is marked missing but has ord=" + o + " for doc: " + i); } - if (dv is RandomAccessOrds) + if (dv is RandomAccessOrds randomAccessOrds) { - long ordCount2 = ((RandomAccessOrds)dv).Cardinality(); + long ordCount2 = randomAccessOrds.Cardinality(); if (ordCount2 != 0) { throw new Exception("dv for field: " + fieldName + " is marked missing but has cardinality " + ordCount2 + " for doc: " + i); @@ -2031,7 +2031,7 @@ 
private static void CheckDocValues(FieldInfo fi, AtomicReader reader, /*StreamWr } } - private static void CheckNorms(FieldInfo fi, AtomicReader reader, TextWriter infoStream) + private static void CheckNorms(FieldInfo fi, AtomicReader reader /*, TextWriter infoStream // LUCENENET: Not used */) { switch (fi.NormType) { @@ -2497,7 +2497,7 @@ public static void Main(string[] args) } Console.WriteLine("\nOpening index @ " + indexPath + "\n"); - Directory dir = null; + Directory dir/* = null*/; // LUCENENET: IDE0059: Remove unnecessary value assignment try { if (dirImpl == null) diff --git a/src/Lucene.Net/Index/CompositeReaderContext.cs b/src/Lucene.Net/Index/CompositeReaderContext.cs index f207ac037a..f0f28ab56a 100644 --- a/src/Lucene.Net/Index/CompositeReaderContext.cs +++ b/src/Lucene.Net/Index/CompositeReaderContext.cs @@ -97,8 +97,7 @@ public CompositeReaderContext Build() internal IndexReaderContext Build(CompositeReaderContext parent, IndexReader reader, int ord, int docBase) { - var ar = reader as AtomicReader; - if (ar != null) + if (reader is AtomicReader ar) { var atomic = new AtomicReaderContext(parent, ar, ord, docBase, leaves.Count, leafDocBase); leaves.Add(atomic); diff --git a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs index be019b0530..6a48765f97 100644 --- a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs +++ b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs @@ -606,7 +606,7 @@ public virtual void SetThreadPriority(ThreadPriority priority) // Strangely, Sun's JDK 1.5 on Linux sometimes // throws NPE out of here... 
} - catch (SecurityException se) + catch (SecurityException /*se*/) // LUCENENET: IDE0059: Remove unnecessary value assignment #pragma warning restore 168 { // Ignore this because we will still run fine with diff --git a/src/Lucene.Net/Index/DirectoryReader.cs b/src/Lucene.Net/Index/DirectoryReader.cs index 9e013f18ff..9de6f4f819 100644 --- a/src/Lucene.Net/Index/DirectoryReader.cs +++ b/src/Lucene.Net/Index/DirectoryReader.cs @@ -365,9 +365,7 @@ public static bool IndexExists(Directory directory) { files = directory.ListAll(); } -#pragma warning disable 168 - catch (DirectoryNotFoundException nsde) -#pragma warning restore 168 + catch (DirectoryNotFoundException /*nsde*/) // LUCENENET: IDE0059: Remove unnecessary value assignment { // Directory does not exist --> no index exists return false; diff --git a/src/Lucene.Net/Index/DocFieldProcessor.cs b/src/Lucene.Net/Index/DocFieldProcessor.cs index ea2decda80..5d4377db8b 100644 --- a/src/Lucene.Net/Index/DocFieldProcessor.cs +++ b/src/Lucene.Net/Index/DocFieldProcessor.cs @@ -145,9 +145,9 @@ public override void Abort() // If any errors occured, throw it. if (th != null) { - if (th is Exception) + if (th is Exception e) { - throw (Exception)th; + throw e; } // defensive code - we should not hit unchecked exceptions throw new Exception(th.Message, th); diff --git a/src/Lucene.Net/Index/DocTermOrds.cs b/src/Lucene.Net/Index/DocTermOrds.cs index 70403c3a99..b6fea05604 100644 --- a/src/Lucene.Net/Index/DocTermOrds.cs +++ b/src/Lucene.Net/Index/DocTermOrds.cs @@ -112,15 +112,15 @@ public class DocTermOrds /// Term ords are shifted by this, internally, to reserve /// values 0 (end term) and 1 (index is a pointer into byte array) /// - private static readonly int TNUM_OFFSET = 2; + private const int TNUM_OFFSET = 2; /// /// Every 128th term is indexed, by default. 
- public static readonly int DEFAULT_INDEX_INTERVAL_BITS = 7; // decrease to a low number like 2 for testing + public const int DEFAULT_INDEX_INTERVAL_BITS = 7; // decrease to a low number like 2 for testing - private int indexIntervalBits; - private int indexIntervalMask; - private int indexInterval; + private readonly int indexIntervalBits; // LUCENENET: marked readonly + private readonly int indexIntervalMask; // LUCENENET: marked readonly + private readonly int indexInterval; // LUCENENET: marked readonly /// /// Don't uninvert terms that exceed this count. @@ -411,9 +411,7 @@ protected virtual void Uninvert(AtomicReader reader, IBits liveDocs, BytesRef te m_ordBase = (int)te.Ord; //System.out.println("got ordBase=" + ordBase); } -#pragma warning disable 168 - catch (NotSupportedException uoe) -#pragma warning restore 168 + catch (NotSupportedException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // Reader cannot provide ord support, so we wrap // our own support by creating our own terms index: @@ -736,11 +734,6 @@ private static int WriteInt32(int x, sbyte[] arr, int pos) /// private sealed class OrdWrappedTermsEnum : TermsEnum { - internal void InitializeInstanceFields() - { - ord = -outerInstance.indexInterval - 1; - } - private readonly DocTermOrds outerInstance; internal readonly TermsEnum termsEnum; @@ -751,7 +744,7 @@ public OrdWrappedTermsEnum(DocTermOrds outerInstance, AtomicReader reader) { this.outerInstance = outerInstance; - InitializeInstanceFields(); + ord = -outerInstance.indexInterval - 1; if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.m_indexedTermsArray != null); termsEnum = reader.Fields.GetTerms(outerInstance.m_field).GetEnumerator(); } @@ -1070,7 +1063,7 @@ public override void SetDocument(int docID) public override void LookupOrd(long ord, BytesRef result) { - BytesRef @ref = null; + BytesRef @ref/* = null*/; // LUCENENET: IDE0059: Remove unnecessary value assignment try { @ref = 
outerInstance.LookupTerm(te, (int)ord); diff --git a/src/Lucene.Net/Index/DocValues.cs b/src/Lucene.Net/Index/DocValues.cs index cb43dadbb7..1cf287ecbe 100644 --- a/src/Lucene.Net/Index/DocValues.cs +++ b/src/Lucene.Net/Index/DocValues.cs @@ -147,9 +147,9 @@ public static SortedSetDocValues Singleton(SortedDocValues dv) /// public static SortedDocValues UnwrapSingleton(SortedSetDocValues dv) { - if (dv is SingletonSortedSetDocValues) + if (dv is SingletonSortedSetDocValues singletonSortedSetDocValues) { - return ((SingletonSortedSetDocValues)dv).SortedDocValues; + return singletonSortedSetDocValues.SortedDocValues; } else { @@ -167,10 +167,10 @@ public static IBits DocsWithValue(SortedDocValues dv, int maxDoc) private class BitsAnonymousInnerClassHelper : IBits { - private Lucene.Net.Index.SortedDocValues dv; - private int maxDoc; + private readonly SortedDocValues dv; + private readonly int maxDoc; - public BitsAnonymousInnerClassHelper(Lucene.Net.Index.SortedDocValues dv, int maxDoc) + public BitsAnonymousInnerClassHelper(SortedDocValues dv, int maxDoc) { this.dv = dv; this.maxDoc = maxDoc; @@ -194,10 +194,10 @@ public static IBits DocsWithValue(SortedSetDocValues dv, int maxDoc) private class BitsAnonymousInnerClassHelper2 : IBits { - private Lucene.Net.Index.SortedSetDocValues dv; - private int maxDoc; + private readonly SortedSetDocValues dv; + private readonly int maxDoc; - public BitsAnonymousInnerClassHelper2(Lucene.Net.Index.SortedSetDocValues dv, int maxDoc) + public BitsAnonymousInnerClassHelper2(SortedSetDocValues dv, int maxDoc) { this.dv = dv; this.maxDoc = maxDoc; diff --git a/src/Lucene.Net/Index/DocValuesProcessor.cs b/src/Lucene.Net/Index/DocValuesProcessor.cs index 38c0f4e393..3b71a8aeb7 100644 --- a/src/Lucene.Net/Index/DocValuesProcessor.cs +++ b/src/Lucene.Net/Index/DocValuesProcessor.cs @@ -125,89 +125,81 @@ public override void Flush(SegmentWriteState state) internal void AddBinaryField(FieldInfo fieldInfo, int docID, BytesRef value) { - 
DocValuesWriter writer; - writers.TryGetValue(fieldInfo.Name, out writer); BinaryDocValuesWriter binaryWriter; - if (writer == null) + if (!writers.TryGetValue(fieldInfo.Name, out DocValuesWriter writer) || writer is null) { binaryWriter = new BinaryDocValuesWriter(fieldInfo, bytesUsed); writers[fieldInfo.Name] = binaryWriter; } - else if (!(writer is BinaryDocValuesWriter)) + else if (writer is BinaryDocValuesWriter temp) { - throw new ArgumentException("Incompatible DocValues type: field \"" + fieldInfo.Name + "\" changed from " + GetTypeDesc(writer) + " to binary"); + binaryWriter = temp; } else { - binaryWriter = (BinaryDocValuesWriter)writer; + throw new ArgumentException($"Incompatible DocValues type: field \"{fieldInfo.Name}\" changed from {GetTypeDesc(writer)} to binary"); } binaryWriter.AddValue(docID, value); } internal void AddSortedField(FieldInfo fieldInfo, int docID, BytesRef value) { - DocValuesWriter writer; - writers.TryGetValue(fieldInfo.Name, out writer); SortedDocValuesWriter sortedWriter; - if (writer == null) + if (!writers.TryGetValue(fieldInfo.Name, out DocValuesWriter writer) || writer is null) { sortedWriter = new SortedDocValuesWriter(fieldInfo, bytesUsed); writers[fieldInfo.Name] = sortedWriter; } - else if (!(writer is SortedDocValuesWriter)) + else if (writer is SortedDocValuesWriter temp) { - throw new ArgumentException("Incompatible DocValues type: field \"" + fieldInfo.Name + "\" changed from " + GetTypeDesc(writer) + " to sorted"); + sortedWriter = temp; } else { - sortedWriter = (SortedDocValuesWriter)writer; + throw new ArgumentException($"Incompatible DocValues type: field \"{fieldInfo.Name}\" changed from {GetTypeDesc(writer)} to sorted"); } sortedWriter.AddValue(docID, value); } internal void AddSortedSetField(FieldInfo fieldInfo, int docID, BytesRef value) { - DocValuesWriter writer; - writers.TryGetValue(fieldInfo.Name, out writer); SortedSetDocValuesWriter sortedSetWriter; - if (writer == null) + if 
(!writers.TryGetValue(fieldInfo.Name, out DocValuesWriter writer) || writer is null) { sortedSetWriter = new SortedSetDocValuesWriter(fieldInfo, bytesUsed); writers[fieldInfo.Name] = sortedSetWriter; } - else if (!(writer is SortedSetDocValuesWriter)) + else if (writer is SortedSetDocValuesWriter temp) { - throw new ArgumentException("Incompatible DocValues type: field \"" + fieldInfo.Name + "\" changed from " + GetTypeDesc(writer) + " to sorted"); + sortedSetWriter = temp; } else { - sortedSetWriter = (SortedSetDocValuesWriter)writer; + throw new ArgumentException($"Incompatible DocValues type: field \"{fieldInfo.Name}\" changed from {GetTypeDesc(writer)} to sorted"); } sortedSetWriter.AddValue(docID, value); } internal void AddNumericField(FieldInfo fieldInfo, int docID, long value) { - DocValuesWriter writer; - writers.TryGetValue(fieldInfo.Name, out writer); NumericDocValuesWriter numericWriter; - if (writer == null) + if (!writers.TryGetValue(fieldInfo.Name, out DocValuesWriter writer) || writer is null) { numericWriter = new NumericDocValuesWriter(fieldInfo, bytesUsed, true); writers[fieldInfo.Name] = numericWriter; } - else if (!(writer is NumericDocValuesWriter)) + else if (writer is NumericDocValuesWriter temp) { - throw new ArgumentException("Incompatible DocValues type: field \"" + fieldInfo.Name + "\" changed from " + GetTypeDesc(writer) + " to numeric"); + numericWriter = temp; } else { - numericWriter = (NumericDocValuesWriter)writer; + throw new ArgumentException($"Incompatible DocValues type: field \"{fieldInfo.Name}\" changed from {GetTypeDesc(writer)} to numeric"); } numericWriter.AddValue(docID, value); } - private string GetTypeDesc(DocValuesWriter obj) + private static string GetTypeDesc(DocValuesWriter obj) // LUCENENET specific - made static { if (obj is BinaryDocValuesWriter) { @@ -233,10 +225,9 @@ public override void Abort() { writer.Abort(); } -#pragma warning disable 168 - catch (Exception t) -#pragma warning restore 168 + catch 
(Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignore } } writers.Clear(); diff --git a/src/Lucene.Net/Index/DocValuesUpdate.cs b/src/Lucene.Net/Index/DocValuesUpdate.cs index bd2494a181..b967d48cac 100644 --- a/src/Lucene.Net/Index/DocValuesUpdate.cs +++ b/src/Lucene.Net/Index/DocValuesUpdate.cs @@ -84,7 +84,7 @@ public sealed class BinaryDocValuesUpdate : DocValuesUpdate internal static readonly BytesRef MISSING = new BytesRef(); internal BinaryDocValuesUpdate(Term term, string field, BytesRef value) - : base(DocValuesFieldUpdatesType.BINARY, term, field, value == null ? MISSING : value) + : base(DocValuesFieldUpdatesType.BINARY, term, field, value ?? MISSING) { } diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs index cdb4688d3e..cc163151a2 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs @@ -274,7 +274,7 @@ internal void DoAfterFlush(DocumentsWriterPerThread dwpt) long? 
bytes = flushingWriters[dwpt]; flushingWriters.Remove(dwpt); flushBytes -= (long)bytes; - perThreadPool.Recycle(dwpt); + //perThreadPool.Recycle(dwpt); // LUCENENET: This is a no-op method in Lucene and it cannot be overridden if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory()); } finally @@ -367,7 +367,7 @@ internal void DoOnAbort(ThreadState state) } if (Debugging.AssertsEnabled) Debugging.Assert(AssertMemory()); // Take it out of the loop this DWPT is stale - perThreadPool.Reset(state, closed); + DocumentsWriterPerThreadPool.Reset(state, closed); // LUCENENET specific - made static per CA1822 } finally { @@ -397,7 +397,7 @@ private void CheckoutAndBlock(ThreadState perThread) } DocumentsWriterPerThread dwpt; long bytes = perThread.bytesUsed; - dwpt = perThreadPool.Reset(perThread, closed); + dwpt = DocumentsWriterPerThreadPool.Reset(perThread, closed); // LUCENENET specific - made method static per CA1822 numPending--; blockedFlushes.AddLast(new BlockedFlush(dwpt, bytes)); } @@ -427,7 +427,7 @@ private DocumentsWriterPerThread InternalTryCheckOutForFlush(ThreadState perThre DocumentsWriterPerThread dwpt; long bytes = perThread.bytesUsed; // do that before // replace! 
- dwpt = perThreadPool.Reset(perThread, closed); + dwpt = DocumentsWriterPerThreadPool.Reset(perThread, closed); // LUCENENET specific - made method static per CA1822 if (Debugging.AssertsEnabled) Debugging.Assert(!flushingWriters.ContainsKey(dwpt), "DWPT is already flushing"); // Record the flushing DWPT to reduce flushBytes in doAfterFlush flushingWriters[dwpt] = bytes; @@ -518,7 +518,7 @@ private class IteratorAnonymousInnerClassHelper : IEnumerator { private readonly DocumentsWriterFlushControl outerInstance; private ThreadState current; - private int upto; + private readonly int upto; private int i; public IteratorAnonymousInnerClassHelper(DocumentsWriterFlushControl outerInstance, int upto) @@ -594,7 +594,7 @@ public void SetApplyAllDeletes() internal ThreadState ObtainAndLock() { - ThreadState perThread = perThreadPool.GetAndLock(Thread.CurrentThread, documentsWriter); + ThreadState perThread = perThreadPool.GetAndLock(/* Thread.CurrentThread, documentsWriter // LUCENENET: Not used */); bool success = false; try { @@ -646,7 +646,7 @@ internal void MarkForFullFlush() { if (closed && next.IsActive) { - perThreadPool.DeactivateThreadState(next); + DocumentsWriterPerThreadPool.DeactivateThreadState(next); // LUCENENET specific - made method static per CA1822 } continue; } @@ -739,7 +739,7 @@ internal void AddFlushableState(ThreadState perThread) } else { - perThreadPool.Reset(perThread, closed); // make this state inactive + DocumentsWriterPerThreadPool.Reset(perThread, closed); // make this state inactive // LUCENENET specific - made method static per CA1822 } } diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs index 0b7b1eba20..1c5dd96556 100644 --- a/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs +++ b/src/Lucene.Net/Index/DocumentsWriterFlushQueue.cs @@ -242,7 +242,7 @@ protected FlushTicket(FrozenBufferedUpdates frozenUpdates) /// publishing operation is synced on IW -> BDS so that the 's 
/// delete generation is always () + 1 /// - protected void PublishFlushedSegment(IndexWriter indexWriter, FlushedSegment newSegment, FrozenBufferedUpdates globalPacket) + protected static void PublishFlushedSegment(IndexWriter indexWriter, FlushedSegment newSegment, FrozenBufferedUpdates globalPacket) // LUCENENET: CA1822: Mark members as static { if (Debugging.AssertsEnabled) { diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs index 189a28decd..bea4cb531f 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs @@ -175,10 +175,9 @@ internal virtual void Abort(ISet createdFiles) { consumer.Abort(); } -#pragma warning disable 168 - catch (Exception t) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignore } pendingUpdates.Clear(); @@ -211,7 +210,7 @@ internal virtual void Abort(ISet createdFiles) internal bool aborting = false; // True if an abort is pending internal bool hasAborted = false; // True if the last exception throws by #updateDocument was aborting - private FieldInfos.Builder fieldInfos; + private readonly FieldInfos.Builder fieldInfos; // LUCENENET: marked readonly private readonly InfoStream infoStream; private int numDocsInRAM; internal readonly DocumentsWriterDeleteQueue deleteQueue; diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs index 60a34f1cb6..d20e69fca0 100644 --- a/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs +++ b/src/Lucene.Net/Index/DocumentsWriterPerThreadPool.cs @@ -3,6 +3,7 @@ using Lucene.Net.Support.Threading; using System; using System.Diagnostics; +using System.Runtime.CompilerServices; using System.Threading; namespace Lucene.Net.Index @@ -118,7 +119,7 @@ internal bool IsInitialized return IsActive && dwpt != null; } } - + /// /// Returns the number of currently 
active bytes in this ThreadState's @@ -220,13 +221,13 @@ public ThreadState NewThreadState() { // unreleased thread states are deactivated during DW#close() numThreadStatesActive++; // increment will publish the ThreadState - //System.out.println("activeCount=" + numThreadStatesActive); - if (Debugging.AssertsEnabled) Debugging.Assert(threadState.dwpt == null); + //System.out.println("activeCount=" + numThreadStatesActive); + if (Debugging.AssertsEnabled) Debugging.Assert(threadState.dwpt == null); unlock = false; return threadState; } // we are closed: unlock since the threadstate is not active anymore - if (Debugging.AssertsEnabled) Debugging.Assert(AssertUnreleasedThreadStatesInactive()); + if (Debugging.AssertsEnabled) Debugging.Assert(AssertUnreleasedThreadStatesInactive()); return null; } finally @@ -282,7 +283,7 @@ internal void DeactivateUnreleasedStates() } } - internal DocumentsWriterPerThread Reset(ThreadState threadState, bool closed) + internal static DocumentsWriterPerThread Reset(ThreadState threadState, bool closed) // LUCENENET: CA1822: Mark members as static { if (Debugging.AssertsEnabled) Debugging.Assert(threadState.IsHeldByCurrentThread); DocumentsWriterPerThread dwpt = threadState.dwpt; @@ -297,14 +298,15 @@ internal DocumentsWriterPerThread Reset(ThreadState threadState, bool closed) return dwpt; } - internal void Recycle(DocumentsWriterPerThread dwpt) - { - // don't recycle DWPT by default - } + // LUCENENET: Called in one place, but since there is no implementation it is just wasted CPU + //internal void Recycle(DocumentsWriterPerThread dwpt) + //{ + // // don't recycle DWPT by default + //} // you cannot subclass this without being in o.a.l.index package anyway, so // the class is already pkg-private... 
fix me: see LUCENE-4013 - public ThreadState GetAndLock(Thread requestingThread, DocumentsWriter documentsWriter) + public ThreadState GetAndLock(/* Thread requestingThread, DocumentsWriter documentsWriter // LUCENENET: Not referenced */) { ThreadState threadState = null; lock (this) @@ -383,6 +385,7 @@ public void Release(ThreadState state) /// the ordinal of the /// the ith active where i is the /// given ord. + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal ThreadState GetThreadState(int ord) { return threadStates[ord]; @@ -441,7 +444,8 @@ internal int NumDeactivatedThreadStates() /// if the parent is closed or aborted. /// /// the state to deactivate - internal void DeactivateThreadState(ThreadState threadState) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static void DeactivateThreadState(ThreadState threadState) // LUCENENET: CA1822: Mark members as static { if (Debugging.AssertsEnabled) Debugging.Assert(threadState.IsActive); threadState.Deactivate(); diff --git a/src/Lucene.Net/Index/FieldInfo.cs b/src/Lucene.Net/Index/FieldInfo.cs index 5830693d9d..2d7c3a685e 100644 --- a/src/Lucene.Net/Index/FieldInfo.cs +++ b/src/Lucene.Net/Index/FieldInfo.cs @@ -269,8 +269,7 @@ public string GetAttribute(string key) } else { - string ret; - attributes.TryGetValue(key, out ret); + attributes.TryGetValue(key, out string ret); return ret; } } @@ -292,9 +291,8 @@ public string PutAttribute(string key, string value) attributes = new Dictionary(); } - string ret; // The key was not previously assigned, null will be returned - if (!attributes.TryGetValue(key, out ret)) + if (!attributes.TryGetValue(key, out string ret)) { ret = null; } diff --git a/src/Lucene.Net/Index/FieldInfos.cs b/src/Lucene.Net/Index/FieldInfos.cs index c3ec781902..7ff08ad118 100644 --- a/src/Lucene.Net/Index/FieldInfos.cs +++ b/src/Lucene.Net/Index/FieldInfos.cs @@ -64,9 +64,7 @@ public FieldInfos(FieldInfo[] infos) throw new ArgumentException("illegal field number: " + 
info.Number + " for field " + info.Name); } - FieldInfo previous; - - if (byNumber.TryGetValue(info.Number, out previous)) + if (byNumber.TryGetValue(info.Number, out FieldInfo previous)) { throw new ArgumentException("duplicate field numbers: " + previous.Name + " and " + info.Name + " have: " + info.Number); } @@ -163,8 +161,7 @@ IEnumerator IEnumerable.GetEnumerator() /// doesn't exist. public virtual FieldInfo FieldInfo(string fieldName) { - FieldInfo ret; - byName.TryGetValue(fieldName, out ret); + byName.TryGetValue(fieldName, out FieldInfo ret); return ret; } @@ -180,8 +177,7 @@ public virtual FieldInfo FieldInfo(int fieldNumber) { throw new ArgumentException("Illegal field number: " + fieldNumber); } - Index.FieldInfo ret; - byNumber.TryGetValue(fieldNumber, out ret); + byNumber.TryGetValue(fieldNumber, out FieldInfo ret); return ret; } @@ -219,9 +215,7 @@ internal int AddOrGet(string fieldName, int preferredFieldNumber, DocValuesType { if (dvType != DocValuesType.NONE) { - DocValuesType currentDVType; - docValuesType.TryGetValue(fieldName, out currentDVType); - if (currentDVType == DocValuesType.NONE) // default value in .NET (value type 0) + if (!docValuesType.TryGetValue(fieldName, out DocValuesType currentDVType) || currentDVType == DocValuesType.NONE) // default value in .NET (value type 0) { docValuesType[fieldName] = dvType; } @@ -230,9 +224,7 @@ internal int AddOrGet(string fieldName, int preferredFieldNumber, DocValuesType throw new ArgumentException("cannot change DocValues type from " + currentDVType + " to " + dvType + " for field \"" + fieldName + "\""); } } - int? fieldNumber; - nameToNumber.TryGetValue(fieldName, out fieldNumber); - if (fieldNumber == null) + if (!nameToNumber.TryGetValue(fieldName, out int? fieldNumber) || fieldNumber == null) { int? preferredBoxed = preferredFieldNumber; @@ -264,17 +256,13 @@ internal bool ContainsConsistent(int? number, string name, DocValuesType dvType) { lock (this) { - string numberToNameStr; - int? 
nameToNumberVal; - DocValuesType docValuesType_E; - - numberToName.TryGetValue(number, out numberToNameStr); - nameToNumber.TryGetValue(name, out nameToNumberVal); - docValuesType.TryGetValue(name, out docValuesType_E); + numberToName.TryGetValue(number, out string numberToNameStr); + nameToNumber.TryGetValue(name, out int? nameToNumberVal); + this.docValuesType.TryGetValue(name, out DocValuesType docValuesType); return name.Equals(numberToNameStr, StringComparison.Ordinal) && number.Equals(nameToNumber[name]) && - (dvType == DocValuesType.NONE || docValuesType_E == DocValuesType.NONE || dvType == docValuesType_E); + (dvType == DocValuesType.NONE || docValuesType == DocValuesType.NONE || dvType == docValuesType); } } @@ -294,8 +282,7 @@ internal bool Contains(string fieldName, DocValuesType dvType) else { // only return true if the field has the same dvType as the requested one - DocValuesType dvCand; - docValuesType.TryGetValue(fieldName, out dvCand); + docValuesType.TryGetValue(fieldName, out DocValuesType dvCand); // LUCENENET NOTE: This could be NONE even if TryGetValue returns false return dvType == dvCand; } } diff --git a/src/Lucene.Net/Index/FilterAtomicReader.cs b/src/Lucene.Net/Index/FilterAtomicReader.cs index 11795f5fca..db9e359561 100644 --- a/src/Lucene.Net/Index/FilterAtomicReader.cs +++ b/src/Lucene.Net/Index/FilterAtomicReader.cs @@ -52,9 +52,9 @@ public class FilterAtomicReader : AtomicReader /// public static AtomicReader Unwrap(AtomicReader reader) { - while (reader is FilterAtomicReader) + while (reader is FilterAtomicReader filterAtomicReader) { - reader = ((FilterAtomicReader)reader).m_input; + reader = filterAtomicReader.m_input; } return reader; } diff --git a/src/Lucene.Net/Index/FilterDirectoryReader.cs b/src/Lucene.Net/Index/FilterDirectoryReader.cs index 014630e248..600ff259e2 100644 --- a/src/Lucene.Net/Index/FilterDirectoryReader.cs +++ b/src/Lucene.Net/Index/FilterDirectoryReader.cs @@ -52,7 +52,7 @@ internal virtual AtomicReader[] 
Wrap(IList readers) // LUCENENET sp /// /// Constructor - public SubReaderWrapper() + protected SubReaderWrapper() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } @@ -88,7 +88,7 @@ public override AtomicReader Wrap(AtomicReader reader) /// /// Create a new that filters a passed in . /// the to filter - public FilterDirectoryReader(DirectoryReader input) + protected FilterDirectoryReader(DirectoryReader input) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(input, new StandardReaderWrapper()) { } @@ -98,7 +98,7 @@ public FilterDirectoryReader(DirectoryReader input) /// using the supplied to wrap its subreader. /// the to filter /// the to use to wrap subreaders - public FilterDirectoryReader(DirectoryReader input, SubReaderWrapper wrapper) + protected FilterDirectoryReader(DirectoryReader input, SubReaderWrapper wrapper) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(input.Directory, wrapper.Wrap(input.GetSequentialSubReaders())) { this.m_input = input; diff --git a/src/Lucene.Net/Index/FilteredTermsEnum.cs b/src/Lucene.Net/Index/FilteredTermsEnum.cs index 9e4548c5a9..f41fc441b5 100644 --- a/src/Lucene.Net/Index/FilteredTermsEnum.cs +++ b/src/Lucene.Net/Index/FilteredTermsEnum.cs @@ -86,7 +86,7 @@ protected internal enum AcceptStatus /// /// Creates a filtered on a terms enum. /// the terms enumeration to filter. - public FilteredTermsEnum(TermsEnum tenum) + protected FilteredTermsEnum(TermsEnum tenum) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(tenum, true) { } @@ -95,7 +95,7 @@ public FilteredTermsEnum(TermsEnum tenum) /// Creates a filtered on a terms enum. /// the terms enumeration to filter. 
/// start with seek - public FilteredTermsEnum(TermsEnum tenum, bool startWithSeek) + protected FilteredTermsEnum(TermsEnum tenum, bool startWithSeek) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { if (Debugging.AssertsEnabled) Debugging.Assert(tenum != null); this.tenum = tenum; diff --git a/src/Lucene.Net/Index/FreqProxTermsWriter.cs b/src/Lucene.Net/Index/FreqProxTermsWriter.cs index 3bb8fc68ff..99dc07c32b 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriter.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriter.cs @@ -93,7 +93,7 @@ public override void Flush(IDictionary fields termsHash = perField.termsHash; int numPostings = perField.bytesHash.Count; perField.Reset(); - perField.ShrinkHash(numPostings); + perField.ShrinkHash(/* numPostings // LUCENENET: Not used */); fieldWriter.Reset(); } diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs index 5ed775ccc2..eff652967f 100644 --- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs +++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs @@ -487,9 +487,7 @@ internal void Flush(string fieldName, FieldsConsumer consumer, SegmentWriteState if (segDeletes != null) { protoTerm.Bytes = text; - int? docIDUpto; - segDeletes.TryGetValue(protoTerm, out docIDUpto); - if (docIDUpto != null) + if (segDeletes.TryGetValue(protoTerm, out int? 
docIDUpto) && docIDUpto != null) { delDocLimit = docIDUpto; } diff --git a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs index 4a7a810ab0..b4ee244abf 100644 --- a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs +++ b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs @@ -204,7 +204,8 @@ IEnumerator IEnumerable.GetEnumerator() private class IteratorAnonymousInnerClassHelper : IEnumerator { private readonly IterableAnonymousInnerClassHelper2 outerInstance; - private int upto, i; + private readonly int upto; + private int i; private QueryAndLimit current; public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper2 outerInstance) diff --git a/src/Lucene.Net/Index/IndexCommit.cs b/src/Lucene.Net/Index/IndexCommit.cs index 449c8e5c74..c49cff630b 100644 --- a/src/Lucene.Net/Index/IndexCommit.cs +++ b/src/Lucene.Net/Index/IndexCommit.cs @@ -95,9 +95,8 @@ protected IndexCommit() /// Two IndexCommits are equal if both their and versions are equal. public override bool Equals(object other) { - if (other is IndexCommit) + if (other is IndexCommit otherCommit) { - IndexCommit otherCommit = (IndexCommit)other; return otherCommit.Directory == Directory && otherCommit.Generation == Generation; } else diff --git a/src/Lucene.Net/Index/IndexFileDeleter.cs b/src/Lucene.Net/Index/IndexFileDeleter.cs index d2a07c09e5..fd52c455cf 100644 --- a/src/Lucene.Net/Index/IndexFileDeleter.cs +++ b/src/Lucene.Net/Index/IndexFileDeleter.cs @@ -78,7 +78,7 @@ internal sealed class IndexFileDeleter : IDisposable /// Reference count for all files in the index. /// Counts how many existing commits reference a file. /// - private IDictionary refCounts = new Dictionary(); + private readonly IDictionary refCounts = new Dictionary(); // LUCENENET: marked readonly /// /// Holds all commits (segments_N) currently in the index. 
@@ -87,7 +87,7 @@ internal sealed class IndexFileDeleter : IDisposable /// Other policies may leave commit points live for longer /// in which case this list would be longer than 1: /// - private IList commits = new List(); + private readonly IList commits = new List(); // LUCENENET: marked readonly /// /// Holds files we had incref'd from the previous @@ -98,14 +98,14 @@ internal sealed class IndexFileDeleter : IDisposable /// /// Commits that the IndexDeletionPolicy have decided to delete: /// - private IList commitsToDelete = new List(); + private readonly IList commitsToDelete = new List(); // LUCENENET: marked readonly private readonly InfoStream infoStream; - private Directory directory; - private IndexDeletionPolicy policy; + private readonly Directory directory; // LUCENENET: marked readonly + private readonly IndexDeletionPolicy policy; // LUCENENET: marked readonly internal readonly bool startingCommitDeleted; - private SegmentInfos lastSegmentInfos; + private readonly SegmentInfos lastSegmentInfos; // LUCENENET: marked readonly /// /// Change to true to see details of reference counts when @@ -146,14 +146,12 @@ public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, Segment long currentGen = segmentInfos.Generation; CommitPoint currentCommitPoint = null; - string[] files = null; + string[] files/* = null*/; try { files = directory.ListAll(); } -#pragma warning disable 168 - catch (DirectoryNotFoundException e) -#pragma warning restore 168 + catch (DirectoryNotFoundException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // it means the directory is empty, so ignore it. 
files = Arrays.Empty(); diff --git a/src/Lucene.Net/Index/IndexReader.cs b/src/Lucene.Net/Index/IndexReader.cs index fabcf91b39..651bd52bfe 100644 --- a/src/Lucene.Net/Index/IndexReader.cs +++ b/src/Lucene.Net/Index/IndexReader.cs @@ -207,6 +207,7 @@ private void ReportCloseToParentReaders() /// /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void IncRef() { if (!TryIncRef()) @@ -303,6 +304,7 @@ public void DecRef() /// Throws if this or any /// of its child readers is disposed, otherwise returns. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal void EnsureOpen() { if (refCount <= 0) diff --git a/src/Lucene.Net/Index/IndexUpgrader.cs b/src/Lucene.Net/Index/IndexUpgrader.cs index ee266a7f1b..4a617fef49 100644 --- a/src/Lucene.Net/Index/IndexUpgrader.cs +++ b/src/Lucene.Net/Index/IndexUpgrader.cs @@ -123,7 +123,7 @@ public static IndexUpgrader ParseArgs(string[] args) PrintUsage(); } - Directory dir = null; + Directory dir/* = null*/; // LUCENENET: IDE0059: Remove unnecessary value assignment if (dirImpl == null) { dir = FSDirectory.Open(new DirectoryInfo(path)); diff --git a/src/Lucene.Net/Index/IndexWriter.cs b/src/Lucene.Net/Index/IndexWriter.cs index 0fee329454..928251db3a 100644 --- a/src/Lucene.Net/Index/IndexWriter.cs +++ b/src/Lucene.Net/Index/IndexWriter.cs @@ -473,9 +473,7 @@ public virtual void Drop(SegmentCommitInfo info) { lock (this) { - ReadersAndUpdates rld; - readerMap.TryGetValue(info, out rld); - if (rld != null) + if (readerMap.TryGetValue(info, out ReadersAndUpdates rld) && rld != null) { if (Debugging.AssertsEnabled) Debugging.Assert(info == rld.Info); // System.out.println("[" + Thread.currentThread().getName() + "] ReaderPool.drop: " + info); @@ -661,8 +659,7 @@ public virtual void Commit(SegmentInfos infos) { foreach (SegmentCommitInfo info in infos.Segments) { - ReadersAndUpdates rld; - if (readerMap.TryGetValue(info, out rld)) + if (readerMap.TryGetValue(info, out ReadersAndUpdates rld)) { if 
(Debugging.AssertsEnabled) Debugging.Assert(rld.Info == info); if (rld.WriteLiveDocs(outerInstance.directory)) @@ -694,9 +691,7 @@ public virtual ReadersAndUpdates Get(SegmentCommitInfo info, bool create) { if (Debugging.AssertsEnabled) Debugging.Assert(info.Info.Dir == outerInstance.directory,"info.dir={0} vs {1}", info.Info.Dir, outerInstance.directory); - ReadersAndUpdates rld; - readerMap.TryGetValue(info, out rld); - if (rld == null) + if (!readerMap.TryGetValue(info, out ReadersAndUpdates rld) || rld == null) { if (!create) { @@ -1636,13 +1631,7 @@ public virtual bool TryDeleteDocument(IndexReader readerIn, int docID) { lock (this) { - AtomicReader reader; - if (readerIn is AtomicReader) - { - // Reader is already atomic: use the incoming docID: - reader = (AtomicReader)readerIn; - } - else + if (!(readerIn is AtomicReader reader)) { // Composite reader: lookup sub-reader and re-base docID: IList leaves = readerIn.Leaves; @@ -1655,13 +1644,14 @@ public virtual bool TryDeleteDocument(IndexReader readerIn, int docID) Debugging.Assert(docID < reader.MaxDoc); } } + // else: Reader is already atomic: use the incoming docID - if (!(reader is SegmentReader)) + if (!(reader is SegmentReader segmentReader)) { throw new ArgumentException("the reader must be a SegmentReader or composite reader containing only SegmentReaders"); } - SegmentCommitInfo info = ((SegmentReader)reader).SegmentInfo; + SegmentCommitInfo info = segmentReader.SegmentInfo; // TODO: this is a slow linear search, but, number of // segments should be contained unless something is @@ -4019,7 +4009,7 @@ private void EnsureValidMerge(MergePolicy.OneMerge merge) } } - private void SkipDeletedDoc(DocValuesFieldUpdates.Iterator[] updatesIters, int deletedDoc) + private static void SkipDeletedDoc(DocValuesFieldUpdates.Iterator[] updatesIters, int deletedDoc) // LUCENENET: CA1822: Mark members as static { foreach (DocValuesFieldUpdates.Iterator iter in updatesIters) { @@ -4824,9 +4814,11 @@ private void 
MergeInitImpl(MergePolicy.OneMerge merge) // LUCENENET specific: re // names. string mergeSegmentName = NewSegmentName(); SegmentInfo si = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergeSegmentName, -1, false, codec, null); - IDictionary details = new Dictionary(); - details["mergeMaxNumSegments"] = "" + merge.MaxNumSegments; - details["mergeFactor"] = Convert.ToString(merge.Segments.Count); + IDictionary details = new Dictionary + { + ["mergeMaxNumSegments"] = "" + merge.MaxNumSegments, + ["mergeFactor"] = Convert.ToString(merge.Segments.Count) + }; SetDiagnostics(si, SOURCE_MERGE, details); merge.Info = new SegmentCommitInfo(si, 0, -1L, -1L); @@ -4849,15 +4841,17 @@ internal static void SetDiagnostics(SegmentInfo info, string source) private static void SetDiagnostics(SegmentInfo info, string source, IDictionary details) { - IDictionary diagnostics = new Dictionary(); - diagnostics["source"] = source; - diagnostics["lucene.version"] = Constants.LUCENE_VERSION; - diagnostics["os"] = Constants.OS_NAME; - diagnostics["os.arch"] = Constants.OS_ARCH; - diagnostics["os.version"] = Constants.OS_VERSION; - diagnostics["java.version"] = Constants.RUNTIME_VERSION; - diagnostics["java.vendor"] = Constants.RUNTIME_VENDOR; - diagnostics["timestamp"] = Convert.ToString((DateTime.Now)); + IDictionary diagnostics = new Dictionary + { + ["source"] = source, + ["lucene.version"] = Constants.LUCENE_VERSION, + ["os"] = Constants.OS_NAME, + ["os.arch"] = Constants.OS_ARCH, + ["os.version"] = Constants.OS_VERSION, + ["java.version"] = Constants.RUNTIME_VERSION, + ["java.vendor"] = Constants.RUNTIME_VENDOR, + ["timestamp"] = Convert.ToString((DateTime.Now)) + }; if (details != null) { diagnostics.PutAll(details); @@ -5420,8 +5414,7 @@ internal virtual SegmentInfos ToLiveInfos(SegmentInfos sis) foreach (SegmentCommitInfo info in sis.Segments) { SegmentCommitInfo infoMod = info; - SegmentCommitInfo liveInfo; - if (liveSIS.TryGetValue(info, out liveInfo)) + if 
(liveSIS.TryGetValue(info, out SegmentCommitInfo liveInfo)) { infoMod = liveInfo; } @@ -5583,7 +5576,7 @@ public static bool IsLocked(Directory directory) /// public static void Unlock(Directory directory) { - using (var _ = directory.MakeLock(IndexWriter.WRITE_LOCK_NAME)) { } + using var _ = directory.MakeLock(IndexWriter.WRITE_LOCK_NAME); } /// @@ -5710,13 +5703,8 @@ public virtual void DeleteUnusedFiles() } } - private void DeletePendingFiles() - { - lock (this) - { - deleter.DeletePendingFiles(); - } - } + // LUCENENET specific - DeletePendingFiles() excluded because it is not referenced - IDE0051 + /// /// NOTE: this method creates a compound file for all files returned by @@ -5779,9 +5767,11 @@ internal static ICollection CreateCompoundFile(InfoStream infoStream, Di } // Replace all previous files with the CFS/CFE files: - JCG.HashSet siFiles = new JCG.HashSet(); - siFiles.Add(fileName); - siFiles.Add(Lucene.Net.Index.IndexFileNames.SegmentFileName(info.Name, "", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION)); + JCG.HashSet siFiles = new JCG.HashSet + { + fileName, + Lucene.Net.Index.IndexFileNames.SegmentFileName(info.Name, "", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION) + }; info.SetFiles(siFiles); return files; @@ -5869,9 +5859,8 @@ private bool ProcessEvents(bool triggerMerge, bool forcePurge) private bool ProcessEvents(ConcurrentQueue queue, bool triggerMerge, bool forcePurge) { - IEvent @event; bool processed = false; - while (queue.TryDequeue(out @event)) + while (queue.TryDequeue(out IEvent @event)) { processed = true; @event.Process(this, triggerMerge, forcePurge); diff --git a/src/Lucene.Net/Index/IndexWriterConfig.cs b/src/Lucene.Net/Index/IndexWriterConfig.cs index 6f7b230335..a525a9bd69 100644 --- a/src/Lucene.Net/Index/IndexWriterConfig.cs +++ b/src/Lucene.Net/Index/IndexWriterConfig.cs @@ -390,10 +390,7 @@ public object Clone() throw new InvalidOperationException(cce.Message, cce); } } - set - { - 
this.indexerThreadPool = new DocumentsWriterPerThreadPool(value); - } + set => this.indexerThreadPool = new DocumentsWriterPerThreadPool(value); } /// diff --git a/src/Lucene.Net/Index/LogMergePolicy.cs b/src/Lucene.Net/Index/LogMergePolicy.cs index 1424d99cc3..98916dbe08 100644 --- a/src/Lucene.Net/Index/LogMergePolicy.cs +++ b/src/Lucene.Net/Index/LogMergePolicy.cs @@ -111,7 +111,7 @@ public abstract class LogMergePolicy : MergePolicy /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public LogMergePolicy() + protected LogMergePolicy() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(DEFAULT_NO_CFS_RATIO, MergePolicy.DEFAULT_MAX_CFS_SEGMENT_SIZE) { } @@ -230,9 +230,7 @@ protected virtual bool IsMerged(SegmentInfos infos, int maxNumSegments, IDiction for (int i = 0; i < numSegments && numToMerge <= maxNumSegments; i++) { SegmentCommitInfo info = infos.Info(i); - bool? isOriginal; - segmentsToMerge.TryGetValue(info, out isOriginal); - if (isOriginal != null) + if (segmentsToMerge.TryGetValue(info, out bool? isOriginal) && isOriginal != null) { segmentIsOriginal = isOriginal.Value; numToMerge++; @@ -243,15 +241,16 @@ protected virtual bool IsMerged(SegmentInfos infos, int maxNumSegments, IDiction return numToMerge <= maxNumSegments && (numToMerge != 1 || !segmentIsOriginal || IsMerged(infos, mergeInfo)); } - /// - /// Returns the merges necessary to merge the index, taking the max merge - /// size or max merge docs into consideration. this method attempts to respect - /// the parameter, however it might be, due to size - /// constraints, that more than that number of segments will remain in the - /// index. Also, this method does not guarantee that exactly - /// will remain, but <= that number. 
- /// - private MergeSpecification FindForcedMergesSizeLimit(SegmentInfos infos, int maxNumSegments, int last) + // LUCENENET: This documentation is clearly out of date because it refers to an unused parameter + ///// + ///// Returns the merges necessary to merge the index, taking the max merge + ///// size or max merge docs into consideration. this method attempts to respect + ///// the parameter, however it might be, due to size + ///// constraints, that more than that number of segments will remain in the + ///// index. Also, this method does not guarantee that exactly + ///// will remain, but <= that number. + ///// + private MergeSpecification FindForcedMergesSizeLimit(SegmentInfos infos, /* int maxNumSegments, LUCENENET: Not referenced */ int last) { MergeSpecification spec = new MergeSpecification(); IList segments = infos.AsList(); @@ -443,7 +442,7 @@ public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxN if (anyTooLarge) { - return FindForcedMergesSizeLimit(infos, maxNumSegments, last); + return FindForcedMergesSizeLimit(infos, /* maxNumSegments, // LUCENENET: Not referenced */ last); } else { @@ -526,13 +525,13 @@ private class SegmentInfoAndLevel : IComparable { internal readonly SegmentCommitInfo info; internal readonly float level; - private int index; + //private int index; // LUCENENET: Never read - public SegmentInfoAndLevel(SegmentCommitInfo info, float level, int index) + public SegmentInfoAndLevel(SegmentCommitInfo info, float level /*, int index // LUCENENET: Never read */) { this.info = info; this.level = level; - this.index = index; + //this.index = index; // LUCENENET: Never read } // Sorts largest to smallest @@ -577,7 +576,7 @@ public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, Segment size = 1; } - SegmentInfoAndLevel infoLevel = new SegmentInfoAndLevel(info, (float)Math.Log(size) / norm, i); + SegmentInfoAndLevel infoLevel = new SegmentInfoAndLevel(info, (float)Math.Log(size) / norm /*, 
i*/); // LUCENENET: index is never read levels.Add(infoLevel); if (IsVerbose) diff --git a/src/Lucene.Net/Index/MergePolicy.cs b/src/Lucene.Net/Index/MergePolicy.cs index e00218b12f..ef63e995de 100644 --- a/src/Lucene.Net/Index/MergePolicy.cs +++ b/src/Lucene.Net/Index/MergePolicy.cs @@ -213,18 +213,11 @@ public virtual SegmentCommitInfo Info /// public virtual DocMap GetDocMap(MergeState mergeState) { - return new DocMapAnonymousInnerClassHelper(this); + return new DocMapAnonymousInnerClassHelper(); } private class DocMapAnonymousInnerClassHelper : DocMap { - private readonly OneMerge outerInstance; - - public DocMapAnonymousInnerClassHelper(OneMerge outerInstance) - { - this.outerInstance = outerInstance; - } - public override int Map(int docID) { return docID; @@ -471,7 +464,7 @@ public virtual string SegString(Directory dir) #endif public class MergeException : Exception { - private Directory dir; + private readonly Directory dir; // LUCENENET: marked readonly /// /// Create a . @@ -598,7 +591,7 @@ public virtual object Clone() /// without passing it to , you should call /// . 
/// - public MergePolicy() + protected MergePolicy() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(DEFAULT_NO_CFS_RATIO, DEFAULT_MAX_CFS_SEGMENT_SIZE) { } diff --git a/src/Lucene.Net/Index/MergeState.cs b/src/Lucene.Net/Index/MergeState.cs index d9a7b68d10..8434200832 100644 --- a/src/Lucene.Net/Index/MergeState.cs +++ b/src/Lucene.Net/Index/MergeState.cs @@ -101,10 +101,10 @@ internal static DocMap Build(int maxDoc, IBits liveDocs) private class DocMapAnonymousInnerClassHelper : DocMap { - private int maxDoc; - private IBits liveDocs; - private MonotonicAppendingInt64Buffer docMap; - private int numDeletedDocs; + private readonly int maxDoc; + private readonly IBits liveDocs; + private readonly MonotonicAppendingInt64Buffer docMap; + private readonly int numDeletedDocs; public DocMapAnonymousInnerClassHelper(int maxDoc, IBits liveDocs, MonotonicAppendingInt64Buffer docMap, int numDeletedDocs) { diff --git a/src/Lucene.Net/Index/MultiDocValues.cs b/src/Lucene.Net/Index/MultiDocValues.cs index dd83fa0c93..a628b38600 100644 --- a/src/Lucene.Net/Index/MultiDocValues.cs +++ b/src/Lucene.Net/Index/MultiDocValues.cs @@ -46,14 +46,8 @@ namespace Lucene.Net.Index /// @lucene.experimental /// @lucene.internal /// - public class MultiDocValues + public static class MultiDocValues // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { - /// - /// No instantiation - private MultiDocValues() - { - } - /// /// Returns a for a reader's norms (potentially merging on-the-fly). 
/// @@ -106,8 +100,8 @@ public static NumericDocValues GetNormValues(IndexReader r, string field) private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues { - private NumericDocValues[] values; - private int[] starts; + private readonly NumericDocValues[] values; + private readonly int[] starts; public NumericDocValuesAnonymousInnerClassHelper(NumericDocValues[] values, int[] starts) { @@ -174,8 +168,8 @@ public static NumericDocValues GetNumericValues(IndexReader r, string field) private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues { - private NumericDocValues[] values; - private int[] starts; + private readonly NumericDocValues[] values; + private readonly int[] starts; public NumericDocValuesAnonymousInnerClassHelper2(NumericDocValues[] values, int[] starts) { @@ -303,8 +297,8 @@ public static BinaryDocValues GetBinaryValues(IndexReader r, string field) private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues { - private BinaryDocValues[] values; - private int[] starts; + private readonly BinaryDocValues[] values; + private readonly int[] starts; public BinaryDocValuesAnonymousInnerClassHelper(BinaryDocValues[] values, int[] starts) { diff --git a/src/Lucene.Net/Index/MultiFields.cs b/src/Lucene.Net/Index/MultiFields.cs index 11ff79cf84..7f070e54e1 100644 --- a/src/Lucene.Net/Index/MultiFields.cs +++ b/src/Lucene.Net/Index/MultiFields.cs @@ -254,8 +254,7 @@ public override IEnumerator GetEnumerator() public override Terms GetTerms(string field) { - Terms result; - if (terms.TryGetValue(field, out result) && result != null) + if (terms.TryGetValue(field, out Terms result) && result != null) { return result; } diff --git a/src/Lucene.Net/Index/MultiTermsEnum.cs b/src/Lucene.Net/Index/MultiTermsEnum.cs index ef420cded6..e007219d56 100644 --- a/src/Lucene.Net/Index/MultiTermsEnum.cs +++ b/src/Lucene.Net/Index/MultiTermsEnum.cs @@ -432,31 +432,10 @@ public override long TotalTermFreq public override DocsEnum 
Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { - MultiDocsEnum docsEnum; // Can only reuse if incoming enum is also a MultiDocsEnum - if (reuse != null && reuse is MultiDocsEnum) - { - docsEnum = (MultiDocsEnum)reuse; - // ... and was previously created w/ this MultiTermsEnum: - if (!docsEnum.CanReuse(this)) - { - docsEnum = new MultiDocsEnum(this, subs.Length); - } - } - else - { + // ... and was previously created w/ this MultiTermsEnum: + if (reuse is null || !(reuse is MultiDocsEnum docsEnum) || !docsEnum.CanReuse(this)) docsEnum = new MultiDocsEnum(this, subs.Length); - } - - MultiBits multiLiveDocs; - if (liveDocs is MultiBits) - { - multiLiveDocs = (MultiBits)liveDocs; - } - else - { - multiLiveDocs = null; - } int upto = 0; @@ -466,7 +445,7 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) IBits b; - if (multiLiveDocs != null) + if (liveDocs is MultiBits multiLiveDocs) { // optimize for common case: requested skip docs is a // congruent sub-slice of MultiBits: in this case, we @@ -523,31 +502,12 @@ public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { - MultiDocsAndPositionsEnum docsAndPositionsEnum; // Can only reuse if incoming enum is also a MultiDocsAndPositionsEnum - if (reuse != null && reuse is MultiDocsAndPositionsEnum) - { - docsAndPositionsEnum = (MultiDocsAndPositionsEnum)reuse; - // ... and was previously created w/ this MultiTermsEnum: - if (!docsAndPositionsEnum.CanReuse(this)) - { - docsAndPositionsEnum = new MultiDocsAndPositionsEnum(this, subs.Length); - } - } - else - { + // ... 
and was previously created w/ this MultiTermsEnum: + if (reuse is null || !(reuse is MultiDocsAndPositionsEnum docsAndPositionsEnum) || !docsAndPositionsEnum.CanReuse(this)) + docsAndPositionsEnum = new MultiDocsAndPositionsEnum(this, subs.Length); - } - MultiBits multiLiveDocs; - if (liveDocs is MultiBits) - { - multiLiveDocs = (MultiBits)liveDocs; - } - else - { - multiLiveDocs = null; - } int upto = 0; @@ -557,7 +517,7 @@ public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPos IBits b; - if (multiLiveDocs != null) + if (liveDocs is MultiBits multiLiveDocs) { // Optimize for common case: requested skip docs is a // congruent sub-slice of MultiBits: in this case, we diff --git a/src/Lucene.Net/Index/NormsConsumer.cs b/src/Lucene.Net/Index/NormsConsumer.cs index e8b038029b..80b342d5cf 100644 --- a/src/Lucene.Net/Index/NormsConsumer.cs +++ b/src/Lucene.Net/Index/NormsConsumer.cs @@ -98,7 +98,7 @@ internal override void StartDocument() internal override InvertedDocEndConsumerPerField AddField(DocInverterPerField docInverterPerField, FieldInfo fieldInfo) { - return new NormsConsumerPerField(docInverterPerField, fieldInfo, this); + return new NormsConsumerPerField(docInverterPerField, fieldInfo /* , this // LUCENENET: Not referenced */); } } } \ No newline at end of file diff --git a/src/Lucene.Net/Index/NormsConsumerPerField.cs b/src/Lucene.Net/Index/NormsConsumerPerField.cs index 045ee52803..3e18932b12 100644 --- a/src/Lucene.Net/Index/NormsConsumerPerField.cs +++ b/src/Lucene.Net/Index/NormsConsumerPerField.cs @@ -32,7 +32,7 @@ internal sealed class NormsConsumerPerField : InvertedDocEndConsumerPerField, IC private readonly FieldInvertState fieldState; private NumericDocValuesWriter consumer; - public NormsConsumerPerField(DocInverterPerField docInverterPerField, FieldInfo fieldInfo, NormsConsumer parent) + public NormsConsumerPerField(DocInverterPerField docInverterPerField, FieldInfo fieldInfo /*, NormsConsumer parent // LUCENENET: Not 
referenced */) { this.fieldInfo = fieldInfo; docState = docInverterPerField.docState; diff --git a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs index 96add831a7..6885b46f05 100644 --- a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs +++ b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs @@ -145,22 +145,19 @@ public override DocValuesFieldUpdates.Iterator GetIterator() PagedMutable docs = this.docs; PagedGrowableWriter values = this.values; FixedBitSet docsWithField = this.docsWithField; - new InPlaceMergeSorterAnonymousInnerClassHelper(this, docs, values, docsWithField).Sort(0, size); + new InPlaceMergeSorterAnonymousInnerClassHelper(docs, values, docsWithField).Sort(0, size); return new Iterator(size, values, docsWithField, docs); } private class InPlaceMergeSorterAnonymousInnerClassHelper : InPlaceMergeSorter { - private readonly NumericDocValuesFieldUpdates outerInstance; - - private PagedMutable docs; - private PagedGrowableWriter values; - private FixedBitSet docsWithField; + private readonly PagedMutable docs; + private readonly PagedGrowableWriter values; + private readonly FixedBitSet docsWithField; - public InPlaceMergeSorterAnonymousInnerClassHelper(NumericDocValuesFieldUpdates outerInstance, PagedMutable docs, PagedGrowableWriter values, FixedBitSet docsWithField) + public InPlaceMergeSorterAnonymousInnerClassHelper(PagedMutable docs, PagedGrowableWriter values, FixedBitSet docsWithField) { - this.outerInstance = outerInstance; this.docs = docs; this.values = values; this.docsWithField = docsWithField; diff --git a/src/Lucene.Net/Index/NumericDocValuesWriter.cs b/src/Lucene.Net/Index/NumericDocValuesWriter.cs index 658c03c366..4941203659 100644 --- a/src/Lucene.Net/Index/NumericDocValuesWriter.cs +++ b/src/Lucene.Net/Index/NumericDocValuesWriter.cs @@ -37,7 +37,7 @@ internal class NumericDocValuesWriter : DocValuesWriter { private const long MISSING = 0L; - private 
AppendingDeltaPackedInt64Buffer pending; + private readonly AppendingDeltaPackedInt64Buffer pending; // LUCENENET: marked readonly private readonly Counter iwBytesUsed; private long bytesUsed; private FixedBitSet docsWithField; diff --git a/src/Lucene.Net/Index/ParallelAtomicReader.cs b/src/Lucene.Net/Index/ParallelAtomicReader.cs index 739c76c654..354e113075 100644 --- a/src/Lucene.Net/Index/ParallelAtomicReader.cs +++ b/src/Lucene.Net/Index/ParallelAtomicReader.cs @@ -48,13 +48,8 @@ namespace Lucene.Net.Index /// public class ParallelAtomicReader : AtomicReader { - private void InitializeInstanceFields() - { - fields = new ParallelFields(this); - } - private readonly FieldInfos fieldInfos; - private ParallelFields fields; + private readonly ParallelFields fields = new ParallelFields(); private readonly AtomicReader[] parallelReaders, storedFieldsReaders; private readonly ISet completeReaderSet = new JCG.HashSet(IdentityEqualityComparer.Default); private readonly bool closeSubReaders; @@ -90,7 +85,6 @@ public ParallelAtomicReader(bool closeSubReaders, params AtomicReader[] readers) /// public ParallelAtomicReader(bool closeSubReaders, AtomicReader[] readers, AtomicReader[] storedFieldsReaders) { - InitializeInstanceFields(); this.closeSubReaders = closeSubReaders; if (readers.Length == 0 && storedFieldsReaders.Length > 0) { @@ -194,14 +188,11 @@ public override string ToString() // Single instance of this, per ParallelReader instance private sealed class ParallelFields : Fields { - private readonly ParallelAtomicReader outerInstance; - // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java internal readonly IDictionary fields = new JCG.SortedDictionary(StringComparer.Ordinal); - internal ParallelFields(ParallelAtomicReader outerInstance) + internal ParallelFields() { - this.outerInstance = outerInstance; } internal void AddField(string fieldName, Terms terms) @@ -216,8 +207,7 @@ public override IEnumerator GetEnumerator() public 
override Terms GetTerms(string field) { - Terms result; - fields.TryGetValue(field, out result); + fields.TryGetValue(field, out Terms result); return result; } @@ -282,7 +272,7 @@ public override Fields GetTermVectors(int docID) { if (fields == null) { - fields = new ParallelFields(this); + fields = new ParallelFields(); } fields.AddField(fieldName, vector); } @@ -328,44 +318,38 @@ protected internal override void DoClose() public override NumericDocValues GetNumericDocValues(string field) { EnsureOpen(); - AtomicReader reader; - return fieldToReader.TryGetValue(field, out reader) ? reader.GetNumericDocValues(field) : null; + return fieldToReader.TryGetValue(field, out AtomicReader reader) ? reader.GetNumericDocValues(field) : null; } public override BinaryDocValues GetBinaryDocValues(string field) { EnsureOpen(); - AtomicReader reader; - return fieldToReader.TryGetValue(field, out reader) ? reader.GetBinaryDocValues(field) : null; + return fieldToReader.TryGetValue(field, out AtomicReader reader) ? reader.GetBinaryDocValues(field) : null; } public override SortedDocValues GetSortedDocValues(string field) { EnsureOpen(); - AtomicReader reader; - return fieldToReader.TryGetValue(field, out reader) ? reader.GetSortedDocValues(field) : null; + return fieldToReader.TryGetValue(field, out AtomicReader reader) ? reader.GetSortedDocValues(field) : null; } public override SortedSetDocValues GetSortedSetDocValues(string field) { EnsureOpen(); - AtomicReader reader; - return fieldToReader.TryGetValue(field, out reader) ? reader.GetSortedSetDocValues(field) : null; + return fieldToReader.TryGetValue(field, out AtomicReader reader) ? reader.GetSortedSetDocValues(field) : null; } public override IBits GetDocsWithField(string field) { EnsureOpen(); - AtomicReader reader; - return fieldToReader.TryGetValue(field, out reader) ? reader.GetDocsWithField(field) : null; + return fieldToReader.TryGetValue(field, out AtomicReader reader) ? 
reader.GetDocsWithField(field) : null; } public override NumericDocValues GetNormValues(string field) { EnsureOpen(); - AtomicReader reader; NumericDocValues values = null; - if (fieldToReader.TryGetValue(field, out reader)) + if (fieldToReader.TryGetValue(field, out AtomicReader reader)) { values = reader.GetNormValues(field); } diff --git a/src/Lucene.Net/Index/ParallelPostingsArray.cs b/src/Lucene.Net/Index/ParallelPostingsArray.cs index dc1b31416b..95c837064b 100644 --- a/src/Lucene.Net/Index/ParallelPostingsArray.cs +++ b/src/Lucene.Net/Index/ParallelPostingsArray.cs @@ -24,7 +24,7 @@ namespace Lucene.Net.Index internal class ParallelPostingsArray { - internal static readonly int BYTES_PER_POSTING = 3 * RamUsageEstimator.NUM_BYTES_INT32; + internal const int BYTES_PER_POSTING = 3 * RamUsageEstimator.NUM_BYTES_INT32; internal readonly int size; internal readonly int[] textStarts; diff --git a/src/Lucene.Net/Index/PersistentSnapshotDeletionPolicy.cs b/src/Lucene.Net/Index/PersistentSnapshotDeletionPolicy.cs index 730df12a83..01acd85ac1 100644 --- a/src/Lucene.Net/Index/PersistentSnapshotDeletionPolicy.cs +++ b/src/Lucene.Net/Index/PersistentSnapshotDeletionPolicy.cs @@ -53,11 +53,11 @@ public class PersistentSnapshotDeletionPolicy : SnapshotDeletionPolicy { /// /// Prefix used for the save file. 
- public static readonly string SNAPSHOTS_PREFIX = "snapshots_"; + public const string SNAPSHOTS_PREFIX = "snapshots_"; - private static readonly int VERSION_START = 0; - private static readonly int VERSION_CURRENT = VERSION_START; - private static readonly string CODEC_NAME = "snapshots"; + private const int VERSION_START = 0; + private const int VERSION_CURRENT = VERSION_START; + private const string CODEC_NAME = "snapshots"; // The index writer which maintains the snapshots metadata private long nextWriteGen; diff --git a/src/Lucene.Net/Index/PrefixCodedTerms.cs b/src/Lucene.Net/Index/PrefixCodedTerms.cs index 7746d26c38..0b9deee14e 100644 --- a/src/Lucene.Net/Index/PrefixCodedTerms.cs +++ b/src/Lucene.Net/Index/PrefixCodedTerms.cs @@ -65,8 +65,8 @@ internal class PrefixCodedTermsIterator : IEnumerator { private readonly IndexInput input; private string field = ""; - private BytesRef bytes = new BytesRef(); - private Term term; + private readonly BytesRef bytes = new BytesRef(); // LUCENENET: marked readonly + private readonly Term term; // LUCENENET: marked readonly internal PrefixCodedTermsIterator(RAMFile buffer) { @@ -84,8 +84,18 @@ internal PrefixCodedTermsIterator(RAMFile buffer) public virtual Term Current => term; - public virtual void Dispose() + public void Dispose() { + Dispose(true); + GC.SuppressFinalize(true); + } + + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + input?.Dispose(); // LUCENENET specific - call dispose on input + } } object IEnumerator.Current => Current; @@ -132,18 +142,13 @@ public virtual void Reset() public class Builder { public Builder() - { - InitializeInstanceFields(); - } - - internal virtual void InitializeInstanceFields() { output = new RAMOutputStream(buffer); } - private RAMFile buffer = new RAMFile(); - private RAMOutputStream output; - private Term lastTerm = new Term(""); + private readonly RAMFile buffer = new RAMFile(); // LUCENENET: marked readonly + private readonly RAMOutputStream 
output; // LUCENENET: marked readonly + private readonly Term lastTerm = new Term(""); // LUCENENET: marked readonly /// /// add a term diff --git a/src/Lucene.Net/Index/ReadersAndUpdates.cs b/src/Lucene.Net/Index/ReadersAndUpdates.cs index eb3f8362b1..06ff9013f0 100644 --- a/src/Lucene.Net/Index/ReadersAndUpdates.cs +++ b/src/Lucene.Net/Index/ReadersAndUpdates.cs @@ -483,7 +483,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta // reader could be null e.g. for a just merged segment (from // IndexWriter.commitMergedDeletes). - SegmentReader reader = this.reader == null ? new SegmentReader(Info, writer.Config.ReaderTermsIndexDivisor, IOContext.READ_ONCE) : this.reader; + SegmentReader reader = this.reader ?? new SegmentReader(Info, writer.Config.ReaderTermsIndexDivisor, IOContext.READ_ONCE); try { // clone FieldInfos so that we can update their dvGen separately from @@ -607,8 +607,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta { foreach (KeyValuePair e in dvUpdates.numericDVUpdates) { - DocValuesFieldUpdates updates; - if (!mergingDVUpdates.TryGetValue(e.Key, out updates)) + if (!mergingDVUpdates.TryGetValue(e.Key, out DocValuesFieldUpdates updates)) { mergingDVUpdates[e.Key] = e.Value; } @@ -619,8 +618,7 @@ public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Conta } foreach (KeyValuePair e in dvUpdates.binaryDVUpdates) { - DocValuesFieldUpdates updates; - if (!mergingDVUpdates.TryGetValue(e.Key, out updates)) + if (!mergingDVUpdates.TryGetValue(e.Key, out DocValuesFieldUpdates updates)) { mergingDVUpdates[e.Key] = e.Value; } diff --git a/src/Lucene.Net/Index/SegmentCoreReaders.cs b/src/Lucene.Net/Index/SegmentCoreReaders.cs index b4d896d89f..4eacc17099 100644 --- a/src/Lucene.Net/Index/SegmentCoreReaders.cs +++ b/src/Lucene.Net/Index/SegmentCoreReaders.cs @@ -164,10 +164,7 @@ internal NumericDocValues GetNormValues(FieldInfo fi) IDictionary normFields = 
normsLocal.Value; - object ret; - normFields.TryGetValue(fi.Name, out ret); - var norms = ret as NumericDocValues; - if (norms == null) + if (!normFields.TryGetValue(fi.Name, out object ret) || !(ret is NumericDocValues norms)) { norms = normsProducer.GetNumeric(fi); normFields[fi.Name] = norms; diff --git a/src/Lucene.Net/Index/SegmentDocValues.cs b/src/Lucene.Net/Index/SegmentDocValues.cs index 126274ca76..4a6931e177 100644 --- a/src/Lucene.Net/Index/SegmentDocValues.cs +++ b/src/Lucene.Net/Index/SegmentDocValues.cs @@ -56,7 +56,7 @@ private RefCount NewDocValuesProducer(SegmentCommitInfo si, I private class RefCountHelper : RefCount { private readonly SegmentDocValues outerInstance; - private long? gen; + private readonly long? gen; // LUCENENET: marked readonly public RefCountHelper(SegmentDocValues outerInstance, DocValuesProducer fieldsProducer, long? gen) : base(fieldsProducer) @@ -81,8 +81,7 @@ internal DocValuesProducer GetDocValuesProducer(long? gen, SegmentCommitInfo si, { lock (this) { - RefCount dvp; - if (!(genDVProducers.TryGetValue(gen, out dvp))) + if (!genDVProducers.TryGetValue(gen, out RefCount dvp)) { dvp = NewDocValuesProducer(si, context, dir, dvFormat, gen, infos, termsIndexDivisor); if (Debugging.AssertsEnabled) Debugging.Assert(dvp != null); diff --git a/src/Lucene.Net/Index/SegmentInfo.cs b/src/Lucene.Net/Index/SegmentInfo.cs index 3998629835..f310bb14b0 100644 --- a/src/Lucene.Net/Index/SegmentInfo.cs +++ b/src/Lucene.Net/Index/SegmentInfo.cs @@ -196,7 +196,7 @@ public override string ToString() public string ToString(Directory dir, int delCount) { StringBuilder s = new StringBuilder(); - s.Append(Name).Append('(').Append(version == null ? "?" : version).Append(')').Append(':'); + s.Append(Name).Append('(').Append(version ?? "?").Append(')').Append(':'); char cfs = UseCompoundFile ? 
'c' : 'C'; s.Append(cfs); @@ -226,9 +226,8 @@ public override bool Equals(object obj) { return true; } - if (obj is SegmentInfo) + if (obj is SegmentInfo other) { - SegmentInfo other = (SegmentInfo)obj; return other.Dir == Dir && other.Name.Equals(Name, StringComparison.Ordinal); } else @@ -289,7 +288,7 @@ public void AddFile(string file) setFiles.Add(file); } - private void CheckFileNames(ICollection files) + private static void CheckFileNames(ICollection files) // LUCENENET: CA1822: Mark members as static { Regex r = IndexFileNames.CODEC_FILE_PATTERN; foreach (string file in files) @@ -313,8 +312,7 @@ public string GetAttribute(string key) } else { - string attribute; - attributes.TryGetValue(key, out attribute); + attributes.TryGetValue(key, out string attribute); return attribute; } } diff --git a/src/Lucene.Net/Index/SegmentInfos.cs b/src/Lucene.Net/Index/SegmentInfos.cs index 15be276221..bdfc679df2 100644 --- a/src/Lucene.Net/Index/SegmentInfos.cs +++ b/src/Lucene.Net/Index/SegmentInfos.cs @@ -128,14 +128,14 @@ public sealed class SegmentInfos : IEnumerable /// /// The file format version for the segments_N codec header, since 4.8+ - public static readonly int VERSION_48 = 2; + public const int VERSION_48 = 2; // Used for the segments.gen file only! // Whenever you add a new format, make it 1 smaller (negative version logic)! - private static readonly int FORMAT_SEGMENTS_GEN_47 = -2; + private const int FORMAT_SEGMENTS_GEN_47 = -2; - private static readonly int FORMAT_SEGMENTS_GEN_CHECKSUM = -3; - private static readonly int FORMAT_SEGMENTS_GEN_START = FORMAT_SEGMENTS_GEN_47; + private const int FORMAT_SEGMENTS_GEN_CHECKSUM = -3; + private const int FORMAT_SEGMENTS_GEN_START = FORMAT_SEGMENTS_GEN_47; /// /// Current format of segments.gen @@ -799,7 +799,7 @@ public abstract class FindSegmentsFile /// /// Sole constructor. 
- public FindSegmentsFile(Directory directory) + protected FindSegmentsFile(Directory directory) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.directory = directory; } @@ -826,7 +826,7 @@ public virtual object Run(IndexCommit commit) return DoBody(commit.SegmentsFileName); } - string segmentFileName = null; + string segmentFileName/* = null*/; // LUCENENET: IDE0059: Remove unnecessary value assignment long lastGen = -1; long gen = 0; int genLookaheadCount = 0; @@ -860,12 +860,10 @@ public virtual object Run(IndexCommit commit) // as there is no stale caching on the directory // contents (NOTE: NFS clients often have such stale // caching): - string[] files = null; + string[] files = directory.ListAll(); // LUCENENET: IDE0059: Remove unnecessary value assignment long genA = -1; - files = directory.ListAll(); - if (files != null) { genA = GetLastCommitGeneration(files); @@ -1041,9 +1039,7 @@ public virtual object Run(IndexCommit commit) directory.OpenInput(prevSegmentFileName, IOContext.DEFAULT).Dispose(); prevExists = true; } -#pragma warning disable 168 - catch (IOException ioe) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { prevExists = false; } diff --git a/src/Lucene.Net/Index/SegmentMerger.cs b/src/Lucene.Net/Index/SegmentMerger.cs index 3846f3ac32..34d617ec1b 100644 --- a/src/Lucene.Net/Index/SegmentMerger.cs +++ b/src/Lucene.Net/Index/SegmentMerger.cs @@ -330,9 +330,8 @@ private void SetMatchingSegmentReaders() // required? But... this'd also require exposing // bulk-copy (TVs and stored fields) API in foreign // readers.. 
- if (reader is SegmentReader) + if (reader is SegmentReader segmentReader) { - SegmentReader segmentReader = (SegmentReader)reader; bool same = true; FieldInfos segmentFieldInfos = segmentReader.FieldInfos; foreach (FieldInfo fi in segmentFieldInfos) diff --git a/src/Lucene.Net/Index/SegmentReader.cs b/src/Lucene.Net/Index/SegmentReader.cs index 86e8cb1e54..20c3f920c8 100644 --- a/src/Lucene.Net/Index/SegmentReader.cs +++ b/src/Lucene.Net/Index/SegmentReader.cs @@ -185,7 +185,7 @@ internal SegmentReader(SegmentCommitInfo si, SegmentReader sr, IBits liveDocs, i // initialize the per-field DocValuesProducer private void InitDocValuesProducers(Codec codec) { - Directory dir = core.cfsReader != null ? core.cfsReader : si.Info.Dir; + Directory dir = core.cfsReader ?? si.Info.Dir; DocValuesFormat dvFormat = codec.DocValuesFormat; IDictionary> genInfos = GetGenInfos(); @@ -255,9 +255,7 @@ internal static FieldInfos ReadFieldInfos(SegmentCommitInfo info) continue; } long gen = fi.DocValuesGen; - IList infos; - genInfos.TryGetValue(gen, out infos); - if (infos == null) + if (!genInfos.TryGetValue(gen, out IList infos) || infos == null) { infos = new List(); genInfos[gen] = infos; @@ -456,14 +454,9 @@ public override NumericDocValues GetNumericDocValues(string field) IDictionary dvFields = docValuesLocal.Value; - NumericDocValues dvs; - object dvsDummy; - dvFields.TryGetValue(field, out dvsDummy); - dvs = (NumericDocValues)dvsDummy; - if (dvs == null) + if (!dvFields.TryGetValue(field, out object dvsDummy) || !(dvsDummy is NumericDocValues dvs)) { - DocValuesProducer dvProducer; - dvProducersByField.TryGetValue(field, out dvProducer); + dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); dvs = dvProducer.GetNumeric(fi); dvFields[field] = dvs; @@ -489,11 +482,9 @@ public override IBits GetDocsWithField(string field) IDictionary dvFields = docsWithFieldLocal.Value; - 
dvFields.TryGetValue(field, out IBits dvs); - if (dvs == null) + if (!dvFields.TryGetValue(field, out IBits dvs) || dvs == null) { - DocValuesProducer dvProducer; - dvProducersByField.TryGetValue(field, out dvProducer); + dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); dvs = dvProducer.GetDocsWithField(fi); dvFields[field] = dvs; @@ -513,11 +504,7 @@ public override BinaryDocValues GetBinaryDocValues(string field) IDictionary dvFields = docValuesLocal.Value; - object ret; - BinaryDocValues dvs; - dvFields.TryGetValue(field, out ret); - dvs = (BinaryDocValues)ret; - if (dvs == null) + if (!dvFields.TryGetValue(field, out object ret) || !(ret is BinaryDocValues dvs)) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); @@ -539,11 +526,7 @@ public override SortedDocValues GetSortedDocValues(string field) IDictionary dvFields = docValuesLocal.Value; - SortedDocValues dvs; - object ret; - dvFields.TryGetValue(field, out ret); - dvs = (SortedDocValues)ret; - if (dvs == null) + if (!dvFields.TryGetValue(field, out object ret) || !(ret is SortedDocValues dvs)) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); @@ -565,11 +548,7 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) IDictionary dvFields = docValuesLocal.Value; - object ret; - SortedSetDocValues dvs; - dvFields.TryGetValue(field, out ret); - dvs = (SortedSetDocValues)ret; - if (dvs == null) + if (!dvFields.TryGetValue(field, out object ret) || !(ret is SortedSetDocValues dvs)) { dvProducersByField.TryGetValue(field, out DocValuesProducer dvProducer); if (Debugging.AssertsEnabled) Debugging.Assert(dvProducer != null); diff --git a/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs 
b/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs index d720c27a9c..887030c654 100644 --- a/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs +++ b/src/Lucene.Net/Index/SlowCompositeReaderWrapper.cs @@ -53,8 +53,7 @@ public sealed class SlowCompositeReaderWrapper : AtomicReader /// public static AtomicReader Wrap(IndexReader reader) { - CompositeReader compositeReader = reader as CompositeReader; - if (compositeReader != null) + if (reader is CompositeReader compositeReader) { return new SlowCompositeReaderWrapper(compositeReader); } @@ -116,8 +115,7 @@ public override SortedDocValues GetSortedDocValues(string field) { // uncached, or not a multi dv SortedDocValues dv = MultiDocValues.GetSortedValues(@in, field); - MultiSortedDocValues docValues = dv as MultiSortedDocValues; - if (docValues != null) + if (dv is MultiSortedDocValues docValues) { map = docValues.Mapping; if (map.owner == CoreCacheKey) @@ -157,8 +155,7 @@ public override SortedSetDocValues GetSortedSetDocValues(string field) { // uncached, or not a multi dv SortedSetDocValues dv = MultiDocValues.GetSortedSetValues(@in, field); - MultiSortedSetDocValues docValues = dv as MultiSortedSetDocValues; - if (docValues != null) + if (dv is MultiSortedSetDocValues docValues) { map = docValues.Mapping; if (map.owner == CoreCacheKey) diff --git a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs index dc8ea9be08..1ef2218f30 100644 --- a/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs +++ b/src/Lucene.Net/Index/SnapshotDeletionPolicy.cs @@ -150,16 +150,15 @@ protected internal virtual void IncRef(IndexCommit ic) lock (this) { long gen = ic.Generation; - int refCount; int refCountInt; - if (!m_refCounts.TryGetValue(gen, out refCount)) + if (!m_refCounts.TryGetValue(gen, out int refCount)) { m_indexCommits[gen] = m_lastCommit; refCountInt = 0; } else { - refCountInt = (int)refCount; + refCountInt = refCount; } m_refCounts[gen] = refCountInt + 1; } diff --git 
a/src/Lucene.Net/Index/SortedDocValuesWriter.cs b/src/Lucene.Net/Index/SortedDocValuesWriter.cs index 54842edea3..57c4cbd016 100644 --- a/src/Lucene.Net/Index/SortedDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedDocValuesWriter.cs @@ -32,7 +32,7 @@ namespace Lucene.Net.Index internal class SortedDocValuesWriter : DocValuesWriter { internal readonly BytesRefHash hash; - private AppendingDeltaPackedInt64Buffer pending; + private readonly AppendingDeltaPackedInt64Buffer pending; // LUCENENET: marked readonly private readonly Counter iwBytesUsed; private long bytesUsed; // this currently only tracks differences in 'pending' private readonly FieldInfo fieldInfo; diff --git a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs index e9af287f21..fc2484eeaa 100644 --- a/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs +++ b/src/Lucene.Net/Index/SortedSetDocValuesWriter.cs @@ -41,8 +41,8 @@ namespace Lucene.Net.Index internal class SortedSetDocValuesWriter : DocValuesWriter { internal readonly BytesRefHash hash; - private AppendingPackedInt64Buffer pending; // stream of all termIDs - private AppendingDeltaPackedInt64Buffer pendingCounts; // termIDs per doc + private readonly AppendingPackedInt64Buffer pending; // stream of all termIDs // LUCENENET: marked readonly + private readonly AppendingDeltaPackedInt64Buffer pendingCounts; // termIDs per doc // LUCENENET: marked readonly private readonly Counter iwBytesUsed; private long bytesUsed; // this only tracks differences in 'pending' and 'pendingCounts' private readonly FieldInfo fieldInfo; diff --git a/src/Lucene.Net/Index/StandardDirectoryReader.cs b/src/Lucene.Net/Index/StandardDirectoryReader.cs index ed0ee8be37..9c022dcec0 100644 --- a/src/Lucene.Net/Index/StandardDirectoryReader.cs +++ b/src/Lucene.Net/Index/StandardDirectoryReader.cs @@ -156,9 +156,7 @@ internal static DirectoryReader Open(IndexWriter writer, SegmentInfos infos, boo { r.DecRef(); } -#pragma warning 
disable 168 - catch (Exception th) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { // ignore any exception that is thrown here to not mask any original // exception. @@ -195,9 +193,7 @@ private static DirectoryReader Open(Directory directory, SegmentInfos infos, ILi for (int i = infos.Count - 1; i >= 0; i--) { // find SegmentReader for this segment - int? oldReaderIndex; - segmentReaders.TryGetValue(infos.Info(i).Info.Name, out oldReaderIndex); - if (oldReaderIndex == null) + if (!segmentReaders.TryGetValue(infos.Info(i).Info.Name, out int? oldReaderIndex) || oldReaderIndex == null) { // this is a new segment, no old SegmentReader can be reused newReaders[i] = null; @@ -487,9 +483,7 @@ protected internal override void DoClose() { writer.DecRefDeleter(segmentInfos); } -#pragma warning disable 168 - catch (ObjectDisposedException ex) -#pragma warning restore 168 + catch (ObjectDisposedException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // this is OK, it just means our original writer was // closed before we were, and this may leave some diff --git a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs index f7875353e1..754f39b892 100644 --- a/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs +++ b/src/Lucene.Net/Index/TermVectorsConsumerPerField.cs @@ -227,7 +227,7 @@ internal void FinishDocument() internal void ShrinkHash() { - termsHashPerField.ShrinkHash(maxNumPostings); + termsHashPerField.ShrinkHash(/* maxNumPostings // LUCENENET: Not used */); maxNumPostings = 0; } diff --git a/src/Lucene.Net/Index/Terms.cs b/src/Lucene.Net/Index/Terms.cs index e2be239ac1..b016ff2acb 100644 --- a/src/Lucene.Net/Index/Terms.cs +++ b/src/Lucene.Net/Index/Terms.cs @@ -93,20 +93,17 @@ public virtual TermsEnum Intersect(CompiledAutomaton compiled, BytesRef startTer } else { - return new AutomatonTermsEnumAnonymousInnerClassHelper(this, 
GetEnumerator(), compiled, startTerm); + return new AutomatonTermsEnumAnonymousInnerClassHelper(GetEnumerator(), compiled, startTerm); } } private class AutomatonTermsEnumAnonymousInnerClassHelper : AutomatonTermsEnum { - private readonly Terms outerInstance; + private readonly BytesRef startTerm; - private BytesRef startTerm; - - public AutomatonTermsEnumAnonymousInnerClassHelper(Terms outerInstance, Lucene.Net.Index.TermsEnum iterator, CompiledAutomaton compiled, BytesRef startTerm) + public AutomatonTermsEnumAnonymousInnerClassHelper(TermsEnum iterator, CompiledAutomaton compiled, BytesRef startTerm) : base(iterator, compiled) { - this.outerInstance = outerInstance; this.startTerm = startTerm; } diff --git a/src/Lucene.Net/Index/TermsEnum.cs b/src/Lucene.Net/Index/TermsEnum.cs index f0b6de4e82..8937433804 100644 --- a/src/Lucene.Net/Index/TermsEnum.cs +++ b/src/Lucene.Net/Index/TermsEnum.cs @@ -268,18 +268,11 @@ public DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnu /// public virtual TermState GetTermState() // LUCENENET NOTE: Renamed from TermState() { - return new TermStateAnonymousInnerClassHelper(this); + return new TermStateAnonymousInnerClassHelper(); } private class TermStateAnonymousInnerClassHelper : TermState { - private readonly TermsEnum outerInstance; - - public TermStateAnonymousInnerClassHelper(TermsEnum outerInstance) - { - this.outerInstance = outerInstance; - } - public override void CopyFrom(TermState other) { throw new NotSupportedException(); diff --git a/src/Lucene.Net/Index/TermsHashPerField.cs b/src/Lucene.Net/Index/TermsHashPerField.cs index fd2ba6cbb6..5d7429c208 100644 --- a/src/Lucene.Net/Index/TermsHashPerField.cs +++ b/src/Lucene.Net/Index/TermsHashPerField.cs @@ -84,7 +84,7 @@ public TermsHashPerField(DocInverterPerField docInverterPerField, TermsHash term } } - internal void ShrinkHash(int targetSize) + internal void ShrinkHash(/* int targetSize // LUCENENET: Not referenced */) { // Fully free the 
bytesHash on each flush but keep the pool untouched // bytesHash.clear will clear the ByteStartArray and in turn the ParallelPostingsArray too diff --git a/src/Lucene.Net/Index/TieredMergePolicy.cs b/src/Lucene.Net/Index/TieredMergePolicy.cs index c086a3fc7d..5df6a74364 100644 --- a/src/Lucene.Net/Index/TieredMergePolicy.cs +++ b/src/Lucene.Net/Index/TieredMergePolicy.cs @@ -545,20 +545,17 @@ protected virtual MergeScore Score(IList candidate, bool hitT double finalMergeScore = mergeScore; - return new MergeScoreAnonymousInnerClassHelper(this, skew, nonDelRatio, finalMergeScore); + return new MergeScoreAnonymousInnerClassHelper(skew, nonDelRatio, finalMergeScore); } private class MergeScoreAnonymousInnerClassHelper : MergeScore { - private readonly TieredMergePolicy outerInstance; - - private double skew; - private double nonDelRatio; - private double finalMergeScore; + private readonly double skew; + private readonly double nonDelRatio; + private readonly double finalMergeScore; - public MergeScoreAnonymousInnerClassHelper(TieredMergePolicy outerInstance, double skew, double nonDelRatio, double finalMergeScore) + public MergeScoreAnonymousInnerClassHelper(double skew, double nonDelRatio, double finalMergeScore) { - this.outerInstance = outerInstance; this.skew = skew; this.nonDelRatio = nonDelRatio; this.finalMergeScore = finalMergeScore; @@ -584,8 +581,7 @@ public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxS bool? segmentIsOriginal = false; foreach (SegmentCommitInfo info in infos.Segments) { - bool? isOriginal; - if (segmentsToMerge.TryGetValue(info, out isOriginal)) + if (segmentsToMerge.TryGetValue(info, out bool? 
isOriginal)) { segmentIsOriginal = isOriginal; if (!merging.Contains(info)) diff --git a/src/Lucene.Net/Index/TwoPhaseCommitTool.cs b/src/Lucene.Net/Index/TwoPhaseCommitTool.cs index f20aa47442..25f0fb3971 100644 --- a/src/Lucene.Net/Index/TwoPhaseCommitTool.cs +++ b/src/Lucene.Net/Index/TwoPhaseCommitTool.cs @@ -126,10 +126,9 @@ private static void Rollback(params ITwoPhaseCommit[] objects) { tpc.Rollback(); } -#pragma warning disable 168 - catch (Exception t) -#pragma warning restore 168 + catch (Exception) // LUCENENET: IDE0059: Remove unnecessary value assignment { + // ignore } } } diff --git a/src/Lucene.Net/Search/BooleanScorer.cs b/src/Lucene.Net/Search/BooleanScorer.cs index 563ae3551e..d1b997a61c 100644 --- a/src/Lucene.Net/Search/BooleanScorer.cs +++ b/src/Lucene.Net/Search/BooleanScorer.cs @@ -62,8 +62,8 @@ internal sealed class BooleanScorer : BulkScorer { private sealed class BooleanScorerCollector : ICollector { - private BucketTable bucketTable; - private int mask; + private readonly BucketTable bucketTable; // LUCENENET: marked readonly + private readonly int mask; // LUCENENET: marked readonly private Scorer scorer; public BooleanScorerCollector(int mask, BucketTable bucketTable) @@ -133,8 +133,8 @@ public Bucket() /// A simple hash table of document scores within a range. internal sealed class BucketTable { - public static readonly int SIZE = 1 << 11; - public static readonly int MASK = SIZE - 1; + public const int SIZE = 1 << 11; + public const int MASK = SIZE - 1; internal readonly Bucket[] buckets = new Bucket[SIZE]; internal Bucket first = null; // head of valid list @@ -154,7 +154,7 @@ public ICollector NewCollector(int mask) return new BooleanScorerCollector(mask, this); } - public int Count => SIZE; // LUCENENET NOTE: This was size() in Lucene. + public static int Count => SIZE; // LUCENENET NOTE: This was size() in Lucene. 
// LUCENENET: CA1822: Mark members as static } internal sealed class SubScorer @@ -185,8 +185,8 @@ public SubScorer(BulkScorer scorer, bool required, bool prohibited, ICollector c } } - private SubScorer scorers = null; - private BucketTable bucketTable = new BucketTable(); + private readonly SubScorer scorers = null; // LUCENENET: marked readonly + private readonly BucketTable bucketTable = new BucketTable(); // LUCENENET: marked readonly private readonly float[] coordFactors; // TODO: re-enable this if BQ ever sends us required clauses @@ -199,12 +199,12 @@ public SubScorer(BulkScorer scorer, bool required, bool prohibited, ICollector c // Any time a prohibited clause matches we set bit 0: private const int PROHIBITED_MASK = 1; - private readonly Weight weight; + //private readonly Weight weight; // LUCENENET: Never read internal BooleanScorer(BooleanWeight weight, bool disableCoord, int minNrShouldMatch, IList optionalScorers, IList prohibitedScorers, int maxCoord) { this.minNrShouldMatch = minNrShouldMatch; - this.weight = weight; + //this.weight = weight; // LUCENENET: Never read foreach (BulkScorer scorer in optionalScorers) { diff --git a/src/Lucene.Net/Search/BooleanScorer2.cs b/src/Lucene.Net/Search/BooleanScorer2.cs index 3daf92ecbe..b5499114be 100644 --- a/src/Lucene.Net/Search/BooleanScorer2.cs +++ b/src/Lucene.Net/Search/BooleanScorer2.cs @@ -40,13 +40,10 @@ internal class BooleanScorer2 : Scorer private class Coordinator { - private readonly BooleanScorer2 outerInstance; - internal readonly float[] coordFactors; internal Coordinator(BooleanScorer2 outerInstance, int maxCoord, bool disableCoord) { - this.outerInstance = outerInstance; coordFactors = new float[outerInstance.optionalScorers.Count + outerInstance.requiredScorers.Count + 1]; for (int i = 0; i < coordFactors.Length; i++) { @@ -107,7 +104,7 @@ public BooleanScorer2(BooleanWeight weight, bool disableCoord, int minNrShouldMa prohibitedScorers = prohibited; coordinator = new Coordinator(this, 
maxCoord, disableCoord); - countingSumScorer = MakeCountingSumScorer(disableCoord); + countingSumScorer = MakeCountingSumScorer(/* disableCoord // LUCENENET: Not referenced */); } /// @@ -183,7 +180,7 @@ private class MinShouldMatchSumScorerAnonymousInnerClassHelper : MinShouldMatchS { private readonly BooleanScorer2 outerInstance; - public MinShouldMatchSumScorerAnonymousInnerClassHelper(BooleanScorer2 outerInstance, Lucene.Net.Search.Weight weight, IList scorers, int minNrShouldMatch) + public MinShouldMatchSumScorerAnonymousInnerClassHelper(BooleanScorer2 outerInstance, Weight weight, IList scorers, int minNrShouldMatch) : base(weight, scorers, minNrShouldMatch) { this.outerInstance = outerInstance; @@ -213,7 +210,7 @@ public override float GetScore() } } - private Scorer CountingConjunctionSumScorer(bool disableCoord, IList requiredScorers) + private Scorer CountingConjunctionSumScorer(/* bool disableCoord, // LUCENENET: Not Referenced */ IList requiredScorers) { // each scorer from the list counted as a single matcher int requiredNrMatchers = requiredScorers.Count; @@ -224,7 +221,7 @@ private class ConjunctionScorerAnonymousInnerClassHelper : ConjunctionScorer { private readonly BooleanScorer2 outerInstance; - private int requiredNrMatchers; + private readonly int requiredNrMatchers; public ConjunctionScorerAnonymousInnerClassHelper(BooleanScorer2 outerInstance, Weight weight, Scorer[] scorers, int requiredNrMatchers) : base(weight, scorers) @@ -261,7 +258,7 @@ public override float GetScore() } } - private Scorer DualConjunctionSumScorer(bool disableCoord, Scorer req1, Scorer req2) // non counting. + private Scorer DualConjunctionSumScorer(/* bool disableCoord, // LUCENENET: Not Referenced */ Scorer req1, Scorer req2) // non counting. 
{ return new ConjunctionScorer(m_weight, new Scorer[] { req1, req2 }); // All scorers match, so defaultSimilarity always has 1 as @@ -274,14 +271,14 @@ private Scorer DualConjunctionSumScorer(bool disableCoord, Scorer req1, Scorer r /// Returns the scorer to be used for match counting and score summing. /// Uses requiredScorers, optionalScorers and prohibitedScorers. /// - private Scorer MakeCountingSumScorer(bool disableCoord) // each scorer counted as a single matcher + private Scorer MakeCountingSumScorer(/* bool disableCoord // LUCENENET: Not Referenced */) // each scorer counted as a single matcher { return (requiredScorers.Count == 0) - ? MakeCountingSumScorerNoReq(disableCoord) - : MakeCountingSumScorerSomeReq(disableCoord); + ? MakeCountingSumScorerNoReq(/* disableCoord // LUCENENET: Not Referenced */) + : MakeCountingSumScorerSomeReq(/* disableCoord // LUCENENET: Not Referenced */); } - private Scorer MakeCountingSumScorerNoReq(bool disableCoord) // No required scorers + private Scorer MakeCountingSumScorerNoReq(/* bool disableCoord // LUCENENET: Not Referenced */) // No required scorers { // minNrShouldMatch optional scorers are required, but at least 1 int nrOptRequired = (minNrShouldMatch < 1) ? 1 : minNrShouldMatch; @@ -296,25 +293,25 @@ private Scorer MakeCountingSumScorerNoReq(bool disableCoord) // No required scor } else { - requiredCountingSumScorer = CountingConjunctionSumScorer(disableCoord, optionalScorers); + requiredCountingSumScorer = CountingConjunctionSumScorer(/* disableCoord, // LUCENENET: Not Referenced */ optionalScorers); } return AddProhibitedScorers(requiredCountingSumScorer); } - private Scorer MakeCountingSumScorerSomeReq(bool disableCoord) // At least one required scorer. + private Scorer MakeCountingSumScorerSomeReq(/* bool disableCoord // LUCENENET: Not Referenced */) // At least one required scorer. { if (optionalScorers.Count == minNrShouldMatch) // all optional scorers also required. 
{ List allReq = new List(requiredScorers); allReq.AddRange(optionalScorers); - return AddProhibitedScorers(CountingConjunctionSumScorer(disableCoord, allReq)); + return AddProhibitedScorers(CountingConjunctionSumScorer(/* disableCoord, // LUCENENET: Not Referenced */ allReq)); } // optionalScorers.size() > minNrShouldMatch, and at least one required scorer else { - Scorer requiredCountingSumScorer = requiredScorers.Count == 1 ? new SingleMatchScorer(this, requiredScorers[0]) : CountingConjunctionSumScorer(disableCoord, requiredScorers); + Scorer requiredCountingSumScorer = requiredScorers.Count == 1 ? new SingleMatchScorer(this, requiredScorers[0]) : CountingConjunctionSumScorer(/* disableCoord, // LUCENENET: Not Referenced */ requiredScorers); if (minNrShouldMatch > 0) // use a required disjunction scorer over the optional scorers { - return AddProhibitedScorers(DualConjunctionSumScorer(disableCoord, requiredCountingSumScorer, CountingDisjunctionSumScorer(optionalScorers, minNrShouldMatch))); // non counting + return AddProhibitedScorers(DualConjunctionSumScorer(/* disableCoord, // LUCENENET: Not Referenced */ requiredCountingSumScorer, CountingDisjunctionSumScorer(optionalScorers, minNrShouldMatch))); // non counting } // minNrShouldMatch == 0 else { diff --git a/src/Lucene.Net/Search/CachingCollector.cs b/src/Lucene.Net/Search/CachingCollector.cs index d46ce9f821..0c74d23542 100644 --- a/src/Lucene.Net/Search/CachingCollector.cs +++ b/src/Lucene.Net/Search/CachingCollector.cs @@ -380,7 +380,7 @@ public static CachingCollector Create(bool acceptDocsOutOfOrder, bool cacheScore private class CollectorAnonymousInnerClassHelper : ICollector { - private bool acceptDocsOutOfOrder; + private readonly bool acceptDocsOutOfOrder; public CollectorAnonymousInnerClassHelper(bool acceptDocsOutOfOrder) { diff --git a/src/Lucene.Net/Search/CachingWrapperFilter.cs b/src/Lucene.Net/Search/CachingWrapperFilter.cs index daf2a0d344..9e990a7836 100644 --- 
a/src/Lucene.Net/Search/CachingWrapperFilter.cs +++ b/src/Lucene.Net/Search/CachingWrapperFilter.cs @@ -138,11 +138,8 @@ public override string ToString() public override bool Equals(object o) { - var other = o as CachingWrapperFilter; - if (other == null) - { - return false; - } + if (o is null) return false; + if (!(o is CachingWrapperFilter other)) return false; return _filter.Equals(other._filter); } diff --git a/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs b/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs index b263eeafd7..15a3457b2b 100644 --- a/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs +++ b/src/Lucene.Net/Search/ConstantScoreAutoRewrite.cs @@ -135,14 +135,9 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) internal sealed class CutOffTermCollector : TermCollector { - private void InitializeInstanceFields() - { - pendingTerms = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectAllocator()), 16, array); - } - internal CutOffTermCollector(int docCountCutoff, int termCountLimit) { - InitializeInstanceFields(); + pendingTerms = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectAllocator()), 16, array); this.docCountCutoff = docCountCutoff; this.termCountLimit = termCountLimit; } diff --git a/src/Lucene.Net/Search/ConstantScoreQuery.cs b/src/Lucene.Net/Search/ConstantScoreQuery.cs index 0bbaa4c7f2..f97a7225b1 100644 --- a/src/Lucene.Net/Search/ConstantScoreQuery.cs +++ b/src/Lucene.Net/Search/ConstantScoreQuery.cs @@ -88,9 +88,8 @@ public override Query Rewrite(IndexReader reader) // Fix outdated usage pattern from Lucene 2.x/early-3.x: // because ConstantScoreQuery only accepted filters, // QueryWrapperFilter was used to wrap queries. 
- if (m_filter is QueryWrapperFilter) + if (m_filter is QueryWrapperFilter qwf) { - QueryWrapperFilter qwf = (QueryWrapperFilter)m_filter; Query rewritten = new ConstantScoreQuery(qwf.Query.Rewrite(reader)); rewritten.Boost = this.Boost; return rewritten; @@ -122,7 +121,7 @@ protected class ConstantWeight : Weight public ConstantWeight(ConstantScoreQuery outerInstance, IndexSearcher searcher) { this.outerInstance = outerInstance; - this.innerWeight = (outerInstance.m_query == null) ? null : outerInstance.m_query.CreateWeight(searcher); + this.innerWeight = outerInstance.m_query?.CreateWeight(searcher); } public override Query Query => outerInstance; @@ -257,9 +256,9 @@ private class CollectorAnonymousInnerClassHelper : ICollector { private readonly ConstantBulkScorer outerInstance; - private ICollector collector; + private readonly ICollector collector; - public CollectorAnonymousInnerClassHelper(ConstantBulkScorer outerInstance, Lucene.Net.Search.ICollector collector) + public CollectorAnonymousInnerClassHelper(ConstantBulkScorer outerInstance, ICollector collector) { this.outerInstance = outerInstance; this.collector = collector; @@ -359,9 +358,8 @@ public override bool Equals(object o) { return false; } - if (o is ConstantScoreQuery) + if (o is ConstantScoreQuery other) { - ConstantScoreQuery other = (ConstantScoreQuery)o; return ((this.m_filter == null) ? other.m_filter == null : this.m_filter.Equals(other.m_filter)) && ((this.m_query == null) ? other.m_query == null : this.m_query.Equals(other.m_query)); } return false; @@ -369,7 +367,7 @@ public override bool Equals(object o) public override int GetHashCode() { - return 31 * base.GetHashCode() + ((m_query == null) ? (object)m_filter : m_query).GetHashCode(); + return 31 * base.GetHashCode() + (m_query ?? 
(object)m_filter).GetHashCode(); } } } \ No newline at end of file diff --git a/src/Lucene.Net/Search/ControlledRealTimeReopenThread.cs b/src/Lucene.Net/Search/ControlledRealTimeReopenThread.cs index 40dbd10509..bdbd2c50fc 100644 --- a/src/Lucene.Net/Search/ControlledRealTimeReopenThread.cs +++ b/src/Lucene.Net/Search/ControlledRealTimeReopenThread.cs @@ -52,8 +52,8 @@ public class ControlledRealTimeReopenThread : ThreadJob, IDisposable private long searchingGen; private long refreshStartGen; - private EventWaitHandle reopenCond = new AutoResetEvent(false); - private EventWaitHandle available = new AutoResetEvent(false); + private readonly EventWaitHandle reopenCond = new AutoResetEvent(false); // LUCENENET: marked readonly + private readonly EventWaitHandle available = new AutoResetEvent(false); // LUCENENET: marked readonly /// /// Create , to periodically @@ -113,25 +113,45 @@ private void RefreshDone() reopenCond.Reset(); } + /// + /// Releases all resources used by the . + /// public void Dispose() { - finish = true; - reopenCond.Set(); -//#if FEATURE_THREAD_INTERRUPT -// try -// { -//#endif + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ + // LUCENENET specific - implemented proper dispose pattern + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + finish = true; + reopenCond.Set(); + //#if FEATURE_THREAD_INTERRUPT + // try + // { + //#endif Join(); -//#if FEATURE_THREAD_INTERRUPT // LUCENENET NOTE: Senseless to catch and rethrow the same exception type -// } -// catch (ThreadInterruptedException ie) -// { -// throw new ThreadInterruptedException(ie.ToString(), ie); -// } -//#endif - // LUCENENET specific: dispose reset event - reopenCond.Dispose(); - available.Dispose(); + //#if FEATURE_THREAD_INTERRUPT // LUCENENET NOTE: Senseless to catch and rethrow the same exception type + // } + // catch (ThreadInterruptedException ie) + // { + // throw new ThreadInterruptedException(ie.ToString(), ie); + // } + //#endif + // LUCENENET specific: dispose reset event + reopenCond.Dispose(); + available.Dispose(); + } } /// diff --git a/src/Lucene.Net/Search/FieldCache.cs b/src/Lucene.Net/Search/FieldCache.cs index fbd3737fb3..365718c338 100644 --- a/src/Lucene.Net/Search/FieldCache.cs +++ b/src/Lucene.Net/Search/FieldCache.cs @@ -1125,11 +1125,8 @@ public AcceptableOverheadRatio(float value) public override bool Equals(object obj) { - if (obj is AcceptableOverheadRatio) + if (obj is AcceptableOverheadRatio other) { -#pragma warning disable IDE0020 // Use pattern matching - AcceptableOverheadRatio other = (AcceptableOverheadRatio)obj; -#pragma warning restore IDE0020 // Use pattern matching return Value.Equals(other.Value); } return false; diff --git a/src/Lucene.Net/Search/FieldCacheImpl.cs b/src/Lucene.Net/Search/FieldCacheImpl.cs index a82214f9e9..569cdec928 100644 --- a/src/Lucene.Net/Search/FieldCacheImpl.cs +++ b/src/Lucene.Net/Search/FieldCacheImpl.cs @@ -204,22 +204,18 @@ public void OnClose(IndexReader owner) private void InitReader(AtomicReader reader) { -#pragma warning disable IDE0038 // Use pattern matching - if (reader is SegmentReader) -#pragma warning restore IDE0038 // 
Use pattern matching + if (reader is SegmentReader segmentReader) { - ((SegmentReader)reader).AddCoreDisposedListener(purgeCore); + segmentReader.AddCoreDisposedListener(purgeCore); } else { // we have a slow reader of some sort, try to register a purge event // rather than relying on gc: object key = reader.CoreCacheKey; -#pragma warning disable IDE0038 // Use pattern matching - if (key is AtomicReader) -#pragma warning restore IDE0038 // Use pattern matching + if (key is AtomicReader atomicReader) { - ((AtomicReader)key).AddReaderClosedListener(purgeReader); + atomicReader.AddReaderClosedListener(purgeReader); } else { @@ -326,13 +322,10 @@ public virtual TValue Get(AtomicReader reader, TKey key, bool setDocsWithField) } #endif object value = innerCache.GetOrAdd(key, (cacheKey) => new FieldCache.CreationPlaceholder()); -#pragma warning disable IDE0038 // Use pattern matching - if (value is FieldCache.CreationPlaceholder) -#pragma warning restore IDE0038 // Use pattern matching + if (value is FieldCache.CreationPlaceholder progress) { lock (value) { - var progress = (FieldCache.CreationPlaceholder)value; if (progress.Value is null) { progress.Value = CreateValue(reader, key, setDocsWithField); @@ -401,21 +394,15 @@ internal CacheKey(string field) /// Two of these are equal if they reference the same field and type. 
public override bool Equals(object o) { - if (o is CacheKey) + if (o is CacheKey other && other.field.Equals(field, StringComparison.Ordinal)) { -#pragma warning disable IDE0020 // Use pattern matching - CacheKey other = (CacheKey)o; -#pragma warning restore IDE0020 // Use pattern matching - if (other.field.Equals(field, StringComparison.Ordinal)) + if (other.Custom is null) { - if (other.Custom is null) - { - return Custom is null; - } - else if (other.Custom.Equals(Custom)) - { - return true; - } + return Custom is null; + } + else if (other.Custom.Equals(Custom)) + { + return true; } } return false; @@ -452,11 +439,8 @@ internal CacheKey(string field, TCustom custom) /// Two of these are equal if they reference the same field and type. public override bool Equals(object o) { - if (o is CacheKey) + if (o is CacheKey other) { -#pragma warning disable IDE0020 // Use pattern matching - CacheKey other = (CacheKey)o; -#pragma warning restore IDE0020 // Use pattern matching if (other.field.Equals(field, StringComparison.Ordinal)) { if (other.custom is null) @@ -548,11 +532,9 @@ internal virtual void SetDocsWithField(AtomicReader reader, string field, IBits { bits = new Lucene.Net.Util.Bits.MatchNoBits(maxDoc); } -#pragma warning disable IDE0038 // Use pattern matching - else if (docsWithField is FixedBitSet) -#pragma warning restore IDE0038 // Use pattern matching + else if (docsWithField is FixedBitSet fixedBitSet) { - int numSet = ((FixedBitSet)docsWithField).Cardinality(); + int numSet = fixedBitSet.Cardinality(); if (numSet >= maxDoc) { // The cardinality of the BitSet is maxDoc if all documents have a value. 
diff --git a/src/Lucene.Net/Search/FieldComparator.cs b/src/Lucene.Net/Search/FieldComparator.cs index c633d1674e..140e3042b7 100644 --- a/src/Lucene.Net/Search/FieldComparator.cs +++ b/src/Lucene.Net/Search/FieldComparator.cs @@ -182,11 +182,11 @@ public abstract class FieldComparer : FieldComparer /// public virtual int CompareValues(T first, T second) { - if (object.ReferenceEquals(first, default(T))) + if (object.ReferenceEquals(first, default)) { - return object.ReferenceEquals(second, default(T)) ? 0 : -1; + return object.ReferenceEquals(second, default) ? 0 : -1; } - else if (object.ReferenceEquals(second, default(T))) + else if (object.ReferenceEquals(second, default)) { return 1; } @@ -330,7 +330,7 @@ public abstract class NumericComparer : FieldComparer protected readonly string m_field; protected IBits m_docsWithField; - public NumericComparer(string field, T? missingValue) + protected NumericComparer(string field, T? missingValue) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_field = field; this.m_missingValue = missingValue; @@ -1413,7 +1413,7 @@ public sealed class TermValComparer : FieldComparer private static readonly byte[] NON_MISSING_BYTES = Arrays.Empty(); - private BytesRef[] values; + private readonly BytesRef[] values; // LUCENENET: marked readonly private BinaryDocValues docTerms; private IBits docsWithField; private readonly string field; diff --git a/src/Lucene.Net/Search/FieldValueFilter.cs b/src/Lucene.Net/Search/FieldValueFilter.cs index c11d1ab02e..72cc8da60e 100644 --- a/src/Lucene.Net/Search/FieldValueFilter.cs +++ b/src/Lucene.Net/Search/FieldValueFilter.cs @@ -87,11 +87,11 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo { return null; } - if (docsWithField is DocIdSet) + if (docsWithField is DocIdSet docIdSetWithField) { // UweSays: this is always the case for our current impl - but who knows // :-) - return 
BitsFilteredDocIdSet.Wrap((DocIdSet)docsWithField, acceptDocs); + return BitsFilteredDocIdSet.Wrap(docIdSetWithField, acceptDocs); } return new FieldCacheDocIdSet(context.AtomicReader.MaxDoc, acceptDocs, (doc) => docsWithField.Get(doc)); } diff --git a/src/Lucene.Net/Search/FieldValueHitQueue.cs b/src/Lucene.Net/Search/FieldValueHitQueue.cs index 310e692ad3..0815426780 100644 --- a/src/Lucene.Net/Search/FieldValueHitQueue.cs +++ b/src/Lucene.Net/Search/FieldValueHitQueue.cs @@ -49,7 +49,7 @@ public override string ToString() internal sealed class OneComparerFieldValueHitQueue : FieldValueHitQueue where T : FieldValueHitQueue.Entry { - private int oneReverseMul; + private readonly int oneReverseMul; // LUCENENET: marked readonly public OneComparerFieldValueHitQueue(SortField[] fields, int size) : base(fields, size) diff --git a/src/Lucene.Net/Search/FilteredDocIdSet.cs b/src/Lucene.Net/Search/FilteredDocIdSet.cs index fb8b8d0832..aa5c46bc82 100644 --- a/src/Lucene.Net/Search/FilteredDocIdSet.cs +++ b/src/Lucene.Net/Search/FilteredDocIdSet.cs @@ -43,7 +43,7 @@ public abstract class FilteredDocIdSet : DocIdSet /// /// Constructor. /// Underlying - public FilteredDocIdSet(DocIdSet innerSet) + protected FilteredDocIdSet(DocIdSet innerSet) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.innerSet = innerSet; } @@ -65,7 +65,7 @@ private class BitsAnonymousInnerClassHelper : IBits { private readonly FilteredDocIdSet outerInstance; - private IBits bits; + private readonly IBits bits; public BitsAnonymousInnerClassHelper(FilteredDocIdSet outerInstance, IBits bits) { diff --git a/src/Lucene.Net/Search/FilteredDocIdSetIterator.cs b/src/Lucene.Net/Search/FilteredDocIdSetIterator.cs index 01162e3a94..7723dd3acd 100644 --- a/src/Lucene.Net/Search/FilteredDocIdSetIterator.cs +++ b/src/Lucene.Net/Search/FilteredDocIdSetIterator.cs @@ -33,7 +33,7 @@ public abstract class FilteredDocIdSetIterator : DocIdSetIterator /// /// Constructor. 
/// Underlying . - public FilteredDocIdSetIterator(DocIdSetIterator innerIter) + protected FilteredDocIdSetIterator(DocIdSetIterator innerIter) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { m_innerIter = innerIter ?? throw new ArgumentNullException(nameof(innerIter), "null iterator"); doc = -1; diff --git a/src/Lucene.Net/Search/FilteredQuery.cs b/src/Lucene.Net/Search/FilteredQuery.cs index e876693639..1df1e4737f 100644 --- a/src/Lucene.Net/Search/FilteredQuery.cs +++ b/src/Lucene.Net/Search/FilteredQuery.cs @@ -84,9 +84,9 @@ private class WeightAnonymousInnerClassHelper : Weight { private readonly FilteredQuery outerInstance; - private Lucene.Net.Search.Weight weight; + private readonly Weight weight; - public WeightAnonymousInnerClassHelper(FilteredQuery outerInstance, Lucene.Net.Search.Weight weight) + public WeightAnonymousInnerClassHelper(FilteredQuery outerInstance, Weight weight) { this.outerInstance = outerInstance; this.weight = weight; diff --git a/src/Lucene.Net/Search/FuzzyTermsEnum.cs b/src/Lucene.Net/Search/FuzzyTermsEnum.cs index 2125243b8d..8f56d3eb60 100644 --- a/src/Lucene.Net/Search/FuzzyTermsEnum.cs +++ b/src/Lucene.Net/Search/FuzzyTermsEnum.cs @@ -58,15 +58,10 @@ namespace Lucene.Net.Search /// public class FuzzyTermsEnum : TermsEnum { - private void InitializeInstanceFields() - { - boostAtt = Attributes.AddAttribute(); - } - private TermsEnum actualEnum; private IBoostAttribute actualBoostAtt; - private IBoostAttribute boostAtt; + private readonly IBoostAttribute boostAtt; private readonly IMaxNonCompetitiveBoostAttribute maxBoostAtt; private readonly ILevenshteinAutomataAttribute dfaAtt; @@ -112,7 +107,7 @@ private void InitializeInstanceFields() /// if there is a low-level IO error public FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term, float minSimilarity, int prefixLength, bool transpositions) { - InitializeInstanceFields(); + boostAtt = Attributes.AddAttribute(); if (minSimilarity >= 
1.0f && minSimilarity != (int)minSimilarity) { throw new ArgumentException("fractional edit distances are not allowed"); @@ -361,25 +356,20 @@ public override void SeekExact(long ord) /// private class AutomatonFuzzyTermsEnum : FilteredTermsEnum { - internal virtual void InitializeInstanceFields() - { - boostAtt = Attributes.AddAttribute(); - } - private readonly FuzzyTermsEnum outerInstance; private readonly ByteRunAutomaton[] matchers; private readonly BytesRef termRef; - private IBoostAttribute boostAtt; + private readonly IBoostAttribute boostAtt; public AutomatonFuzzyTermsEnum(FuzzyTermsEnum outerInstance, TermsEnum tenum, CompiledAutomaton[] compiled) : base(tenum, false) { this.outerInstance = outerInstance; - InitializeInstanceFields(); + boostAtt = Attributes.AddAttribute(); this.matchers = new ByteRunAutomaton[compiled.Length]; for (int i = 0; i < compiled.Length; i++) { diff --git a/src/Lucene.Net/Search/IndexSearcher.cs b/src/Lucene.Net/Search/IndexSearcher.cs index b9d54523ff..bccb82300d 100644 --- a/src/Lucene.Net/Search/IndexSearcher.cs +++ b/src/Lucene.Net/Search/IndexSearcher.cs @@ -900,11 +900,7 @@ public bool MoveNext() return false; } - // LUCENENET NOTE: Not supported in .NET anyway - //public override void Remove() - //{ - // throw new NotSupportedException(); - //} + // LUCENENET NOTE: Remove() excluded because it is not applicable in .NET public IEnumerator GetEnumerator() { diff --git a/src/Lucene.Net/Search/LiveFieldValues.cs b/src/Lucene.Net/Search/LiveFieldValues.cs index d9f13a3a55..6bf68d355b 100644 --- a/src/Lucene.Net/Search/LiveFieldValues.cs +++ b/src/Lucene.Net/Search/LiveFieldValues.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; +using JCG = J2N.Collections.Generic; namespace Lucene.Net.Search { @@ -33,7 +34,6 @@ namespace Lucene.Net.Search /// the same time by two threads, because in this case you /// cannot in general know which thread "won". 
/// - public abstract class LiveFieldValues : ReferenceManager.IRefreshListener, IDisposable where S : class { @@ -42,16 +42,36 @@ public abstract class LiveFieldValues : ReferenceManager.IRefreshListener, private readonly ReferenceManager mgr; private readonly T missingValue; - public LiveFieldValues(ReferenceManager mgr, T missingValue) + protected LiveFieldValues(ReferenceManager mgr, T missingValue) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.missingValue = missingValue; this.mgr = mgr; mgr.AddListener(this); } + /// + /// Releases all resources used by the . + /// public void Dispose() { - mgr.RemoveListener(this); + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. + + // LUCENENET specific - implemented proper dispose pattern + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + mgr.RemoveListener(this); + } } public virtual void BeforeRefresh() @@ -109,16 +129,15 @@ public virtual void Delete(string id) public virtual T Get(string id) { // First try to get the "live" value: - T value; - current.TryGetValue(id, out value); - var comparer = EqualityComparer.Default; + current.TryGetValue(id, out T value); + var comparer = JCG.EqualityComparer.Default; if (comparer.Equals(value, missingValue)) { // Deleted but the deletion is not yet reflected in // the reader: - return default(T); + return default; } - else if (!comparer.Equals(value, default(T))) + else if (!comparer.Equals(value, default)) { return value; } @@ -129,9 +148,9 @@ public virtual T Get(string id) { // Deleted but the deletion is not yet reflected in // the reader: - return default(T); + return default; } - else if (!comparer.Equals(value, default(T))) + else if (!comparer.Equals(value, 
default)) { return value; } diff --git a/src/Lucene.Net/Search/MatchAllDocsQuery.cs b/src/Lucene.Net/Search/MatchAllDocsQuery.cs index 47a55ad3fb..f76beed91f 100644 --- a/src/Lucene.Net/Search/MatchAllDocsQuery.cs +++ b/src/Lucene.Net/Search/MatchAllDocsQuery.cs @@ -33,17 +33,14 @@ public class MatchAllDocsQuery : Query { private class MatchAllScorer : Scorer { - private readonly MatchAllDocsQuery outerInstance; - internal readonly float score; private int doc = -1; private readonly int maxDoc; private readonly IBits liveDocs; - internal MatchAllScorer(MatchAllDocsQuery outerInstance, IndexReader reader, IBits liveDocs, Weight w, float score) + internal MatchAllScorer(IndexReader reader, IBits liveDocs, Weight w, float score) : base(w) { - this.outerInstance = outerInstance; this.liveDocs = liveDocs; this.score = score; maxDoc = reader.MaxDoc; @@ -91,7 +88,7 @@ private class MatchAllDocsWeight : Weight private float queryWeight; private float queryNorm; - public MatchAllDocsWeight(MatchAllDocsQuery outerInstance, IndexSearcher searcher) + public MatchAllDocsWeight(MatchAllDocsQuery outerInstance /*, IndexSearcher searcher // LUCENENET: Never read */) { this.outerInstance = outerInstance; } @@ -117,7 +114,7 @@ public override void Normalize(float queryNorm, float topLevelBoost) public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { - return new MatchAllScorer(outerInstance, context.Reader, acceptDocs, this, queryWeight); + return new MatchAllScorer(context.Reader, acceptDocs, this, queryWeight); } public override Explanation Explain(AtomicReaderContext context, int doc) @@ -136,7 +133,7 @@ public override Explanation Explain(AtomicReaderContext context, int doc) public override Weight CreateWeight(IndexSearcher searcher) { - return new MatchAllDocsWeight(this, searcher); + return new MatchAllDocsWeight(this /*, searcher // LUCENENET: Never read */); } public override void ExtractTerms(ISet terms) diff --git 
a/src/Lucene.Net/Search/MultiPhraseQuery.cs b/src/Lucene.Net/Search/MultiPhraseQuery.cs index dcb761af53..78faddd6b9 100644 --- a/src/Lucene.Net/Search/MultiPhraseQuery.cs +++ b/src/Lucene.Net/Search/MultiPhraseQuery.cs @@ -71,7 +71,7 @@ namespace Lucene.Net.Search public class MultiPhraseQuery : Query, IEnumerable // LUCENENET specific - implemented IEnumerable, which allows for use of collection initializer. See: https://stackoverflow.com/a/9195144 { private string field; - private IList termArrays = new JCG.List(); + private readonly IList termArrays = new JCG.List(); // LUCENENET: marked readonly private readonly IList positions = new JCG.List(); private int slop = 0; @@ -196,9 +196,7 @@ public MultiPhraseWeight(MultiPhraseQuery outerInstance, IndexSearcher searcher) { foreach (Term term in terms) { - TermContext termContext; - termContexts.TryGetValue(term, out termContext); - if (termContext == null) + if (!termContexts.TryGetValue(term, out TermContext termContext) || termContext == null) { termContext = TermContext.Build(context, term); termContexts[term] = termContext; @@ -484,18 +482,16 @@ private bool TermArraysEquals(IList termArrays1, IList termArray return false; } using (IEnumerator iterator1 = termArrays1.GetEnumerator()) + using (IEnumerator iterator2 = termArrays2.GetEnumerator()) { - using (IEnumerator iterator2 = termArrays2.GetEnumerator()) + while (iterator1.MoveNext()) { - while (iterator1.MoveNext()) + Term[] termArray1 = iterator1.Current; + iterator2.MoveNext(); + Term[] termArray2 = iterator2.Current; + if (!(termArray1 == null ? termArray2 == null : Arrays.Equals(termArray1, termArray2))) { - Term[] termArray1 = iterator1.Current; - iterator2.MoveNext(); - Term[] termArray2 = iterator2.Current; - if (!(termArray1 == null ? 
termArray2 == null : Arrays.Equals(termArray1, termArray2))) - { - return false; - } + return false; } } } @@ -556,11 +552,6 @@ protected internal override bool LessThan(DocsAndPositionsEnum a, DocsAndPositio private sealed class Int32Queue { public Int32Queue() - { - InitializeInstanceFields(); - } - - internal void InitializeInstanceFields() { _array = new int[_arraySize]; } diff --git a/src/Lucene.Net/Search/MultiTermQuery.cs b/src/Lucene.Net/Search/MultiTermQuery.cs index fdc891ad56..2b990919a8 100644 --- a/src/Lucene.Net/Search/MultiTermQuery.cs +++ b/src/Lucene.Net/Search/MultiTermQuery.cs @@ -267,7 +267,7 @@ public override double DocCountPercent /// Constructs a query matching terms that cannot be represented with a single /// . /// - public MultiTermQuery(string field) + protected MultiTermQuery(string field) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_field = field ?? throw new ArgumentNullException(nameof(field), "field must not be null"); } diff --git a/src/Lucene.Net/Search/NumericRangeQuery.cs b/src/Lucene.Net/Search/NumericRangeQuery.cs index 9262c53260..70f7cde644 100644 --- a/src/Lucene.Net/Search/NumericRangeQuery.cs +++ b/src/Lucene.Net/Search/NumericRangeQuery.cs @@ -233,9 +233,8 @@ public override bool Equals(object o) { return false; } - if (o is NumericRangeQuery) + if (o is NumericRangeQuery q) { - var q = (NumericRangeQuery)o; return ((q.min == null ? min == null : q.min.Equals(min)) && (q.max == null ? 
max == null : q.max.Equals(max)) && minInclusive == q.minInclusive && maxInclusive == q.maxInclusive && precisionStep == q.precisionStep); } return false; diff --git a/src/Lucene.Net/Search/Payloads/AveragePayloadFunction.cs b/src/Lucene.Net/Search/Payloads/AveragePayloadFunction.cs index 8dbe2747de..859300e8d7 100644 --- a/src/Lucene.Net/Search/Payloads/AveragePayloadFunction.cs +++ b/src/Lucene.Net/Search/Payloads/AveragePayloadFunction.cs @@ -44,13 +44,13 @@ public override int GetHashCode() public override bool Equals(object obj) { - if (object.ReferenceEquals(obj, this)) + if (ReferenceEquals(obj, this)) { return true; } - if (object.ReferenceEquals(null, obj)) + if (obj is null) { - return object.ReferenceEquals(null, this); + return false; } if (this.GetType() != obj.GetType()) { diff --git a/src/Lucene.Net/Search/Payloads/PayloadNearQuery.cs b/src/Lucene.Net/Search/Payloads/PayloadNearQuery.cs index d7ce2d0b7e..c24acaeea3 100644 --- a/src/Lucene.Net/Search/Payloads/PayloadNearQuery.cs +++ b/src/Lucene.Net/Search/Payloads/PayloadNearQuery.cs @@ -211,7 +211,9 @@ public class PayloadNearSpanScorer : SpanScorer protected internal float m_payloadScore; internal int payloadsSeen; +#pragma warning disable IDE0060 // Remove unused parameter protected internal PayloadNearSpanScorer(PayloadNearQuery outerInstance, Spans spans, Weight weight, Similarity similarity, Similarity.SimScorer docScorer) +#pragma warning restore IDE0060 // Remove unused parameter : base(spans, weight, docScorer) { this.outerInstance = outerInstance; @@ -223,8 +225,7 @@ public virtual void GetPayloads(Spans[] subSpans) { for (var i = 0; i < subSpans.Length; i++) { - var span = subSpans[i] as NearSpansOrdered; - if (span != null) + if (subSpans[i] is NearSpansOrdered span) { if (span.IsPayloadAvailable) { @@ -234,8 +235,7 @@ public virtual void GetPayloads(Spans[] subSpans) } else { - var unordered = subSpans[i] as NearSpansUnordered; - if (unordered != null) + if (subSpans[i] is 
NearSpansUnordered unordered) { if (unordered.IsPayloadAvailable) { diff --git a/src/Lucene.Net/Search/Payloads/PayloadSpanUtil.cs b/src/Lucene.Net/Search/Payloads/PayloadSpanUtil.cs index 6f49a1e80e..3bb322c16a 100644 --- a/src/Lucene.Net/Search/Payloads/PayloadSpanUtil.cs +++ b/src/Lucene.Net/Search/Payloads/PayloadSpanUtil.cs @@ -40,7 +40,7 @@ namespace Lucene.Net.Search.Payloads /// public class PayloadSpanUtil { - private IndexReaderContext context; + private readonly IndexReaderContext context; // LUCENENET: marked readonly /// /// that contains doc with payloads to extract @@ -66,9 +66,9 @@ public virtual ICollection GetPayloadsForQuery(Query query) private void QueryToSpanQuery(Query query, ICollection payloads) { - if (query is BooleanQuery) + if (query is BooleanQuery booleanQuery) { - BooleanClause[] queryClauses = ((BooleanQuery)query).GetClauses(); + BooleanClause[] queryClauses = booleanQuery.GetClauses(); for (int i = 0; i < queryClauses.Length; i++) { @@ -78,16 +78,16 @@ private void QueryToSpanQuery(Query query, ICollection payloads) } } } - else if (query is PhraseQuery) + else if (query is PhraseQuery phraseQuery) { - Term[] phraseQueryTerms = ((PhraseQuery)query).GetTerms(); + Term[] phraseQueryTerms = phraseQuery.GetTerms(); SpanQuery[] clauses = new SpanQuery[phraseQueryTerms.Length]; for (int i = 0; i < phraseQueryTerms.Length; i++) { clauses[i] = new SpanTermQuery(phraseQueryTerms[i]); } - int slop = ((PhraseQuery)query).Slop; + int slop = phraseQuery.Slop; bool inorder = false; if (slop == 0) @@ -95,34 +95,31 @@ private void QueryToSpanQuery(Query query, ICollection payloads) inorder = true; } - SpanNearQuery sp = new SpanNearQuery(clauses, slop, inorder); - sp.Boost = query.Boost; + SpanNearQuery sp = new SpanNearQuery(clauses, slop, inorder) { Boost = query.Boost }; GetPayloads(payloads, sp); } - else if (query is TermQuery) + else if (query is TermQuery termQuery) { - SpanTermQuery stq = new SpanTermQuery(((TermQuery)query).Term); - 
stq.Boost = query.Boost; + SpanTermQuery stq = new SpanTermQuery(termQuery.Term) { Boost = query.Boost }; GetPayloads(payloads, stq); } - else if (query is SpanQuery) + else if (query is SpanQuery spanQuery) { - GetPayloads(payloads, (SpanQuery)query); + GetPayloads(payloads, spanQuery); } - else if (query is FilteredQuery) + else if (query is FilteredQuery filteredQuery) { - QueryToSpanQuery(((FilteredQuery)query).Query, payloads); + QueryToSpanQuery(filteredQuery.Query, payloads); } - else if (query is DisjunctionMaxQuery) + else if (query is DisjunctionMaxQuery disjunctionMaxQuery) { - foreach (var q in ((DisjunctionMaxQuery)query)) + foreach (var q in disjunctionMaxQuery) { QueryToSpanQuery(q, payloads); } } - else if (query is MultiPhraseQuery) + else if (query is MultiPhraseQuery mpq) { - MultiPhraseQuery mpq = (MultiPhraseQuery)query; IList termArrays = mpq.GetTermArrays(); int[] positions = mpq.GetPositions(); if (positions.Length > 0) diff --git a/src/Lucene.Net/Search/Payloads/PayloadTermQuery.cs b/src/Lucene.Net/Search/Payloads/PayloadTermQuery.cs index 1175bd84b3..904b5e0461 100644 --- a/src/Lucene.Net/Search/Payloads/PayloadTermQuery.cs +++ b/src/Lucene.Net/Search/Payloads/PayloadTermQuery.cs @@ -46,7 +46,7 @@ namespace Lucene.Net.Search.Payloads public class PayloadTermQuery : SpanTermQuery { protected PayloadFunction m_function; - private bool includeSpanScore; + private readonly bool includeSpanScore; // LUCENENET: marked readonly public PayloadTermQuery(Term term, PayloadFunction function) : this(term, function, true) diff --git a/src/Lucene.Net/Search/PhraseQuery.cs b/src/Lucene.Net/Search/PhraseQuery.cs index 33142b5f12..0c590288b7 100644 --- a/src/Lucene.Net/Search/PhraseQuery.cs +++ b/src/Lucene.Net/Search/PhraseQuery.cs @@ -66,8 +66,8 @@ namespace Lucene.Net.Search public class PhraseQuery : Query, IEnumerable // LUCENENET specific - implemented IEnumerable, which allows for use of collection initializer. 
See: https://stackoverflow.com/a/9195144 { private string field; - private IList terms = new JCG.List(4); - private IList positions = new JCG.List(4); + private readonly IList terms = new JCG.List(4); // LUCENENET: marked readonly + private readonly IList positions = new JCG.List(4); // LUCENENET: marked readonly private int maxPosition = 0; private int slop = 0; diff --git a/src/Lucene.Net/Search/QueryRescorer.cs b/src/Lucene.Net/Search/QueryRescorer.cs index e4e7bd0293..7a83e9fa2c 100644 --- a/src/Lucene.Net/Search/QueryRescorer.cs +++ b/src/Lucene.Net/Search/QueryRescorer.cs @@ -37,7 +37,7 @@ public abstract class QueryRescorer : Rescorer /// Sole constructor, passing the 2nd pass query to /// assign scores to the 1st pass hits. /// - public QueryRescorer(Query query) + protected QueryRescorer(Query query) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.query = query; } @@ -188,7 +188,7 @@ public static TopDocs Rescore(IndexSearcher searcher, TopDocs topDocs, Query que private class QueryRescorerAnonymousInnerClassHelper : QueryRescorer { - private double weight; + private readonly double weight; public QueryRescorerAnonymousInnerClassHelper(Lucene.Net.Search.Query query, double weight) : base(query) diff --git a/src/Lucene.Net/Search/QueryWrapperFilter.cs b/src/Lucene.Net/Search/QueryWrapperFilter.cs index f60915fe97..e23781c6d5 100644 --- a/src/Lucene.Net/Search/QueryWrapperFilter.cs +++ b/src/Lucene.Net/Search/QueryWrapperFilter.cs @@ -53,20 +53,17 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo // get a private context that is used to rewrite, createWeight and score eventually AtomicReaderContext privateContext = context.AtomicReader.AtomicContext; Weight weight = (new IndexSearcher(privateContext)).CreateNormalizedWeight(query); - return new DocIdSetAnonymousInnerClassHelper(this, acceptDocs, privateContext, weight); + return new DocIdSetAnonymousInnerClassHelper(acceptDocs, 
privateContext, weight); } private class DocIdSetAnonymousInnerClassHelper : DocIdSet { - private readonly QueryWrapperFilter outerInstance; + private readonly IBits acceptDocs; + private readonly AtomicReaderContext privateContext; + private readonly Weight weight; - private IBits acceptDocs; - private AtomicReaderContext privateContext; - private Lucene.Net.Search.Weight weight; - - public DocIdSetAnonymousInnerClassHelper(QueryWrapperFilter outerInstance, IBits acceptDocs, AtomicReaderContext privateContext, Lucene.Net.Search.Weight weight) + public DocIdSetAnonymousInnerClassHelper(IBits acceptDocs, AtomicReaderContext privateContext, Weight weight) { - this.outerInstance = outerInstance; this.acceptDocs = acceptDocs; this.privateContext = privateContext; this.weight = weight; diff --git a/src/Lucene.Net/Search/ReferenceManager.cs b/src/Lucene.Net/Search/ReferenceManager.cs index a6374e9dfe..3dcfe3e119 100644 --- a/src/Lucene.Net/Search/ReferenceManager.cs +++ b/src/Lucene.Net/Search/ReferenceManager.cs @@ -154,11 +154,8 @@ the reference. */ /// If the underlying reader of the current reference could not be disposed public void Dispose() { - lock (this) - { - Dispose(true); - GC.SuppressFinalize(this); - } + Dispose(true); + GC.SuppressFinalize(this); } /// @@ -167,17 +164,25 @@ public void Dispose() protected abstract int GetRefCount(G reference); /// - /// Called after , so subclass can free any resources. + /// Called after , so subclass can free any resources. + /// + /// When overriding, be sure to include a call to base.Dispose(disposing) in your implementation. /// if the after dispose operation in a sub-class throws an /// protected virtual void Dispose(bool disposing) { - if (disposing && current != null) + if (disposing) { - // make sure we can call this more than once - // closeable javadoc says: - // if this is already closed then invoking this method has no effect. 
- SwapReference(null); + lock (this) + { + if (current != null) + { + // make sure we can call this more than once + // closeable javadoc says: + // if this is already closed then invoking this method has no effect. + SwapReference(null); + } + } } } diff --git a/src/Lucene.Net/Search/ReqOptSumScorer.cs b/src/Lucene.Net/Search/ReqOptSumScorer.cs index fa64e0e093..bad9fe1d77 100644 --- a/src/Lucene.Net/Search/ReqOptSumScorer.cs +++ b/src/Lucene.Net/Search/ReqOptSumScorer.cs @@ -32,7 +32,7 @@ internal class ReqOptSumScorer : Scorer /// The scorers passed from the constructor. /// These are set to null as soon as their Next() or SkipTo() returns false. /// - private Scorer reqScorer; + private readonly Scorer reqScorer; // LUCENENET: marked readonly private Scorer optScorer; @@ -101,10 +101,11 @@ public override int Freq public override ICollection GetChildren() { - List children = new List(2); - children.Add(new ChildScorer(reqScorer, "MUST")); - children.Add(new ChildScorer(optScorer, "SHOULD")); - return children; + return new List(2) + { + new ChildScorer(reqScorer, "MUST"), + new ChildScorer(optScorer, "SHOULD") + }; } public override long GetCost() diff --git a/src/Lucene.Net/Search/ScoringRewrite.cs b/src/Lucene.Net/Search/ScoringRewrite.cs index 08516f7519..fc2fde3320 100644 --- a/src/Lucene.Net/Search/ScoringRewrite.cs +++ b/src/Lucene.Net/Search/ScoringRewrite.cs @@ -143,18 +143,13 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) internal sealed class ParallelArraysTermCollector : TermCollector { - internal void InitializeInstanceFields() - { - terms = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectAllocator()), 16, array); - } - private readonly ScoringRewrite outerInstance; public ParallelArraysTermCollector(ScoringRewrite outerInstance) { this.outerInstance = outerInstance; - InitializeInstanceFields(); + terms = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectAllocator()), 16, array); } internal readonly 
TermFreqBoostByteStart array = new TermFreqBoostByteStart(16); diff --git a/src/Lucene.Net/Search/SearcherLifetimeManager.cs b/src/Lucene.Net/Search/SearcherLifetimeManager.cs index 84aaa1727e..7c351b1c44 100644 --- a/src/Lucene.Net/Search/SearcherLifetimeManager.cs +++ b/src/Lucene.Net/Search/SearcherLifetimeManager.cs @@ -197,8 +197,7 @@ public virtual long Record(IndexSearcher searcher) public virtual IndexSearcher Acquire(long version) { EnsureOpen(); - Lazy tracker; - if (_searchers.TryGetValue(version, out tracker) && tracker.IsValueCreated && tracker.Value.Searcher.IndexReader.TryIncRef()) + if (_searchers.TryGetValue(version, out Lazy tracker) && tracker.IsValueCreated && tracker.Value.Searcher.IndexReader.TryIncRef()) { return tracker.Value.Searcher; } @@ -312,27 +311,43 @@ public virtual void Prune(IPruner pruner) /// otherwise it's possible not all searcher references /// will be freed. /// - public virtual void Dispose() + public void Dispose() { - lock (this) - { - _closed = true; - IList toClose = new List(_searchers.Values.Select(item => item.Value)); + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
- // Remove up front in case exc below, so we don't - // over-decRef on double-close: - foreach (var tracker in toClose) + // LUCENENET specific - implemented proper dispose pattern + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + lock (this) { - Lazy _; - _searchers.TryRemove(tracker.Version, out _); - } + _closed = true; + IList toClose = new List(_searchers.Values.Select(item => item.Value)); + + // Remove up front in case exc below, so we don't + // over-decRef on double-close: + foreach (var tracker in toClose) + { + _searchers.TryRemove(tracker.Version, out Lazy _); + } - IOUtils.Dispose(toClose); + IOUtils.Dispose(toClose); - // Make some effort to catch mis-use: - if (_searchers.Count != 0) - { - throw new InvalidOperationException("another thread called record while this SearcherLifetimeManager instance was being closed; not all searchers were closed"); + // Make some effort to catch mis-use: + if (_searchers.Count != 0) + { + throw new InvalidOperationException("another thread called record while this SearcherLifetimeManager instance was being disposed; not all searchers were disposed"); + } } } } diff --git a/src/Lucene.Net/Search/Similarities/AfterEffect.cs b/src/Lucene.Net/Search/Similarities/AfterEffect.cs index 3ce318acf0..48468ee389 100644 --- a/src/Lucene.Net/Search/Similarities/AfterEffect.cs +++ b/src/Lucene.Net/Search/Similarities/AfterEffect.cs @@ -33,7 +33,7 @@ public abstract class AfterEffect /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) 
/// - public AfterEffect() + protected AfterEffect() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net/Search/Similarities/BasicModel.cs b/src/Lucene.Net/Search/Similarities/BasicModel.cs index 0d2396d77f..9b385a172a 100644 --- a/src/Lucene.Net/Search/Similarities/BasicModel.cs +++ b/src/Lucene.Net/Search/Similarities/BasicModel.cs @@ -32,7 +32,7 @@ public abstract class BasicModel /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public BasicModel() + protected BasicModel() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net/Search/Similarities/Distribution.cs b/src/Lucene.Net/Search/Similarities/Distribution.cs index ab55bb819f..2b292b029c 100644 --- a/src/Lucene.Net/Search/Similarities/Distribution.cs +++ b/src/Lucene.Net/Search/Similarities/Distribution.cs @@ -30,7 +30,7 @@ public abstract class Distribution /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public Distribution() + protected Distribution() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net/Search/Similarities/LMSimilarity.cs b/src/Lucene.Net/Search/Similarities/LMSimilarity.cs index 2b60ae523a..2acc7b535a 100644 --- a/src/Lucene.Net/Search/Similarities/LMSimilarity.cs +++ b/src/Lucene.Net/Search/Similarities/LMSimilarity.cs @@ -42,14 +42,14 @@ public abstract class LMSimilarity : SimilarityBase /// /// Creates a new instance with the specified collection language model. - public LMSimilarity(ICollectionModel collectionModel) + protected LMSimilarity(ICollectionModel collectionModel) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_collectionModel = collectionModel; } /// /// Creates a new instance with the default collection language model. 
- public LMSimilarity() + protected LMSimilarity() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(new DefaultCollectionModel()) { } diff --git a/src/Lucene.Net/Search/Similarities/Lambda.cs b/src/Lucene.Net/Search/Similarities/Lambda.cs index f82b1eff1e..66b3e59cab 100644 --- a/src/Lucene.Net/Search/Similarities/Lambda.cs +++ b/src/Lucene.Net/Search/Similarities/Lambda.cs @@ -30,7 +30,7 @@ public abstract class Lambda /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public Lambda() + protected Lambda() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net/Search/Similarities/Normalization.cs b/src/Lucene.Net/Search/Similarities/Normalization.cs index 3328f9e03a..61bc8328f6 100644 --- a/src/Lucene.Net/Search/Similarities/Normalization.cs +++ b/src/Lucene.Net/Search/Similarities/Normalization.cs @@ -30,7 +30,7 @@ public abstract class Normalization /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public Normalization() + protected Normalization() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net/Search/Similarities/PerFieldSimilarityWrapper.cs b/src/Lucene.Net/Search/Similarities/PerFieldSimilarityWrapper.cs index 9e35e26997..2413a40457 100644 --- a/src/Lucene.Net/Search/Similarities/PerFieldSimilarityWrapper.cs +++ b/src/Lucene.Net/Search/Similarities/PerFieldSimilarityWrapper.cs @@ -34,7 +34,7 @@ public abstract class PerFieldSimilarityWrapper : Similarity /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) 
/// - public PerFieldSimilarityWrapper() + protected PerFieldSimilarityWrapper() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net/Search/Similarities/Similarity.cs b/src/Lucene.Net/Search/Similarities/Similarity.cs index 37505f5a6d..c6341d68fb 100644 --- a/src/Lucene.Net/Search/Similarities/Similarity.cs +++ b/src/Lucene.Net/Search/Similarities/Similarity.cs @@ -102,7 +102,7 @@ public abstract class Similarity /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public Similarity() + protected Similarity() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } @@ -184,7 +184,7 @@ public abstract class SimScorer /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public SimScorer() + protected SimScorer() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } @@ -228,7 +228,7 @@ public abstract class SimWeight /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// - public SimWeight() + protected SimWeight() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net/Search/Similarities/SimilarityBase.cs b/src/Lucene.Net/Search/Similarities/SimilarityBase.cs index 2c7b93a9d3..ae971e81e5 100644 --- a/src/Lucene.Net/Search/Similarities/SimilarityBase.cs +++ b/src/Lucene.Net/Search/Similarities/SimilarityBase.cs @@ -57,7 +57,7 @@ public abstract class SimilarityBase : Similarity /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) 
/// - public SimilarityBase() + protected SimilarityBase() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } @@ -194,11 +194,11 @@ public virtual Explanation Explain(BasicStats stats, int doc, Explanation freq, public override SimScorer GetSimScorer(SimWeight stats, AtomicReaderContext context) { - if (stats is MultiSimilarity.MultiStats) + if (stats is MultiSimilarity.MultiStats multiStats) { // a multi term query (e.g. phrase). return the summation, // scoring almost as if it were boolean query - SimWeight[] subStats = ((MultiSimilarity.MultiStats)stats).subStats; + SimWeight[] subStats = multiStats.subStats; SimScorer[] subScorers = new SimScorer[subStats.Length]; for (int i = 0; i < subScorers.Length; i++) { diff --git a/src/Lucene.Net/Search/Similarities/TFIDFSimilarity.cs b/src/Lucene.Net/Search/Similarities/TFIDFSimilarity.cs index f5315c0974..f7a5fe595f 100644 --- a/src/Lucene.Net/Search/Similarities/TFIDFSimilarity.cs +++ b/src/Lucene.Net/Search/Similarities/TFIDFSimilarity.cs @@ -482,7 +482,7 @@ public abstract class TFIDFSimilarity : Similarity /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) 
/// - public TFIDFSimilarity() + protected TFIDFSimilarity() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } diff --git a/src/Lucene.Net/Search/SloppyPhraseScorer.cs b/src/Lucene.Net/Search/SloppyPhraseScorer.cs index e526803c69..e8fd5f8aa8 100644 --- a/src/Lucene.Net/Search/SloppyPhraseScorer.cs +++ b/src/Lucene.Net/Search/SloppyPhraseScorer.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Search @@ -211,7 +212,8 @@ private bool AdvanceRpts(PhrasePositions pp) /// /// Compare two pps, but only by position and offset - private PhrasePositions Lesser(PhrasePositions pp, PhrasePositions pp2) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static PhrasePositions Lesser(PhrasePositions pp, PhrasePositions pp2) // LUCENENET: CA1822: Mark members as static { if (pp.position < pp2.position || (pp.position == pp2.position && pp.offset < pp2.offset)) { @@ -301,6 +303,7 @@ private bool InitComplex() /// /// Move all PPs to their first position + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void PlaceFirstPositions() { for (PhrasePositions pp = min, prev = null; prev != max; pp = (prev = pp).next) // iterate cyclic list: done once handled max @@ -477,8 +480,10 @@ private IList> GatherRptGroups(JCG.LinkedDictionary rl = new List(2); - rl.Add(pp); + List rl = new List(2) + { + pp + }; res.Add(rl); } pp2.rptGroup = g; @@ -521,7 +526,8 @@ private IList> GatherRptGroups(JCG.LinkedDictionary /// Actual position in doc of a PhrasePosition, relies on that position = tpPos - offset) - private int TpPos(PhrasePositions pp) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static int TpPos(PhrasePositions pp) // LUCENENET: CA1822: Mark members as static { return pp.position + pp.offset; } @@ -536,8 +542,7 @@ private int TpPos(PhrasePositions pp) { foreach (Term 
t in pp.terms) { - int? cnt0; - tcnt.TryGetValue(t, out cnt0); + tcnt.TryGetValue(t, out int? cnt0); int? cnt = cnt0 == null ? new int?(1) : new int?(1 + (int)cnt0); tcnt[t] = cnt; if (cnt == 2) @@ -571,16 +576,15 @@ private PhrasePositions[] RepeatingPPs(IDictionary rptTerms) /// /// bit-sets - for each repeating pp, for each of its repeating terms, the term ordinal values is set - private IList PpTermsBitSets(PhrasePositions[] rpp, IDictionary tord) + private static IList PpTermsBitSets(PhrasePositions[] rpp, IDictionary tord) // LUCENENET: CA1822: Mark members as static { List bb = new List(rpp.Length); foreach (PhrasePositions pp in rpp) { FixedBitSet b = new FixedBitSet(tord.Count); - var ord = new int?(); foreach (var t in pp.terms) { - if (tord.TryGetValue(t, out ord) && ord != null) + if (tord.TryGetValue(t, out int? ord) && ord.HasValue) b.Set((int)ord); } bb.Add(b); @@ -590,7 +594,7 @@ private IList PpTermsBitSets(PhrasePositions[] rpp, IDictionary /// Union (term group) bit-sets until they are disjoint (O(n^^2)), and each group have different terms - private void UnionTermGroups(IList bb) + private static void UnionTermGroups(IList bb) // LUCENENET: CA1822: Mark members as static { int incr; for (int i = 0; i < bb.Count - 1; i += incr) @@ -615,7 +619,7 @@ private void UnionTermGroups(IList bb) /// /// Map each term to the single group that contains it - private IDictionary TermGroups(JCG.LinkedDictionary tord, IList bb) + private static IDictionary TermGroups(JCG.LinkedDictionary tord, IList bb) // LUCENENET: CA1822: Mark members as static { Dictionary tg = new Dictionary(); Term[] t = tord.Keys.ToArray(/*new Term[0]*/); @@ -675,6 +679,7 @@ public override int NextDoc() return Advance(max.doc + 1); // advance to the next doc after #docID() } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override float GetScore() { return docScorer.Score(max.doc, sloppyFreq); @@ -705,6 +710,7 @@ public override int Advance(int target) return max.doc; } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return cost; diff --git a/src/Lucene.Net/Search/SortField.cs b/src/Lucene.Net/Search/SortField.cs index 8dc6829f5f..f2f201f0fc 100644 --- a/src/Lucene.Net/Search/SortField.cs +++ b/src/Lucene.Net/Search/SortField.cs @@ -49,15 +49,15 @@ public class SortField private string field; private SortFieldType type; // defaults to determining type dynamically internal bool reverse = false; // defaults to natural order - private FieldCache.IParser parser; + private readonly FieldCache.IParser parser; // LUCENENET: marked readonly // Used for CUSTOM sort - private FieldComparerSource comparerSource; + private readonly FieldComparerSource comparerSource; // LUCENENET: marked readonly // Used for 'sortMissingFirst/Last' public virtual object MissingValue { - get { return m_missingValue; } + get => m_missingValue; set { if (type == SortFieldType.STRING) diff --git a/src/Lucene.Net/Search/Spans/FieldMaskingSpanQuery.cs b/src/Lucene.Net/Search/Spans/FieldMaskingSpanQuery.cs index 135af94ca6..8a16895c3d 100644 --- a/src/Lucene.Net/Search/Spans/FieldMaskingSpanQuery.cs +++ b/src/Lucene.Net/Search/Spans/FieldMaskingSpanQuery.cs @@ -74,7 +74,7 @@ namespace Lucene.Net.Search.Spans public class FieldMaskingSpanQuery : SpanQuery { private SpanQuery maskedQuery; - private string field; + private readonly string field; // LUCENENET: marked readonly public FieldMaskingSpanQuery(SpanQuery maskedQuery, string maskedField) { diff --git a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs index 5a1b6753a7..815a3a11f1 100644 --- a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs +++ b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs @@ -73,7 +73,7 @@ public class NearSpansOrdered : Spans private int matchDoc = -1; private int matchStart = -1; private int matchEnd = -1; - private List matchPayload; + private readonly List matchPayload; // LUCENENET: marked readonly 
private readonly Spans[] subSpansByDoc; @@ -101,8 +101,8 @@ protected override int Compare(int i, int j) } } - private SpanNearQuery query; - private bool collectPayloads = true; + private readonly SpanNearQuery query; // LUCENENET: marked readonly + private readonly bool collectPayloads = true; // LUCENENET: marked readonly public NearSpansOrdered(SpanNearQuery spanNearQuery, AtomicReaderContext context, IBits acceptDocs, IDictionary termContexts) : this(spanNearQuery, context, acceptDocs, termContexts, true) diff --git a/src/Lucene.Net/Search/Spans/NearSpansUnordered.cs b/src/Lucene.Net/Search/Spans/NearSpansUnordered.cs index e6ee67cb55..d3c9fbb346 100644 --- a/src/Lucene.Net/Search/Spans/NearSpansUnordered.cs +++ b/src/Lucene.Net/Search/Spans/NearSpansUnordered.cs @@ -37,18 +37,18 @@ namespace Lucene.Net.Search.Spans /// public class NearSpansUnordered : Spans { - private SpanNearQuery query; + private readonly SpanNearQuery query; // LUCENENET: marked readonly - private IList ordered = new List(); // spans in query order - private Spans[] subSpans; - private int slop; // from query + private readonly IList ordered = new List(); // spans in query order // LUCENENET: marked readonly + private readonly Spans[] subSpans; // LUCENENET: marked readonly + private readonly int slop; // from query // LUCENENET: marked readonly private SpansCell first; // linked list of spans private SpansCell last; // sorted by doc only private int totalLength; // sum of current lengths - private CellQueue queue; // sorted queue of spans + private readonly CellQueue queue; // sorted queue of spans // LUCENENET: marked readonly private SpansCell max; // max element in queue private bool more = true; // true iff not done @@ -56,12 +56,9 @@ public class NearSpansUnordered : Spans private class CellQueue : Util.PriorityQueue { - private readonly NearSpansUnordered outerInstance; - - public CellQueue(NearSpansUnordered outerInstance, int size) + public CellQueue(int size) : base(size) { - 
this.outerInstance = outerInstance; } protected internal override bool LessThan(SpansCell spans1, SpansCell spans2) @@ -86,7 +83,7 @@ private class SpansCell : Spans internal Spans spans; internal SpansCell next; private int length = -1; - private int index; + private readonly int index; // LUCENENET: marked readonly public SpansCell(NearSpansUnordered outerInstance, Spans spans, int index) { @@ -157,7 +154,7 @@ public NearSpansUnordered(SpanNearQuery query, AtomicReaderContext context, IBit this.slop = query.Slop; SpanQuery[] clauses = query.GetClauses(); - queue = new CellQueue(this, clauses.Length); + queue = new CellQueue(clauses.Length); subSpans = new Spans[clauses.Length]; for (int i = 0; i < clauses.Length; i++) { @@ -220,7 +217,7 @@ public override bool MoveNext() if (queueStale) // maintain the queue { ListToQueue(); - queueStale = false; + //queueStale = false; // LUCENENET: IDE0059: Remove unnecessary value assignment (this is reset to false at the beginning of the loop) } if (AtMatch) diff --git a/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs b/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs index ae7fbf9438..b31ed9bb56 100644 --- a/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs +++ b/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs @@ -59,10 +59,9 @@ public SpanMultiTermQueryWrapper(Q query) this.m_query = query; MultiTermQuery.RewriteMethod method = this.m_query.MultiTermRewriteMethod; - if (method is ITopTermsRewrite) + if (method is ITopTermsRewrite topTermsRewrite) { - int pqsize = ((ITopTermsRewrite)method).Count; - MultiTermRewriteMethod = new TopTermsSpanBooleanQueryRewrite(pqsize); + MultiTermRewriteMethod = new TopTermsSpanBooleanQueryRewrite(topTermsRewrite.Count); } else { @@ -79,11 +78,11 @@ public SpanRewriteMethod MultiTermRewriteMethod get { MultiTermQuery.RewriteMethod m = m_query.MultiTermRewriteMethod; - if (!(m is SpanRewriteMethod)) + if (!(m is SpanRewriteMethod spanRewriteMethod)) { throw new 
NotSupportedException("You can only use SpanMultiTermQueryWrapper with a suitable SpanRewriteMethod."); } - return (SpanRewriteMethod)m; + return spanRewriteMethod; } set => m_query.MultiTermRewriteMethod = value; } @@ -225,17 +224,14 @@ public sealed class TopTermsSpanBooleanQueryRewrite : SpanRewriteMethod /// public TopTermsSpanBooleanQueryRewrite(int size) { - @delegate = new TopTermsRewriteAnonymousInnerClassHelper(this, size); + @delegate = new TopTermsRewriteAnonymousInnerClassHelper(size); } private class TopTermsRewriteAnonymousInnerClassHelper : TopTermsRewrite { - private readonly TopTermsSpanBooleanQueryRewrite outerInstance; - - public TopTermsRewriteAnonymousInnerClassHelper(TopTermsSpanBooleanQueryRewrite outerInstance, int size) + public TopTermsRewriteAnonymousInnerClassHelper(int size) : base(size) { - this.outerInstance = outerInstance; } protected override int MaxSize => int.MaxValue; diff --git a/src/Lucene.Net/Search/Spans/SpanNearQuery.cs b/src/Lucene.Net/Search/Spans/SpanNearQuery.cs index a4bbe9ab09..e846b6e881 100644 --- a/src/Lucene.Net/Search/Spans/SpanNearQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanNearQuery.cs @@ -45,7 +45,7 @@ public class SpanNearQuery : SpanQuery protected bool m_inOrder; protected string m_field; - private bool collectPayloads; + private readonly bool collectPayloads; // LUCENENET: marked readonly /// /// Construct a . 
Matches spans matching a span from each diff --git a/src/Lucene.Net/Search/Spans/SpanNotQuery.cs b/src/Lucene.Net/Search/Spans/SpanNotQuery.cs index ca85bf204a..ab15d25667 100644 --- a/src/Lucene.Net/Search/Spans/SpanNotQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanNotQuery.cs @@ -127,26 +127,19 @@ private class SpansAnonymousInnerClassHelper : Spans { private readonly SpanNotQuery outerInstance; - private AtomicReaderContext context; - private IBits acceptDocs; - private IDictionary termContexts; - public SpansAnonymousInnerClassHelper(SpanNotQuery outerInstance, AtomicReaderContext context, IBits acceptDocs, IDictionary termContexts) { this.outerInstance = outerInstance; - this.context = context; - this.acceptDocs = acceptDocs; - this.termContexts = termContexts; includeSpans = outerInstance.include.GetSpans(context, acceptDocs, termContexts); moreInclude = true; excludeSpans = outerInstance.exclude.GetSpans(context, acceptDocs, termContexts); moreExclude = excludeSpans.MoveNext(); } - private Spans includeSpans; + private readonly Spans includeSpans; // LUCENENET: marked readonly private bool moreInclude; - private Spans excludeSpans; + private readonly Spans excludeSpans; // LUCENENET: marked readonly private bool moreExclude; public override bool MoveNext() diff --git a/src/Lucene.Net/Search/Spans/SpanOrQuery.cs b/src/Lucene.Net/Search/Spans/SpanOrQuery.cs index 0c51fb22ec..9f298ddeb3 100644 --- a/src/Lucene.Net/Search/Spans/SpanOrQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanOrQuery.cs @@ -179,12 +179,9 @@ public override int GetHashCode() private class SpanQueue : Util.PriorityQueue { - private readonly SpanOrQuery outerInstance; - - public SpanQueue(SpanOrQuery outerInstance, int size) + public SpanQueue(int size) : base(size) { - this.outerInstance = outerInstance; } protected internal override bool LessThan(Spans spans1, Spans spans2) @@ -221,9 +218,9 @@ private class SpansAnonymousInnerClassHelper : Spans { private readonly SpanOrQuery outerInstance; - 
private AtomicReaderContext context; - private IBits acceptDocs; - private IDictionary termContexts; + private readonly AtomicReaderContext context; + private readonly IBits acceptDocs; + private readonly IDictionary termContexts; public SpansAnonymousInnerClassHelper(SpanOrQuery outerInstance, AtomicReaderContext context, IBits acceptDocs, IDictionary termContexts) { @@ -239,7 +236,7 @@ public SpansAnonymousInnerClassHelper(SpanOrQuery outerInstance, AtomicReaderCon private bool InitSpanQueue(int target) { - queue = new SpanQueue(outerInstance, outerInstance.clauses.Count); + queue = new SpanQueue(outerInstance.clauses.Count); foreach (var clause in outerInstance.clauses) { Spans spans = clause.GetSpans(context, acceptDocs, termContexts); diff --git a/src/Lucene.Net/Search/Spans/SpanPositionCheckQuery.cs b/src/Lucene.Net/Search/Spans/SpanPositionCheckQuery.cs index 839b75fa7f..791c709a34 100644 --- a/src/Lucene.Net/Search/Spans/SpanPositionCheckQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanPositionCheckQuery.cs @@ -36,7 +36,7 @@ public abstract class SpanPositionCheckQuery : SpanQuery { protected SpanQuery m_match; - public SpanPositionCheckQuery(SpanQuery match) + protected SpanPositionCheckQuery(SpanQuery match) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_match = match; } @@ -116,7 +116,7 @@ protected class PositionCheckSpan : Spans { private readonly SpanPositionCheckQuery outerInstance; - private Spans spans; + private readonly Spans spans; // LUCENENET: marked readonly public PositionCheckSpan(SpanPositionCheckQuery outerInstance, AtomicReaderContext context, IBits acceptDocs, IDictionary termContexts) { diff --git a/src/Lucene.Net/Search/Spans/SpanTermQuery.cs b/src/Lucene.Net/Search/Spans/SpanTermQuery.cs index a878618685..35d70118e3 100644 --- a/src/Lucene.Net/Search/Spans/SpanTermQuery.cs +++ b/src/Lucene.Net/Search/Spans/SpanTermQuery.cs @@ -111,10 +111,8 @@ public override bool Equals(object obj) public 
override Spans GetSpans(AtomicReaderContext context, IBits acceptDocs, IDictionary termContexts) { - TermContext termContext; - termContexts.TryGetValue(m_term, out termContext); TermState state; - if (termContext == null) + if (!termContexts.TryGetValue(m_term, out TermContext termContext) || termContext == null) { // this happens with span-not query, as it doesn't include the NOT side in extractTerms() // so we seek to the term now in this segment..., this sucks because its ugly mostly! diff --git a/src/Lucene.Net/Search/TermQuery.cs b/src/Lucene.Net/Search/TermQuery.cs index ec08a20d92..3d909b96f0 100644 --- a/src/Lucene.Net/Search/TermQuery.cs +++ b/src/Lucene.Net/Search/TermQuery.cs @@ -114,7 +114,7 @@ private TermsEnum GetTermsEnum(AtomicReaderContext context) return termsEnum; } - private bool TermNotInReader(AtomicReader reader, Term term) + private static bool TermNotInReader(AtomicReader reader, Term term) // LUCENENET: CA1822: Mark members as static { // only called from assert //System.out.println("TQ.termNotInReader reader=" + reader + " term=" + field + ":" + bytes.utf8ToString()); diff --git a/src/Lucene.Net/Search/TermRangeQuery.cs b/src/Lucene.Net/Search/TermRangeQuery.cs index 50e039137a..3de9b3e8fc 100644 --- a/src/Lucene.Net/Search/TermRangeQuery.cs +++ b/src/Lucene.Net/Search/TermRangeQuery.cs @@ -44,10 +44,10 @@ namespace Lucene.Net.Search public class TermRangeQuery : MultiTermQuery { - private BytesRef lowerTerm; - private BytesRef upperTerm; - private bool includeLower; - private bool includeUpper; + private readonly BytesRef lowerTerm; // LUCENENET: marked readonly + private readonly BytesRef upperTerm; // LUCENENET: marked readonly + private readonly bool includeLower; // LUCENENET: marked readonly + private readonly bool includeUpper; // LUCENENET: marked readonly /// /// Constructs a query selecting all terms greater/equal than diff --git a/src/Lucene.Net/Search/TimeLimitingCollector.cs b/src/Lucene.Net/Search/TimeLimitingCollector.cs 
index 121cc895ff..e90f3d25c1 100644 --- a/src/Lucene.Net/Search/TimeLimitingCollector.cs +++ b/src/Lucene.Net/Search/TimeLimitingCollector.cs @@ -45,9 +45,9 @@ public class TimeLimitingCollector : ICollector #endif public class TimeExceededException : Exception { - private long timeAllowed; - private long timeElapsed; - private int lastDocCollected; + private readonly long timeAllowed; // LUCENENET: marked readonly + private readonly long timeElapsed; // LUCENENET: marked readonly + private readonly int lastDocCollected; // LUCENENET: marked readonly internal TimeExceededException(long timeAllowed, long timeElapsed, int lastDocCollected) : base("Elapsed time: " + timeElapsed + "Exceeded allowed search time: " + timeAllowed + " ms.") @@ -278,7 +278,7 @@ public sealed class TimerThread : ThreadJob // afford losing a tick or two. // // See section 17 of the Java Language Specification for details. - private long time = 0; + private readonly long time = 0; private volatile bool stop = false; private long resolution; diff --git a/src/Lucene.Net/Search/TopDocsCollector.cs b/src/Lucene.Net/Search/TopDocsCollector.cs index 5b9c5d6652..b25848fd8d 100644 --- a/src/Lucene.Net/Search/TopDocsCollector.cs +++ b/src/Lucene.Net/Search/TopDocsCollector.cs @@ -1,4 +1,5 @@ using Lucene.Net.Index; +using Lucene.Net.Support; using Lucene.Net.Util; using System; @@ -39,7 +40,7 @@ public abstract class TopDocsCollector : ICollector, ITopDocsCollector where /// This is used in case is called with illegal parameters, or there /// simply aren't (enough) results. /// - protected static readonly TopDocs EMPTY_TOPDOCS = new TopDocs(0, new ScoreDoc[0], float.NaN); + protected static readonly TopDocs EMPTY_TOPDOCS = new TopDocs(0, Arrays.Empty(), float.NaN); /// /// The priority queue which holds the top documents. 
Note that different diff --git a/src/Lucene.Net/Search/TopTermsRewrite.cs b/src/Lucene.Net/Search/TopTermsRewrite.cs index acdfb5184b..79d4e09c1a 100644 --- a/src/Lucene.Net/Search/TopTermsRewrite.cs +++ b/src/Lucene.Net/Search/TopTermsRewrite.cs @@ -54,7 +54,7 @@ public abstract class TopTermsRewrite : TermCollectingRewrite, ITopTermsRe /// NOTE: if is smaller than /// , then it will be used instead. /// - public TopTermsRewrite(int count) + protected TopTermsRewrite(int count) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.size = count; } @@ -74,7 +74,7 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) { int maxSize = Math.Min(size, MaxSize); JCG.PriorityQueue stQueue = new JCG.PriorityQueue(); - CollectTerms(reader, query, new TermCollectorAnonymousInnerClassHelper(this, maxSize, stQueue)); + CollectTerms(reader, query, new TermCollectorAnonymousInnerClassHelper(maxSize, stQueue)); var q = GetTopLevelQuery(); ScoreTerm[] scoreTerms = stQueue.ToArray(/*new ScoreTerm[stQueue.Count]*/); @@ -92,14 +92,11 @@ public override Query Rewrite(IndexReader reader, MultiTermQuery query) private class TermCollectorAnonymousInnerClassHelper : TermCollector { - private readonly TopTermsRewrite outerInstance; + private readonly int maxSize; + private readonly JCG.PriorityQueue stQueue; - private int maxSize; - private JCG.PriorityQueue stQueue; - - public TermCollectorAnonymousInnerClassHelper(TopTermsRewrite outerInstance, int maxSize, JCG.PriorityQueue stQueue) + public TermCollectorAnonymousInnerClassHelper(int maxSize, JCG.PriorityQueue stQueue) { - this.outerInstance = outerInstance; this.maxSize = maxSize; this.stQueue = stQueue; maxBoostAtt = Attributes.AddAttribute(); diff --git a/src/Lucene.Net/Store/BufferedIndexInput.cs b/src/Lucene.Net/Store/BufferedIndexInput.cs index cfcd26ad16..716f1c05f1 100644 --- a/src/Lucene.Net/Store/BufferedIndexInput.cs +++ b/src/Lucene.Net/Store/BufferedIndexInput.cs @@ 
-1,6 +1,7 @@ using Lucene.Net.Diagnostics; using System; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Store { @@ -57,19 +58,19 @@ public override sealed byte ReadByte() return m_buffer[bufferPosition++]; } - public BufferedIndexInput(string resourceDesc) + protected BufferedIndexInput(string resourceDesc) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(resourceDesc, BUFFER_SIZE) { } - public BufferedIndexInput(string resourceDesc, IOContext context) + protected BufferedIndexInput(string resourceDesc, IOContext context) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(resourceDesc, GetBufferSize(context)) { } /// /// Inits with a specific - public BufferedIndexInput(string resourceDesc, int bufferSize) + protected BufferedIndexInput(string resourceDesc, int bufferSize) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : base(resourceDesc) { CheckBufferSize(bufferSize); @@ -110,6 +111,7 @@ public void SetBufferSize(int newSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual void NewBuffer(byte[] newBuffer) { // Subclasses can do something here @@ -122,7 +124,8 @@ protected virtual void NewBuffer(byte[] newBuffer) /// public int BufferSize => bufferSize; - private void CheckBufferSize(int bufferSize) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static void CheckBufferSize(int bufferSize) // LUCENENET: CA1822: Mark members as static { if (bufferSize <= 0) { @@ -130,6 +133,7 @@ private void CheckBufferSize(int bufferSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed void ReadBytes(byte[] b, int offset, int len) { ReadBytes(b, offset, len, true); @@ -220,7 +224,7 @@ public override sealed int ReadInt32() { if (4 <= (bufferLength - bufferPosition)) { - return ((m_buffer[bufferPosition++] & 0xFF) << 24) | ((m_buffer[bufferPosition++] & 0xFF) 
<< 16) + return ((m_buffer[bufferPosition++] & 0xFF) << 24) | ((m_buffer[bufferPosition++] & 0xFF) << 16) | ((m_buffer[bufferPosition++] & 0xFF) << 8) | (m_buffer[bufferPosition++] & 0xFF); } else @@ -236,9 +240,9 @@ public override sealed long ReadInt64() { if (8 <= (bufferLength - bufferPosition)) { - int i1 = ((m_buffer[bufferPosition++] & 0xff) << 24) | ((m_buffer[bufferPosition++] & 0xff) << 16) + int i1 = ((m_buffer[bufferPosition++] & 0xff) << 24) | ((m_buffer[bufferPosition++] & 0xff) << 16) | ((m_buffer[bufferPosition++] & 0xff) << 8) | (m_buffer[bufferPosition++] & 0xff); - int i2 = ((m_buffer[bufferPosition++] & 0xff) << 24) | ((m_buffer[bufferPosition++] & 0xff) << 16) + int i2 = ((m_buffer[bufferPosition++] & 0xff) << 24) | ((m_buffer[bufferPosition++] & 0xff) << 16) | ((m_buffer[bufferPosition++] & 0xff) << 8) | (m_buffer[bufferPosition++] & 0xff); return (((long)i1) << 32) | (i2 & 0xFFFFFFFFL); } @@ -396,6 +400,7 @@ private void Refill() /// the number of bytes to read protected abstract void ReadInternal(byte[] b, int offset, int length); + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed long GetFilePointer() { return bufferStart + bufferPosition; @@ -460,6 +465,7 @@ protected int FlushBuffer(IndexOutput @out, long numBytes) /// /// Returns default buffer sizes for the given /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int GetBufferSize(IOContext context) // LUCENENET NOTE: Renamed from BufferSize to prevent naming conflict { switch (context.Context) diff --git a/src/Lucene.Net/Store/BufferedIndexOutput.cs b/src/Lucene.Net/Store/BufferedIndexOutput.cs index 4b59b01d18..e76d2ddfcb 100644 --- a/src/Lucene.Net/Store/BufferedIndexOutput.cs +++ b/src/Lucene.Net/Store/BufferedIndexOutput.cs @@ -39,7 +39,7 @@ public abstract class BufferedIndexOutput : IndexOutput /// Creates a new with the default buffer size /// ( bytes see ) /// - public BufferedIndexOutput() + protected BufferedIndexOutput() // 
LUCENENET: CA1012: Abstract types should not have constructors (marked protected) : this(DEFAULT_BUFFER_SIZE) { } @@ -48,7 +48,9 @@ public BufferedIndexOutput() /// Creates a new with the given buffer size. /// the buffer size in bytes used to buffer writes internally. /// if the given buffer size is less or equal to 0 - public BufferedIndexOutput(int bufferSize) : this(bufferSize, new CRC32()) { } + protected BufferedIndexOutput(int bufferSize) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) + : this(bufferSize, new CRC32()) + { } // LUCENENET specific - added constructor overload so FSDirectory can still subclass BufferedIndexOutput, but // utilize its own buffer, since FileStream is already buffered in .NET. diff --git a/src/Lucene.Net/Store/ByteBufferIndexInput.cs b/src/Lucene.Net/Store/ByteBufferIndexInput.cs index d5aaeff2ad..73d6a7bb83 100644 --- a/src/Lucene.Net/Store/ByteBufferIndexInput.cs +++ b/src/Lucene.Net/Store/ByteBufferIndexInput.cs @@ -56,7 +56,7 @@ public abstract class ByteBufferIndexInput : IndexInput private class BoolRefWrapper { - private bool value; + private readonly bool value; // .NET port: this is needed as bool is not a reference type public BoolRefWrapper(bool value) diff --git a/src/Lucene.Net/Store/CompoundFileDirectory.cs b/src/Lucene.Net/Store/CompoundFileDirectory.cs index 284d67cac7..57c2702adc 100644 --- a/src/Lucene.Net/Store/CompoundFileDirectory.cs +++ b/src/Lucene.Net/Store/CompoundFileDirectory.cs @@ -82,12 +82,14 @@ public sealed class FileEntry private readonly Directory directory; private readonly string fileName; - private readonly int readBufferSize; + //private readonly int readBufferSize; // LUCENENET: Never read private readonly IDictionary entries; private readonly bool openForWrite; private static readonly IDictionary SENTINEL = Collections.EmptyMap(); private readonly CompoundFileWriter writer; +#pragma warning disable CA2213 // Disposable fields should be disposed private 
readonly IndexInputSlicer handle; +#pragma warning restore CA2213 // Disposable fields should be disposed /// /// Create a new . @@ -96,7 +98,7 @@ public CompoundFileDirectory(Directory directory, string fileName, IOContext con { this.directory = directory; this.fileName = fileName; - this.readBufferSize = BufferedIndexInput.GetBufferSize(context); + //this.readBufferSize = BufferedIndexInput.GetBufferSize(context); // LUCENENET: Never read this.IsOpen = false; this.openForWrite = openForWrite; if (!openForWrite) @@ -370,7 +372,9 @@ public override void DeleteFile(string name) /// /// Not implemented /// always: not supported by CFS +#pragma warning disable IDE0060, CA1822 // Remove unused parameter, Mark members as static public void RenameFile(string from, string to) +#pragma warning restore IDE0060, CA1822 // Remove unused parameter, Mark members as static { throw new NotSupportedException(); } @@ -430,7 +434,7 @@ private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer { private readonly CompoundFileDirectory outerInstance; - private FileEntry entry; + private readonly FileEntry entry; public IndexInputSlicerAnonymousInnerClassHelper(CompoundFileDirectory outerInstance, FileEntry entry) { diff --git a/src/Lucene.Net/Store/CompoundFileWriter.cs b/src/Lucene.Net/Store/CompoundFileWriter.cs index 3cec5169fb..69023aa47f 100644 --- a/src/Lucene.Net/Store/CompoundFileWriter.cs +++ b/src/Lucene.Net/Store/CompoundFileWriter.cs @@ -76,7 +76,9 @@ private sealed class FileEntry // all entries that are written to a sep. 
file but not yet moved into CFS private readonly LinkedList pendingEntries = new LinkedList(); private bool closed = false; +#pragma warning disable CA2213 // Disposable fields should be disposed private IndexOutput dataOut; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly AtomicBoolean outputTaken = new AtomicBoolean(false); internal readonly string entryTableName; internal readonly string dataFileName; @@ -179,6 +181,7 @@ public void Dispose() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EnsureOpen() { if (closed) @@ -191,7 +194,7 @@ private void EnsureOpen() /// Copy the contents of the file with specified extension into the provided /// output stream. /// - private long CopyFileEntry(IndexOutput dataOut, FileEntry fileEntry) + private static long CopyFileEntry(IndexOutput dataOut, FileEntry fileEntry) // LUCENENET: CA1822: Mark members as static { IndexInput @is = fileEntry.Dir.OpenInput(fileEntry.File, IOContext.READ_ONCE); bool success = false; @@ -285,6 +288,7 @@ internal IndexOutput CreateOutput(string name, IOContext context) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void ReleaseOutputLock() { outputTaken.CompareAndSet(true, false); @@ -323,11 +327,13 @@ internal long FileLength(string name) return fileEntry.Length; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal bool FileExists(string name) { return entries.ContainsKey(name); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal string[] ListAll() { return entries.Keys.ToArray(); @@ -340,7 +346,7 @@ private sealed class DirectCFSIndexOutput : IndexOutput private readonly IndexOutput @delegate; private readonly long offset; private bool closed; - private FileEntry entry; + private readonly FileEntry entry; // LUCENENET: marked readonly private long writtenBytes; private readonly bool isSeparate; @@ -382,12 +388,14 @@ protected override void Dispose(bool disposing) } } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetFilePointer() { return @delegate.GetFilePointer() - offset; } [Obsolete("(4.1) this method will be removed in Lucene 5.0")] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Seek(long pos) { if (Debugging.AssertsEnabled) Debugging.Assert(!closed); diff --git a/src/Lucene.Net/Store/Directory.cs b/src/Lucene.Net/Store/Directory.cs index b2433816f4..e93249be73 100644 --- a/src/Lucene.Net/Store/Directory.cs +++ b/src/Lucene.Net/Store/Directory.cs @@ -247,26 +247,18 @@ public virtual void Copy(Directory to, string src, string dest, IOContext contex public virtual IndexInputSlicer CreateSlicer(string name, IOContext context) { EnsureOpen(); - return new IndexInputSlicerAnonymousInnerClassHelper(this, name, context); + return new IndexInputSlicerAnonymousInnerClassHelper(OpenInput(name, context)); } private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer { - private readonly Directory outerInstance; - - private string name; - private IOContext context; + private readonly IndexInput @base; - public IndexInputSlicerAnonymousInnerClassHelper(Directory outerInstance, string name, IOContext context) + public IndexInputSlicerAnonymousInnerClassHelper(IndexInput @base) { - this.outerInstance = outerInstance; - this.name = name; - this.context = context; - @base = outerInstance.OpenInput(name, context); + this.@base = @base; } - private readonly IndexInput @base; - public override IndexInput OpenSlice(string sliceDescription, long offset, long length) { return new SlicedIndexInput("SlicedIndexInput(" + sliceDescription + " in " + @base + ")", @base, offset, length); diff --git a/src/Lucene.Net/Store/FSDirectory.cs b/src/Lucene.Net/Store/FSDirectory.cs index ecfdf1c2cf..8b03dd8549 100644 --- a/src/Lucene.Net/Store/FSDirectory.cs +++ b/src/Lucene.Net/Store/FSDirectory.cs @@ -210,10 +210,9 @@ public override void SetLockFactory(LockFactory lockFactory) // for 
filesystem based LockFactory, delete the lockPrefix, if the locks are placed // in index dir. If no index dir is given, set ourselves - if (lockFactory is FSLockFactory) + if (lockFactory is FSLockFactory lf) { - FSLockFactory lf = (FSLockFactory)lockFactory; - DirectoryInfo dir = lf.LockDir; + var dir = lf.LockDir; // if the lock factory has no lockDir set, use the this directory as lockDir if (dir == null) { @@ -477,7 +476,9 @@ protected class FSIndexOutput : BufferedIndexOutput private readonly FSDirectory parent; internal readonly string name; +#pragma warning disable CA2213 // Disposable fields should be disposed private readonly FileStream file; +#pragma warning restore CA2213 // Disposable fields should be disposed private volatile bool isOpen; // remember if the file is open, so that we don't try to close it more than once private readonly CRC32 crc = new CRC32(); diff --git a/src/Lucene.Net/Store/Lock.cs b/src/Lucene.Net/Store/Lock.cs index 703bfcacfb..b796d895fc 100644 --- a/src/Lucene.Net/Store/Lock.cs +++ b/src/Lucene.Net/Store/Lock.cs @@ -130,10 +130,9 @@ public bool Obtain(long lockWaitTimeout) { reason += ": " + FailureReason; } - LockObtainFailedException e = new LockObtainFailedException(reason); - e = FailureReason != null - ? new LockObtainFailedException(reason, FailureReason) - : new LockObtainFailedException(reason); + LockObtainFailedException e = FailureReason != null + ? new LockObtainFailedException(reason, FailureReason) + : new LockObtainFailedException(reason); throw e; } @@ -178,8 +177,8 @@ public void Dispose() /// Utility class for executing code with exclusive access. public abstract class With // LUCENENET specific - made generic so we don't need to deal with casting { - private Lock @lock; - private long lockWaitTimeout; + private readonly Lock @lock; // LUCENENET: marked readonly + private readonly long lockWaitTimeout; // LUCENENET: marked readonly /// /// Constructs an executor that will grab the named . 
@@ -187,7 +186,7 @@ public abstract class With // LUCENENET specific - made generic so we don't n /// length of time to wait in /// milliseconds or /// to retry forever - public With(Lock @lock, long lockWaitTimeout) + protected With(Lock @lock, long lockWaitTimeout) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.@lock = @lock; this.lockWaitTimeout = lockWaitTimeout; diff --git a/src/Lucene.Net/Store/LockStressTest.cs b/src/Lucene.Net/Store/LockStressTest.cs index bcaf9d44d4..95cd51d958 100644 --- a/src/Lucene.Net/Store/LockStressTest.cs +++ b/src/Lucene.Net/Store/LockStressTest.cs @@ -32,7 +32,7 @@ namespace Lucene.Net.Store /// /// /// - public class LockStressTest + public static class LockStressTest // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { [STAThread] public static void Main(string[] args) @@ -114,66 +114,61 @@ public static void Main(string[] args) DirectoryInfo lockDir = new DirectoryInfo(lockDirName); - if (lockFactory is FSLockFactory) + if (lockFactory is FSLockFactory fsLockFactory) { - ((FSLockFactory)lockFactory).SetLockDir(lockDir); + fsLockFactory.SetLockDir(lockDir); } Console.WriteLine("Connecting to server " + addr + " and registering as client " + myID + "..."); - using (Socket socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) + using Socket socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); + socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, 1); + socket.Connect(verifierHost, verifierPort); + + using Stream stream = new NetworkStream(socket); + BinaryReader intReader = new BinaryReader(stream); + BinaryWriter intWriter = new BinaryWriter(stream); + + intWriter.Write(myID); + stream.Flush(); + + lockFactory.LockPrefix = "test"; + LockFactory verifyLF = new VerifyingLockFactory(lockFactory, stream); + Lock l = verifyLF.MakeLock("test.lock"); + Random rnd = new 
Random(); + + // wait for starting gun + if (intReader.ReadInt32() != 43) { - socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, 1); - socket.Connect(verifierHost, verifierPort); + throw new IOException("Protocol violation"); + } - using (Stream stream = new NetworkStream(socket)) + for (int i = 0; i < count; i++) + { + bool obtained = false; + + try { - BinaryReader intReader = new BinaryReader(stream); - BinaryWriter intWriter = new BinaryWriter(stream); - - intWriter.Write(myID); - stream.Flush(); - - lockFactory.LockPrefix = "test"; - LockFactory verifyLF = new VerifyingLockFactory(lockFactory, stream); - Lock l = verifyLF.MakeLock("test.lock"); - Random rnd = new Random(); - - // wait for starting gun - if (intReader.ReadInt32() != 43) - { - throw new IOException("Protocol violation"); - } - - for (int i = 0; i < count; i++) - { - bool obtained = false; - - try - { - obtained = l.Obtain(rnd.Next(100) + 10); - } + obtained = l.Obtain(rnd.Next(100) + 10); + } #pragma warning disable 168 - catch (LockObtainFailedException e) + catch (LockObtainFailedException e) #pragma warning restore 168 - { - } - - if (obtained) - { - Thread.Sleep(sleepTimeMS); - l.Dispose(); - } - - if (i % 500 == 0) - { - Console.WriteLine((i * 100.0 / count) + "% done."); - } - - Thread.Sleep(sleepTimeMS); - } + { + } + + if (obtained) + { + Thread.Sleep(sleepTimeMS); + l.Dispose(); } - } + if (i % 500 == 0) + { + Console.WriteLine((i * 100.0 / count) + "% done."); + } + + Thread.Sleep(sleepTimeMS); + } Console.WriteLine("Finished " + count + " tries."); } } diff --git a/src/Lucene.Net/Store/LockVerifyServer.cs b/src/Lucene.Net/Store/LockVerifyServer.cs index cb2de3bed7..6746d10895 100644 --- a/src/Lucene.Net/Store/LockVerifyServer.cs +++ b/src/Lucene.Net/Store/LockVerifyServer.cs @@ -37,7 +37,7 @@ namespace Lucene.Net.Store /// /// /// - public class LockVerifyServer + public static class LockVerifyServer // LUCENENET specific: CA1052 Static holder types should 
be Static or NotInheritable { [STAThread] public static void Main(string[] args) @@ -56,54 +56,52 @@ public static void Main(string[] args) IPAddress ipAddress = IPAddress.Parse(hostname); - using (Socket s = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) - { - s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, 1); - s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReceiveTimeout, 30000);// SoTimeout = 30000; // initially 30 secs to give clients enough time to startup + using Socket s = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); + s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, 1); + s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReceiveTimeout, 30000);// SoTimeout = 30000; // initially 30 secs to give clients enough time to startup - s.Bind(new IPEndPoint(ipAddress, 0)); - s.Listen(maxClients); - Console.WriteLine("Listening on " + ((IPEndPoint)s.LocalEndPoint).Port.ToString() + "..."); + s.Bind(new IPEndPoint(ipAddress, 0)); + s.Listen(maxClients); + Console.WriteLine("Listening on " + ((IPEndPoint)s.LocalEndPoint).Port.ToString() + "..."); - // we set the port as a sysprop, so the ANT task can read it. For that to work, this server must run in-process: - Environment.SetEnvironmentVariable("lockverifyserver.port", ((IPEndPoint)s.LocalEndPoint).Port.ToString(CultureInfo.InvariantCulture)); + // we set the port as a sysprop, so the ANT task can read it. 
For that to work, this server must run in-process: + Environment.SetEnvironmentVariable("lockverifyserver.port", ((IPEndPoint)s.LocalEndPoint).Port.ToString(CultureInfo.InvariantCulture)); - object localLock = new object(); - int[] lockedID = new int[1]; - lockedID[0] = -1; - CountdownEvent startingGun = new CountdownEvent(1); - ThreadJob[] threads = new ThreadJob[maxClients]; + object localLock = new object(); + int[] lockedID = new int[1]; + lockedID[0] = -1; + CountdownEvent startingGun = new CountdownEvent(1); + ThreadJob[] threads = new ThreadJob[maxClients]; - for (int count = 0; count < maxClients; count++) - { - Socket cs = s.Accept(); - threads[count] = new ThreadAnonymousInnerClassHelper(localLock, lockedID, startingGun, cs); - threads[count].Start(); - } + for (int count = 0; count < maxClients; count++) + { + Socket cs = s.Accept(); + threads[count] = new ThreadAnonymousInnerClassHelper(localLock, lockedID, startingGun, cs); + threads[count].Start(); + } - // start - Console.WriteLine("All clients started, fire gun..."); - startingGun.Signal(); + // start + Console.WriteLine("All clients started, fire gun..."); + startingGun.Signal(); - // wait for all threads to finish - foreach (ThreadJob t in threads) - { - t.Join(); - } + // wait for all threads to finish + foreach (ThreadJob t in threads) + { + t.Join(); + } - // cleanup sysprop - Environment.SetEnvironmentVariable("lockverifyserver.port", null); + // cleanup sysprop + Environment.SetEnvironmentVariable("lockverifyserver.port", null); - Console.WriteLine("Server terminated."); - } + Console.WriteLine("Server terminated."); } private class ThreadAnonymousInnerClassHelper : ThreadJob { - private object localLock; - private int[] lockedID; - private CountdownEvent startingGun; - private Socket cs; + private readonly object localLock; + private readonly int[] lockedID; + private readonly CountdownEvent startingGun; + private readonly Socket cs; public ThreadAnonymousInnerClassHelper(object localLock, 
int[] lockedID, CountdownEvent startingGun, Socket cs) { @@ -115,81 +113,79 @@ public ThreadAnonymousInnerClassHelper(object localLock, int[] lockedID, Countdo public override void Run() { - using (Stream stream = new NetworkStream(cs)) + using Stream stream = new NetworkStream(cs); + BinaryReader intReader = new BinaryReader(stream); + BinaryWriter intWriter = new BinaryWriter(stream); + try { - BinaryReader intReader = new BinaryReader(stream); - BinaryWriter intWriter = new BinaryWriter(stream); - try + int id = intReader.ReadInt32(); + if (id < 0) + { + throw new IOException("Client closed connection before communication started."); + } + + startingGun.Wait(); + intWriter.Write(43); + stream.Flush(); + + while (true) { - int id = intReader.ReadInt32(); - if (id < 0) + int command = stream.ReadByte(); + if (command < 0) { - throw new IOException("Client closed connection before communication started."); + return; // closed } - startingGun.Wait(); - intWriter.Write(43); - stream.Flush(); - - while (true) + lock (localLock) { - int command = stream.ReadByte(); - if (command < 0) + int currentLock = lockedID[0]; + if (currentLock == -2) { - return; // closed + return; // another thread got error, so we exit, too! } - - lock (localLock) + switch (command) { - int currentLock = lockedID[0]; - if (currentLock == -2) - { - return; // another thread got error, so we exit, too! 
- } - switch (command) - { - case 1: - // Locked - if (currentLock != -1) - { - lockedID[0] = -2; - throw new InvalidOperationException("id " + id + " got lock, but " + currentLock + " already holds the lock"); - } - lockedID[0] = id; - break; - - case 0: - // Unlocked - if (currentLock != id) - { - lockedID[0] = -2; - throw new InvalidOperationException("id " + id + " released the lock, but " + currentLock + " is the one holding the lock"); - } - lockedID[0] = -1; - break; - - default: - throw new Exception("Unrecognized command: " + command); - } - intWriter.Write((byte)command); - stream.Flush(); + case 1: + // Locked + if (currentLock != -1) + { + lockedID[0] = -2; + throw new InvalidOperationException("id " + id + " got lock, but " + currentLock + " already holds the lock"); + } + lockedID[0] = id; + break; + + case 0: + // Unlocked + if (currentLock != id) + { + lockedID[0] = -2; + throw new InvalidOperationException("id " + id + " released the lock, but " + currentLock + " is the one holding the lock"); + } + lockedID[0] = -1; + break; + + default: + throw new Exception("Unrecognized command: " + command); } + intWriter.Write((byte)command); + stream.Flush(); } } - catch (IOException ioe) - { - throw new Exception(ioe.ToString(), ioe); - } - catch (Exception e) - { - // LUCENENET NOTE: We need to throw a new exception - // to ensure this is Exception and not some other type. - throw new Exception(e.ToString(), e); - } - finally - { - IOUtils.DisposeWhileHandlingException(cs); - } + } + catch (IOException ioe) + { + throw new Exception(ioe.ToString(), ioe); + } + catch (Exception e) + { + // LUCENENET NOTE: We need to throw a new exception + // to ensure this is Exception and not some other type. 
+ throw new Exception(e.ToString(), e); + } + finally + { + IOUtils.DisposeWhileHandlingException(cs); } } } diff --git a/src/Lucene.Net/Store/MMapDirectory.cs b/src/Lucene.Net/Store/MMapDirectory.cs index 2a4c94100c..cd01d15b62 100644 --- a/src/Lucene.Net/Store/MMapDirectory.cs +++ b/src/Lucene.Net/Store/MMapDirectory.cs @@ -196,7 +196,7 @@ private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer { private readonly MMapDirectory outerInstance; - private MMapIndexInput full; + private readonly MMapIndexInput full; public IndexInputSlicerAnonymousInnerClassHelper(MMapDirectory outerInstance, MMapIndexInput full) { @@ -230,12 +230,10 @@ public sealed class MMapIndexInput : ByteBufferIndexInput { internal MemoryMappedFile memoryMappedFile; // .NET port: this is equivalent to FileChannel.map private readonly FileStream fc; - private readonly MMapDirectory outerInstance; internal MMapIndexInput(MMapDirectory outerInstance, string resourceDescription, FileStream fc) : base(resourceDescription, null, fc.Length, outerInstance.chunkSizePower, true) { - this.outerInstance = outerInstance; this.fc = fc ?? 
throw new ArgumentNullException(nameof(fc)); this.SetBuffers(outerInstance.Map(this, fc, 0, fc.Length)); } diff --git a/src/Lucene.Net/Store/NRTCachingDirectory.cs b/src/Lucene.Net/Store/NRTCachingDirectory.cs index 005cecb5c8..f8bf8ed841 100644 --- a/src/Lucene.Net/Store/NRTCachingDirectory.cs +++ b/src/Lucene.Net/Store/NRTCachingDirectory.cs @@ -72,7 +72,9 @@ public class NRTCachingDirectory : Directory private readonly long maxMergeSizeBytes; private readonly long maxCachedBytes; - private static readonly bool VERBOSE = false; +#pragma warning disable CA1802 // Use literals where appropriate + private static readonly bool VERBOSE = false; // For debugging +#pragma warning restore CA1802 // Use literals where appropriate /// /// We will cache a newly created output if 1) it's a @@ -229,9 +231,7 @@ public override IndexOutput CreateOutput(string name, IOContext context) { @delegate.DeleteFile(name); } -#pragma warning disable 168 - catch (IOException ioe) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // this is fine: file may not exist } @@ -243,9 +243,7 @@ public override IndexOutput CreateOutput(string name, IOContext context) { cache.DeleteFile(name); } -#pragma warning disable 168 - catch (IOException ioe) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // this is fine: file may not exist } diff --git a/src/Lucene.Net/Store/NativeFSLockFactory.cs b/src/Lucene.Net/Store/NativeFSLockFactory.cs index 86d258dfa8..b3c59a9f78 100644 --- a/src/Lucene.Net/Store/NativeFSLockFactory.cs +++ b/src/Lucene.Net/Store/NativeFSLockFactory.cs @@ -115,17 +115,13 @@ private static bool LoadIsFileStreamLockingPlatform() { return FileSupport.GetFileIOExceptionHResult(provokeException: (fileName) => { - using (var lockStream = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite)) - { - lockStream.Lock(0, 1); // Create an 
exclusive lock - using (var stream = new FileStream(fileName, FileMode.Open, FileAccess.Write, FileShare.ReadWrite)) - { - // try to find out if the file is locked by writing a byte. Note that we need to flush the stream to find out. - stream.WriteByte(0); - stream.Flush(); // this *may* throw an IOException if the file is locked, but... - // ... closing the stream is the real test - } - } + using var lockStream = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite); + lockStream.Lock(0, 1); // Create an exclusive lock + using var stream = new FileStream(fileName, FileMode.Open, FileAccess.Write, FileShare.ReadWrite); + // try to find out if the file is locked by writing a byte. Note that we need to flush the stream to find out. + stream.WriteByte(0); + stream.Flush(); // this *may* throw an IOException if the file is locked, but... + // ... closing the stream is the real test }); } @@ -139,11 +135,9 @@ private static bool LoadIsFileStreamLockingPlatform() return FileSupport.GetFileIOExceptionHResult(provokeException: (fileName) => { - using (var lockStream = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read, 1, FileOptions.None)) + using var lockStream = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read, 1, FileOptions.None); // Try to get an exclusive lock on the file - this should throw an IOException with the current platform's HResult value for FileShare violation - using (var stream = new FileStream(fileName, FileMode.Open, FileAccess.Write, FileShare.None, 1, FileOptions.None)) - { - } + using var stream = new FileStream(fileName, FileMode.Open, FileAccess.Write, FileShare.None, 1, FileOptions.None); }); } @@ -247,7 +241,9 @@ public override void ClearLock(string lockName) // Reference: https://stackoverflow.com/q/46380483 internal class FallbackNativeFSLock : Lock { +#pragma warning disable CA2213 // Disposable fields should be disposed private FileStream 
channel; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly string path; private readonly DirectoryInfo lockDir; @@ -416,7 +412,9 @@ public override string ToString() // Locks the entire file. macOS requires this approach. internal class SharingNativeFSLock : Lock { +#pragma warning disable CA2213 // Disposable fields should be disposed private FileStream channel; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly string path; private readonly DirectoryInfo lockDir; @@ -576,7 +574,9 @@ public override string ToString() // Uses FileStream locking of file pages. internal class NativeFSLock : Lock { +#pragma warning disable CA2213 // Disposable fields should be disposed private FileStream channel; +#pragma warning restore CA2213 // Disposable fields should be disposed private readonly string path; private readonly DirectoryInfo lockDir; diff --git a/src/Lucene.Net/Store/NoLockFactory.cs b/src/Lucene.Net/Store/NoLockFactory.cs index eeda73fe36..11e085b586 100644 --- a/src/Lucene.Net/Store/NoLockFactory.cs +++ b/src/Lucene.Net/Store/NoLockFactory.cs @@ -26,8 +26,8 @@ namespace Lucene.Net.Store public class NoLockFactory : LockFactory { // Single instance returned whenever makeLock is called. 
- private static NoLock singletonLock = new NoLock(); - private static NoLockFactory singleton = new NoLockFactory(); + private static readonly NoLock singletonLock = new NoLock(); // LUCENENET: marked readonly + private static readonly NoLockFactory singleton = new NoLockFactory(); // LUCENENET: marked readonly private NoLockFactory() { diff --git a/src/Lucene.Net/Store/OutputStreamDataOutput.cs b/src/Lucene.Net/Store/OutputStreamDataOutput.cs index 7e448f0335..f6629ae1be 100644 --- a/src/Lucene.Net/Store/OutputStreamDataOutput.cs +++ b/src/Lucene.Net/Store/OutputStreamDataOutput.cs @@ -42,9 +42,29 @@ public override void WriteBytes(byte[] b, int offset, int length) _writer.Write(b, offset, length); } - public virtual void Dispose() + /// + /// Releases all resources used by the . + /// + public void Dispose() { - _writer.Dispose(); + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Releases resources used by the and + /// if overridden in a derived class, optionally releases unmanaged resources. + /// + /// true to release both managed and unmanaged resources; + /// false to release only unmanaged resources. 
+ + // LUCENENET specific - implemented proper dispose pattern + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + _writer.Dispose(); + } } } } \ No newline at end of file diff --git a/src/Lucene.Net/Store/RAMDirectory.cs b/src/Lucene.Net/Store/RAMDirectory.cs index 7cf0746dda..446ed9dea9 100644 --- a/src/Lucene.Net/Store/RAMDirectory.cs +++ b/src/Lucene.Net/Store/RAMDirectory.cs @@ -59,9 +59,7 @@ public RAMDirectory() { SetLockFactory(new SingleInstanceLockFactory()); } -#pragma warning disable 168 - catch (IOException e) -#pragma warning restore 168 + catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment { // Cannot happen } @@ -140,8 +138,7 @@ public override sealed bool FileExists(string name) public override sealed long FileLength(string name) { EnsureOpen(); - RAMFile file; - if (!m_fileMap.TryGetValue(name, out file) || file == null) + if (!m_fileMap.TryGetValue(name, out RAMFile file) || file == null) { throw new FileNotFoundException(name); } @@ -164,8 +161,7 @@ public long GetSizeInBytes() public override void DeleteFile(string name) { EnsureOpen(); - RAMFile file; - if (m_fileMap.TryRemove(name, out file) && file != null) + if (m_fileMap.TryRemove(name, out RAMFile file) && file != null) { file.directory = null; m_sizeInBytes.AddAndGet(-file.m_sizeInBytes); @@ -182,8 +178,7 @@ public override IndexOutput CreateOutput(string name, IOContext context) { EnsureOpen(); RAMFile file = NewRAMFile(); - RAMFile existing; - if (m_fileMap.TryRemove(name, out existing) && existing != null) + if (m_fileMap.TryRemove(name, out RAMFile existing) && existing != null) { m_sizeInBytes.AddAndGet(-existing.m_sizeInBytes); existing.directory = null; @@ -211,8 +206,7 @@ public override void Sync(ICollection names) public override IndexInput OpenInput(string name, IOContext context) { EnsureOpen(); - RAMFile file; - if (!m_fileMap.TryGetValue(name, out file) || file == null) + if (!m_fileMap.TryGetValue(name, out RAMFile file) 
|| file == null) { throw new FileNotFoundException(name); } diff --git a/src/Lucene.Net/Store/RAMInputStream.cs b/src/Lucene.Net/Store/RAMInputStream.cs index c02bbd92b0..7ac4ce3f85 100644 --- a/src/Lucene.Net/Store/RAMInputStream.cs +++ b/src/Lucene.Net/Store/RAMInputStream.cs @@ -32,8 +32,8 @@ public class RAMInputStream : IndexInput { internal const int BUFFER_SIZE = RAMOutputStream.BUFFER_SIZE; - private RAMFile file; - private long length; + private readonly RAMFile file; // LUCENENET: marked readonly + private readonly long length; // LUCENENET: marked readonly private byte[] currentBuffer; private int currentBufferIndex; diff --git a/src/Lucene.Net/Store/RAMOutputStream.cs b/src/Lucene.Net/Store/RAMOutputStream.cs index 38c475e1d8..a5a5598f76 100644 --- a/src/Lucene.Net/Store/RAMOutputStream.cs +++ b/src/Lucene.Net/Store/RAMOutputStream.cs @@ -31,7 +31,7 @@ public class RAMOutputStream : IndexOutput { internal const int BUFFER_SIZE = 1024; - private RAMFile file; + private readonly RAMFile file; // LUCENENET: marked readonly private byte[] currentBuffer; private int currentBufferIndex; @@ -40,7 +40,7 @@ public class RAMOutputStream : IndexOutput private long bufferStart; private int bufferLength; - private BufferedChecksum crc = new BufferedChecksum(new CRC32()); + private readonly BufferedChecksum crc = new BufferedChecksum(new CRC32()); // LUCENENET: marked readonly /// /// Construct an empty output buffer. 
diff --git a/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs b/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs index 8ad4dce9df..e0b8210247 100644 --- a/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs +++ b/src/Lucene.Net/Store/RateLimitedDirectoryWrapper.cs @@ -68,8 +68,7 @@ public override void Copy(Directory to, string src, string dest, IOContext conte private RateLimiter GetRateLimiter(IOContext.UsageContext context) { //if (Debugging.AssertsEnabled) Debugging.Assert(context != null); // LUCENENET NOTE: In .NET, enum can never be null - RateLimiter ret; - return _contextRateLimiters.TryGetValue(context, out ret) ? ret : null; + return _contextRateLimiters.TryGetValue(context, out RateLimiter ret) ? ret : null; } /// @@ -98,8 +97,7 @@ public void SetMaxWriteMBPerSec(double? mbPerSec, IOContext.UsageContext context // throw new ArgumentException("Context must not be null"); //} //int ord = context.ordinal(); - RateLimiter limiter; - _contextRateLimiters.TryGetValue(context, out limiter); + _contextRateLimiters.TryGetValue(context, out RateLimiter limiter); if (mbPerSec == null) { diff --git a/src/Lucene.Net/Store/RateLimitedIndexOutput.cs b/src/Lucene.Net/Store/RateLimitedIndexOutput.cs index 1daa5c4bae..92ea575b6e 100644 --- a/src/Lucene.Net/Store/RateLimitedIndexOutput.cs +++ b/src/Lucene.Net/Store/RateLimitedIndexOutput.cs @@ -34,9 +34,9 @@ internal sealed class RateLimitedIndexOutput : BufferedIndexOutput internal RateLimitedIndexOutput(RateLimiter rateLimiter, IndexOutput @delegate) { // TODO should we make buffer size configurable - if (@delegate is BufferedIndexOutput) + if (@delegate is BufferedIndexOutput bufferedIndexOutput) { - bufferedDelegate = (BufferedIndexOutput)@delegate; + bufferedDelegate = bufferedIndexOutput; this.@delegate = @delegate; } else diff --git a/src/Lucene.Net/Store/SimpleFSLockFactory.cs b/src/Lucene.Net/Store/SimpleFSLockFactory.cs index 92588e460f..29733cd43b 100644 --- a/src/Lucene.Net/Store/SimpleFSLockFactory.cs 
+++ b/src/Lucene.Net/Store/SimpleFSLockFactory.cs @@ -92,12 +92,10 @@ public override void ClearLock(string lockName) { lockFile.Delete(); } -#pragma warning disable 168 catch (Exception e) -#pragma warning restore 168 { if (lockFile.Exists) // Delete failed and lockFile exists - throw new IOException("Cannot delete " + lockFile); + throw new IOException("Cannot delete " + lockFile, e); // LUCENENET specific: wrapped inner exception } } } diff --git a/src/Lucene.Net/Store/SingleInstanceLockFactory.cs b/src/Lucene.Net/Store/SingleInstanceLockFactory.cs index 7752c23c7e..e4323405b9 100644 --- a/src/Lucene.Net/Store/SingleInstanceLockFactory.cs +++ b/src/Lucene.Net/Store/SingleInstanceLockFactory.cs @@ -31,7 +31,7 @@ namespace Lucene.Net.Store /// public class SingleInstanceLockFactory : LockFactory { - private JCG.HashSet locks = new JCG.HashSet(); + private readonly JCG.HashSet locks = new JCG.HashSet(); // LUCENENET: marked readonly public override Lock MakeLock(string lockName) { @@ -56,7 +56,7 @@ public override void ClearLock(string lockName) internal class SingleInstanceLock : Lock { internal string lockName; - private JCG.HashSet locks; + private readonly JCG.HashSet locks; public SingleInstanceLock(JCG.HashSet locks, string lockName) { diff --git a/src/Lucene.Net/Support/AssemblyUtils.cs b/src/Lucene.Net/Support/AssemblyUtils.cs index 5beaab2059..370b12a557 100644 --- a/src/Lucene.Net/Support/AssemblyUtils.cs +++ b/src/Lucene.Net/Support/AssemblyUtils.cs @@ -147,7 +147,7 @@ public static bool IsFrameworkAssembly(AssemblyName assembly) var publicKey = assembly.GetPublicKeyToken(); - if (publicKey == default(byte[])) + if (publicKey == default) { return false; } diff --git a/src/Lucene.Net/Support/CRC32.cs b/src/Lucene.Net/Support/CRC32.cs index c816a69f2f..3681576494 100644 --- a/src/Lucene.Net/Support/CRC32.cs +++ b/src/Lucene.Net/Support/CRC32.cs @@ -25,14 +25,14 @@ namespace Lucene.Net.Support { internal class CRC32 : IChecksum { - private static readonly 
UInt32[] crcTable = InitializeCRCTable(); + private static readonly uint[] crcTable = InitializeCRCTable(); - private static UInt32[] InitializeCRCTable() + private static uint[] InitializeCRCTable() { - UInt32[] crcTable = new UInt32[256]; - for (UInt32 n = 0; n < 256; n++) + uint[] crcTable = new uint[256]; + for (uint n = 0; n < 256; n++) { - UInt32 c = n; + uint c = n; for (int k = 8; --k >= 0; ) { if ((c & 1) != 0) @@ -45,7 +45,7 @@ private static UInt32[] InitializeCRCTable() return crcTable; } - private UInt32 crc = 0; + private uint crc = 0; public long Value => crc & 0xffffffffL; @@ -56,14 +56,14 @@ public void Reset() public void Update(int bval) { - UInt32 c = ~crc; + uint c = ~crc; c = crcTable[(c ^ bval) & 0xff] ^ (c >> 8); crc = ~c; } public void Update(byte[] buf, int off, int len) { - UInt32 c = ~crc; + uint c = ~crc; while (--len >= 0) c = crcTable[(c ^ buf[off++]) & 0xff] ^ (c >> 8); crc = ~c; diff --git a/src/Lucene.Net/Support/Codecs/ExcludeCodecFromScanAttribute.cs b/src/Lucene.Net/Support/Codecs/ExcludeCodecFromScanAttribute.cs index 0892271cf2..1e8d3299b8 100644 --- a/src/Lucene.Net/Support/Codecs/ExcludeCodecFromScanAttribute.cs +++ b/src/Lucene.Net/Support/Codecs/ExcludeCodecFromScanAttribute.cs @@ -29,7 +29,7 @@ namespace Lucene.Net.Codecs /// . /// [AttributeUsage(AttributeTargets.Class, AllowMultiple = false, Inherited = true)] - public class ExcludeCodecFromScanAttribute : ExcludeServiceAttribute + public sealed class ExcludeCodecFromScanAttribute : ExcludeServiceAttribute { } } diff --git a/src/Lucene.Net/Support/Codecs/ExcludeDocValuesFormatFromScanAttribute.cs b/src/Lucene.Net/Support/Codecs/ExcludeDocValuesFormatFromScanAttribute.cs index 0d99954063..e58eb544eb 100644 --- a/src/Lucene.Net/Support/Codecs/ExcludeDocValuesFormatFromScanAttribute.cs +++ b/src/Lucene.Net/Support/Codecs/ExcludeDocValuesFormatFromScanAttribute.cs @@ -29,7 +29,7 @@ namespace Lucene.Net.Codecs /// . 
/// [AttributeUsage(AttributeTargets.Class, AllowMultiple = false, Inherited = true)] - public class ExcludeDocValuesFormatFromScanAttribute : ExcludeServiceAttribute + public sealed class ExcludeDocValuesFormatFromScanAttribute : ExcludeServiceAttribute { } } diff --git a/src/Lucene.Net/Support/Codecs/ExcludePostingsFormatFromScanAttribute.cs b/src/Lucene.Net/Support/Codecs/ExcludePostingsFormatFromScanAttribute.cs index b499c40a76..f47f10d222 100644 --- a/src/Lucene.Net/Support/Codecs/ExcludePostingsFormatFromScanAttribute.cs +++ b/src/Lucene.Net/Support/Codecs/ExcludePostingsFormatFromScanAttribute.cs @@ -29,7 +29,7 @@ namespace Lucene.Net.Codecs /// . /// [AttributeUsage(AttributeTargets.Class, AllowMultiple = false, Inherited = true)] - public class ExcludePostingsFormatFromScanAttribute : ExcludeServiceAttribute + public sealed class ExcludePostingsFormatFromScanAttribute : ExcludeServiceAttribute { } } diff --git a/src/Lucene.Net/Support/Collections.cs b/src/Lucene.Net/Support/Collections.cs index a5a4d4c5fc..ebc077adf5 100644 --- a/src/Lucene.Net/Support/Collections.cs +++ b/src/Lucene.Net/Support/Collections.cs @@ -80,8 +80,8 @@ public static IComparer ReverseOrder(IComparer cmp) if (cmp == null) return ReverseOrder(); - if (cmp is ReverseComparer2) - return ((ReverseComparer2)cmp).cmp; + if (cmp is ReverseComparer2 reverseComparer2) + return reverseComparer2.cmp; return new ReverseComparer2(cmp); } @@ -107,21 +107,19 @@ public static string ToString(ICollection collection) } bool isValueType = typeof(T).IsValueType; - using (var it = collection.GetEnumerator()) + using var it = collection.GetEnumerator(); + StringBuilder sb = new StringBuilder(); + sb.Append('['); + it.MoveNext(); + while (true) { - StringBuilder sb = new StringBuilder(); - sb.Append('['); - it.MoveNext(); - while (true) + T e = it.Current; + sb.Append(object.ReferenceEquals(e, collection) ? "(this Collection)" : (isValueType ? 
e.ToString() : ToString(e))); + if (!it.MoveNext()) { - T e = it.Current; - sb.Append(object.ReferenceEquals(e, collection) ? "(this Collection)" : (isValueType ? e.ToString() : ToString(e))); - if (!it.MoveNext()) - { - return sb.Append(']').ToString(); - } - sb.Append(',').Append(' '); + return sb.Append(']').ToString(); } + sb.Append(',').Append(' '); } } @@ -133,10 +131,8 @@ public static string ToString(ICollection collection) /// public static string ToString(ICollection collection, CultureInfo culture) { - using (var context = new CultureContext(culture)) - { - return ToString(collection); - } + using var context = new CultureContext(culture); + return ToString(collection); } /// @@ -155,25 +151,23 @@ public static string ToString(IDictionary dictionary bool keyIsValueType = typeof(TKey).IsValueType; bool valueIsValueType = typeof(TValue).IsValueType; - using (var i = dictionary.GetEnumerator()) + using var i = dictionary.GetEnumerator(); + StringBuilder sb = new StringBuilder(); + sb.Append('{'); + i.MoveNext(); + while (true) { - StringBuilder sb = new StringBuilder(); - sb.Append('{'); - i.MoveNext(); - while (true) + KeyValuePair e = i.Current; + TKey key = e.Key; + TValue value = e.Value; + sb.Append(object.ReferenceEquals(key, dictionary) ? "(this Dictionary)" : (keyIsValueType ? key.ToString() : ToString(key))); + sb.Append('='); + sb.Append(object.ReferenceEquals(value, dictionary) ? "(this Dictionary)" : (valueIsValueType ? value.ToString() : ToString(value))); + if (!i.MoveNext()) { - KeyValuePair e = i.Current; - TKey key = e.Key; - TValue value = e.Value; - sb.Append(object.ReferenceEquals(key, dictionary) ? "(this Dictionary)" : (keyIsValueType ? key.ToString() : ToString(key))); - sb.Append('='); - sb.Append(object.ReferenceEquals(value, dictionary) ? "(this Dictionary)" : (valueIsValueType ? 
value.ToString() : ToString(value))); - if (!i.MoveNext()) - { - return sb.Append('}').ToString(); - } - sb.Append(',').Append(' '); + return sb.Append('}').ToString(); } + sb.Append(',').Append(' '); } } @@ -185,10 +179,8 @@ public static string ToString(IDictionary dictionary /// public static string ToString(IDictionary dictionary, CultureInfo culture) { - using (var context = new CultureContext(culture)) - { - return ToString(dictionary); - } + using var context = new CultureContext(culture); + return ToString(dictionary); } /// @@ -217,10 +209,8 @@ public static string ToString(object obj) /// public static string ToString(object obj, CultureInfo culture) { - using (var context = new CultureContext(culture)) - { - return ToString(obj); - } + using var context = new CultureContext(culture); + return ToString(obj); } #region Nested Types @@ -282,8 +272,8 @@ public int Compare(T t1, T t2) public override bool Equals(object o) { return (o == this) || - (o is ReverseComparer2 && - cmp.Equals(((ReverseComparer2)o).cmp)); + (o is ReverseComparer2 reverseComparer2 && + cmp.Equals(reverseComparer2.cmp)); } public override int GetHashCode() diff --git a/src/Lucene.Net/Support/Compatibility/NullableAttributes.cs b/src/Lucene.Net/Support/Compatibility/NullableAttributes.cs index e6db6d9aaa..24b98dfb34 100644 --- a/src/Lucene.Net/Support/Compatibility/NullableAttributes.cs +++ b/src/Lucene.Net/Support/Compatibility/NullableAttributes.cs @@ -8,6 +8,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +#pragma warning disable IDE0021 // Use block body for constructors + namespace System.Diagnostics.CodeAnalysis { /// Specifies that null is allowed as an input even if the corresponding type disallows it. 
diff --git a/src/Lucene.Net/Support/ConcurrentDictionaryWrapper.cs b/src/Lucene.Net/Support/ConcurrentDictionaryWrapper.cs index c44c198d65..278c4aaf19 100644 --- a/src/Lucene.Net/Support/ConcurrentDictionaryWrapper.cs +++ b/src/Lucene.Net/Support/ConcurrentDictionaryWrapper.cs @@ -123,10 +123,7 @@ public TValue this[TKey key] _lock.EnterReadLock(); try { - TValue result; - if (!_dict.TryGetValue(key, out result)) - return default(TValue); - return result; + return _dict.TryGetValue(key, out TValue result) ? result : default; } finally { diff --git a/src/Lucene.Net/Support/ConcurrentSet.cs b/src/Lucene.Net/Support/ConcurrentSet.cs index a0cfea28c3..14c0138037 100644 --- a/src/Lucene.Net/Support/ConcurrentSet.cs +++ b/src/Lucene.Net/Support/ConcurrentSet.cs @@ -3,6 +3,7 @@ using System.Collections; using System.Collections.Generic; using JCG = J2N.Collections.Generic; +#nullable enable namespace Lucene.Net.Support { @@ -31,7 +32,7 @@ internal sealed class ConcurrentSet : ISet, ICollection, IStructuralEquata #if FEATURE_SERIALIZABLE [NonSerialized] #endif - private object syncRoot; + private object? syncRoot; private readonly ISet set; public ConcurrentSet(ISet set) @@ -144,16 +145,20 @@ void ICollection.CopyTo(Array array, int index) //throw new ArgumentOutOfRangeException(nameof(index), index, SR.ArgumentOutOfRange_NeedNonNegNum); if (array.Length - index < Count) throw new ArgumentException("Destination array is not long enough to copy all the items in the collection. Check array index and length."); - //throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall); + //throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall); - T[]/*?*/ tarray = array as T[]; +#pragma warning disable IDE0019 // Use pattern matching + T[]? 
tarray = array as T[]; +#pragma warning restore IDE0019 // Use pattern matching if (tarray != null) { CopyTo(tarray, index); } else { - object/*?*/[]/*?*/ objects = array as object[]; +#pragma warning disable IDE0019 // Use pattern matching + object?[]? objects = array as object[]; +#pragma warning restore IDE0019 // Use pattern matching if (objects == null) { throw new ArgumentException("Target array type is not compatible with the type of items in the collection.", nameof(array)); @@ -197,7 +202,7 @@ public object SyncRoot { if (set is ICollection col) syncRoot = col.SyncRoot; - System.Threading.Interlocked.CompareExchange(ref syncRoot, new object(), null); + System.Threading.Interlocked.CompareExchange(ref syncRoot, new object(), null); } return syncRoot; } diff --git a/src/Lucene.Net/Support/Configuration/Base/ConfigurationReloadToken.cs b/src/Lucene.Net/Support/Configuration/Base/ConfigurationReloadToken.cs index a7b50e86bc..fd93a48566 100644 --- a/src/Lucene.Net/Support/Configuration/Base/ConfigurationReloadToken.cs +++ b/src/Lucene.Net/Support/Configuration/Base/ConfigurationReloadToken.cs @@ -12,7 +12,7 @@ namespace Lucene.Net.Configuration /// internal class ConfigurationReloadToken : IChangeToken { - private CancellationTokenSource _cts = new CancellationTokenSource(); + private readonly CancellationTokenSource _cts = new CancellationTokenSource(); /// /// Indicates if this token will proactively raise callbacks. Callbacks are still guaranteed to be invoked, eventually. 
diff --git a/src/Lucene.Net/Support/Configuration/Base/ConfigurationRoot.cs b/src/Lucene.Net/Support/Configuration/Base/ConfigurationRoot.cs index cf41292afd..5ac5286e9f 100644 --- a/src/Lucene.Net/Support/Configuration/Base/ConfigurationRoot.cs +++ b/src/Lucene.Net/Support/Configuration/Base/ConfigurationRoot.cs @@ -20,7 +20,7 @@ internal class ConfigurationRoot : IConfiguration , IConfigurationRoot #endif { - private IList _providers; + private readonly IList _providers; private ConfigurationReloadToken _changeToken = new ConfigurationReloadToken(); /// diff --git a/src/Lucene.Net/Support/Configuration/EnvironmentVariablesConfigurationProvider.cs b/src/Lucene.Net/Support/Configuration/EnvironmentVariablesConfigurationProvider.cs index 350f067f95..364704da14 100644 --- a/src/Lucene.Net/Support/Configuration/EnvironmentVariablesConfigurationProvider.cs +++ b/src/Lucene.Net/Support/Configuration/EnvironmentVariablesConfigurationProvider.cs @@ -118,7 +118,7 @@ private static string Segment(string key, int prefixLength) return indexOf < 0 ? key.Substring(prefixLength) : key.Substring(prefixLength, indexOf - prefixLength); } - private IChangeToken _reloadToken = new ConfigurationReloadToken(); + private readonly IChangeToken _reloadToken = new ConfigurationReloadToken(); /// /// Returns a that can be used to listen when this provider is reloaded. diff --git a/src/Lucene.Net/Support/Document/Extensions/IndexableFieldExtensions.cs b/src/Lucene.Net/Support/Document/Extensions/IndexableFieldExtensions.cs index 471b40f365..f473be1072 100644 --- a/src/Lucene.Net/Support/Document/Extensions/IndexableFieldExtensions.cs +++ b/src/Lucene.Net/Support/Document/Extensions/IndexableFieldExtensions.cs @@ -32,7 +32,7 @@ public static class IndexableFieldExtensions /// The field value or 0 if the type is non-numeric. 
public static byte GetByteValueOrDefault(this IIndexableField field) { - if (field == null) return default(byte); + if (field is null) return default; return field.GetByteValue().GetValueOrDefault(); } @@ -44,7 +44,7 @@ public static byte GetByteValueOrDefault(this IIndexableField field) /// The field value or 0 if the type is non-numeric. public static short GetInt16ValueOrDefault(this IIndexableField field) { - if (field == null) return default(short); + if (field is null) return default; return field.GetInt16Value().GetValueOrDefault(); } @@ -56,7 +56,7 @@ public static short GetInt16ValueOrDefault(this IIndexableField field) /// The field value or 0 if the type is non-numeric. public static int GetInt32ValueOrDefault(this IIndexableField field) { - if (field == null) return default(int); + if (field is null) return default; return field.GetInt32Value().GetValueOrDefault(); } @@ -68,7 +68,7 @@ public static int GetInt32ValueOrDefault(this IIndexableField field) /// The field value or 0 if the type is non-numeric. public static long GetInt64ValueOrDefault(this IIndexableField field) { - if (field == null) return default(long); + if (field is null) return default; return field.GetInt64Value().GetValueOrDefault(); } @@ -80,7 +80,7 @@ public static long GetInt64ValueOrDefault(this IIndexableField field) /// The field value or 0 if the type is non-numeric. public static float GetSingleValueOrDefault(this IIndexableField field) { - if (field == null) return default(float); + if (field is null) return default; return field.GetSingleValue().GetValueOrDefault(); } @@ -92,7 +92,7 @@ public static float GetSingleValueOrDefault(this IIndexableField field) /// The field value or 0 if the type is non-numeric. 
public static double GetDoubleValueOrDefault(this IIndexableField field) { - if (field == null) return default(double); + if (field is null) return default; return field.GetDoubleValue().GetValueOrDefault(); } } diff --git a/src/Lucene.Net/Support/EnumerableExtensions.cs b/src/Lucene.Net/Support/EnumerableExtensions.cs index d415dfc080..d978aa277d 100644 --- a/src/Lucene.Net/Support/EnumerableExtensions.cs +++ b/src/Lucene.Net/Support/EnumerableExtensions.cs @@ -30,7 +30,7 @@ internal static class EnumerableExtensions /// Enumerates a sequence in pairs /// /// - /// In the case of an uneven amount of elements, the list call to pases default(T) as the second parameter. + /// In the case of an uneven amount of elements, the list call to pases default as the second parameter. /// /// The type of the elements of . /// The type of the elements returned from . @@ -45,18 +45,16 @@ public static IEnumerable InPairs(this IEnumerable source, Fun if (join == null) throw new ArgumentNullException(nameof(join)); - using (IEnumerator enumerator = source.GetEnumerator()) + using IEnumerator enumerator = source.GetEnumerator(); + while (true) { - while (true) - { - if (!enumerator.MoveNext()) - yield break; + if (!enumerator.MoveNext()) + yield break; - T x = enumerator.Current; - if (!enumerator.MoveNext()) - yield return join(x, default); - yield return join(x, enumerator.Current); - } + T x = enumerator.Current; + if (!enumerator.MoveNext()) + yield return join(x, default); + yield return join(x, enumerator.Current); } } diff --git a/src/Lucene.Net/Support/ExceptionToClassNameConventionAttribute.cs b/src/Lucene.Net/Support/ExceptionToClassNameConventionAttribute.cs index 5c5b4fbce5..53c010e299 100644 --- a/src/Lucene.Net/Support/ExceptionToClassNameConventionAttribute.cs +++ b/src/Lucene.Net/Support/ExceptionToClassNameConventionAttribute.cs @@ -23,7 +23,7 @@ namespace Lucene.Net.Support /// Use this attribute to make an exception to the class naming rules (which should not be 
named like Interfaces). /// [AttributeUsage(AttributeTargets.Class, AllowMultiple = false)] - internal class ExceptionToClassNameConventionAttribute : Attribute + internal sealed class ExceptionToClassNameConventionAttribute : Attribute { } } diff --git a/src/Lucene.Net/Support/ExceptionToNetNumericConventionAttribute.cs b/src/Lucene.Net/Support/ExceptionToNetNumericConventionAttribute.cs index 55446efadc..1e6deac431 100644 --- a/src/Lucene.Net/Support/ExceptionToNetNumericConventionAttribute.cs +++ b/src/Lucene.Net/Support/ExceptionToNetNumericConventionAttribute.cs @@ -25,7 +25,7 @@ namespace Lucene.Net.Support /// that are commonly used in .NET method and property names. /// [AttributeUsage(AttributeTargets.Property | AttributeTargets.Method | AttributeTargets.Event | AttributeTargets.Class, AllowMultiple = false)] - internal class ExceptionToNetNumericConventionAttribute : Attribute + internal sealed class ExceptionToNetNumericConventionAttribute : Attribute { } } diff --git a/src/Lucene.Net/Support/ExceptionToNullableEnumConventionAttribute.cs b/src/Lucene.Net/Support/ExceptionToNullableEnumConventionAttribute.cs index 881fd5ed2a..8ff60ccb33 100644 --- a/src/Lucene.Net/Support/ExceptionToNullableEnumConventionAttribute.cs +++ b/src/Lucene.Net/Support/ExceptionToNullableEnumConventionAttribute.cs @@ -24,7 +24,7 @@ namespace Lucene.Net.Support /// Some of these cannot be avoided. 
/// [AttributeUsage(AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Constructor, AllowMultiple = false)] - internal class ExceptionToNullableEnumConventionAttribute : Attribute + internal sealed class ExceptionToNullableEnumConventionAttribute : Attribute { } } diff --git a/src/Lucene.Net/Support/IO/FileSupport.cs b/src/Lucene.Net/Support/IO/FileSupport.cs index a4b903cc16..2138547e01 100644 --- a/src/Lucene.Net/Support/IO/FileSupport.cs +++ b/src/Lucene.Net/Support/IO/FileSupport.cs @@ -50,7 +50,7 @@ internal static class FileSupport return GetFileIOExceptionHResult(provokeException: (fileName) => { //Try to create the file again -this should throw an IOException with the correct HResult for the current platform - using (var stream = new FileStream(fileName, FileMode.CreateNew, FileAccess.Write, FileShare.Read)) { } + using var stream = new FileStream(fileName, FileMode.CreateNew, FileAccess.Write, FileShare.Read); }); } @@ -172,7 +172,7 @@ public static FileInfo CreateTempFile(string prefix, string suffix, DirectoryInf } // Ensure the directory exists (this does nothing if it already exists, although may throw exceptions in cases where permissions are changed) directory.Create(); - string fileName = string.Empty; + string fileName; while (true) { @@ -186,10 +186,8 @@ public static FileInfo CreateTempFile(string prefix, string suffix, DirectoryInf try { // Create the file, and close it immediately - using (var stream = new FileStream(fileName, FileMode.CreateNew, FileAccess.Write, FileShare.Read)) - { - break; - } + using var stream = new FileStream(fileName, FileMode.CreateNew, FileAccess.Write, FileShare.Read); + break; } catch (IOException e) when (IsFileAlreadyExistsException(e, fileName)) { @@ -264,8 +262,7 @@ public static string GetCanonicalPath(this FileSystemInfo path) string absPath = path.FullName; // LUCENENET NOTE: This internally calls GetFullPath(), which resolves relative paths byte[] result = 
Encoding.UTF8.GetBytes(absPath); - string canonPath; - if (fileCanonPathCache.TryGetValue(absPath, out canonPath) && canonPath != null) + if (fileCanonPathCache.TryGetValue(absPath, out string canonPath) && canonPath != null) { return canonPath; } diff --git a/src/Lucene.Net/Support/IO/SafeTextWriterWrapper.cs b/src/Lucene.Net/Support/IO/SafeTextWriterWrapper.cs index 615f9346da..3cc52bbb9a 100644 --- a/src/Lucene.Net/Support/IO/SafeTextWriterWrapper.cs +++ b/src/Lucene.Net/Support/IO/SafeTextWriterWrapper.cs @@ -48,33 +48,14 @@ public SafeTextWriterWrapper(TextWriter textWriter) this.textWriter = textWriter ?? throw new ArgumentNullException(nameof(textWriter)); } - public override Encoding Encoding - { - get - { - return Run(() => textWriter.Encoding); - } - } + public override Encoding Encoding => Run(() => textWriter.Encoding); - public override IFormatProvider FormatProvider - { - get - { - return Run(() => textWriter.FormatProvider); - } - } + public override IFormatProvider FormatProvider => Run(() => textWriter.FormatProvider); public override string NewLine { - get - { - return Run(() => textWriter.NewLine); - } - - set - { - Run(() => textWriter.NewLine = value); - } + get => Run(() => textWriter.NewLine); + set => Run(() => textWriter.NewLine = value); } #if FEATURE_TEXTWRITER_CLOSE @@ -353,7 +334,7 @@ private void Run(Action method) private T Run(Func method) { - if (isDisposed) return default(T); + if (isDisposed) return default; try { @@ -362,7 +343,7 @@ private T Run(Func method) catch (ObjectDisposedException) { isDisposed = true; - return default(T); + return default; } } diff --git a/src/Lucene.Net/Support/IdentityWeakReference.cs b/src/Lucene.Net/Support/IdentityWeakReference.cs index 9da6f2df7f..dac12b80c2 100644 --- a/src/Lucene.Net/Support/IdentityWeakReference.cs +++ b/src/Lucene.Net/Support/IdentityWeakReference.cs @@ -27,7 +27,7 @@ internal class IdentityWeakReference : WeakReference private static readonly object NULL = new object(); 
public IdentityWeakReference(T target) - : base(target == null ? NULL : target) + : base(target ?? NULL) { hash = RuntimeHelpers.GetHashCode(target); } @@ -43,13 +43,9 @@ public override bool Equals(object o) { return true; } - if (o is IdentityWeakReference) + if (o is IdentityWeakReference iwr && ReferenceEquals(this.Target, iwr.Target)) { - IdentityWeakReference iwr = (IdentityWeakReference)o; - if (ReferenceEquals(this.Target, iwr.Target)) - { - return true; - } + return true; } return false; } diff --git a/src/Lucene.Net/Support/Index/Extensions/IndexWriterConfigExtensions.cs b/src/Lucene.Net/Support/Index/Extensions/IndexWriterConfigExtensions.cs index e4c7a25e12..3ae0b7a6ee 100644 --- a/src/Lucene.Net/Support/Index/Extensions/IndexWriterConfigExtensions.cs +++ b/src/Lucene.Net/Support/Index/Extensions/IndexWriterConfigExtensions.cs @@ -232,9 +232,10 @@ public static IndexWriterConfig SetCheckIntegrityAtMerge(this IndexWriterConfig /// this instance /// /// this instance - public static void SetDefaultWriteLockTimeout(this IndexWriterConfig config, long writeLockTimeout) + public static IndexWriterConfig SetDefaultWriteLockTimeout(this IndexWriterConfig config, long writeLockTimeout) { IndexWriterConfig.DefaultWriteLockTimeout = writeLockTimeout; + return config; } /// diff --git a/src/Lucene.Net/Support/Index/TaskMergeScheduler.cs b/src/Lucene.Net/Support/Index/TaskMergeScheduler.cs index 26e933b448..dc74342995 100644 --- a/src/Lucene.Net/Support/Index/TaskMergeScheduler.cs +++ b/src/Lucene.Net/Support/Index/TaskMergeScheduler.cs @@ -112,17 +112,13 @@ public void SetMaxMergesAndThreads(int maxMergeCount, int maxThreadCount) /// /// Return the priority that merge threads run at. This is always the same. 
/// - public int MergeThreadPriority - { - get - { + public int MergeThreadPriority => #if !FEATURE_THREAD_PRIORITY - return 2; + 2; #else - return (int)ThreadPriority.Normal; -#endif - } - } + (int)ThreadPriority.Normal; +#endif + /// /// This method has no effect in because the @@ -176,6 +172,7 @@ protected override void Dispose(bool disposing) { Sync(); _manualResetEvent.Dispose(); + _lock.Dispose(); } /// @@ -219,10 +216,7 @@ public virtual void Sync() /// Returns the number of merge threads that are alive. Note that this number /// is <= size. /// - private int MergeThreadCount - { - get { return _mergeThreads.Count(x => x.IsAlive && x.CurrentMerge != null); } - } + private int MergeThreadCount => _mergeThreads.Count(x => x.IsAlive && x.CurrentMerge != null); [MethodImpl(MethodImplOptions.NoInlining)] public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound) @@ -340,9 +334,7 @@ protected virtual void DoMerge(MergePolicy.OneMerge merge) private void OnMergeThreadCompleted(object sender, EventArgs e) { - var mergeThread = sender as MergeThread; - - if (mergeThread == null) + if (!(sender is MergeThread mergeThread)) { return; } @@ -636,10 +628,7 @@ private void Run(CancellationToken cancellationToken) { _isDone = true; - if (MergeThreadCompleted != null) - { - MergeThreadCompleted(this, EventArgs.Empty); - } + MergeThreadCompleted?.Invoke(this, EventArgs.Empty); } } @@ -664,9 +653,7 @@ public override string ToString() public override bool Equals(object obj) { - var compared = obj as MergeThread; - - if (compared == null + if (!(obj is MergeThread compared) || (Instance == null && compared.Instance != null) || (Instance != null && compared.Instance == null)) { diff --git a/src/Lucene.Net/Support/Threading/ReaderWriterLockSlimExtensions.cs b/src/Lucene.Net/Support/Threading/ReaderWriterLockSlimExtensions.cs index c760da36ef..9a91b7dffe 100644 --- a/src/Lucene.Net/Support/Threading/ReaderWriterLockSlimExtensions.cs +++ 
b/src/Lucene.Net/Support/Threading/ReaderWriterLockSlimExtensions.cs @@ -29,7 +29,7 @@ namespace Lucene.Net.Support.Threading /// internal static class ReaderWriterLockSlimExtensions { - sealed class ReadLockToken : IDisposable + private sealed class ReadLockToken : IDisposable { private ReaderWriterLockSlim _readerWriterLockSlim; @@ -49,7 +49,7 @@ public void Dispose() } } - sealed class WriteLockToken : IDisposable + private sealed class WriteLockToken : IDisposable { private ReaderWriterLockSlim _readerWriterLockSlim; diff --git a/src/Lucene.Net/Support/Util/BundleResourceManagerFactory.cs b/src/Lucene.Net/Support/Util/BundleResourceManagerFactory.cs index b4ece78c64..2464c0f5b3 100644 --- a/src/Lucene.Net/Support/Util/BundleResourceManagerFactory.cs +++ b/src/Lucene.Net/Support/Util/BundleResourceManagerFactory.cs @@ -56,8 +56,7 @@ public virtual ResourceManager Create(Type resourceSource) /// The to release. public virtual void Release(ResourceManager manager) { - var disposable = manager as IDisposable; - if (disposable != null) + if (manager is IDisposable disposable) { disposable.Dispose(); } diff --git a/src/Lucene.Net/Support/Util/ExceptionExtensions.cs b/src/Lucene.Net/Support/Util/ExceptionExtensions.cs index 5983af1f3c..8528498cad 100644 --- a/src/Lucene.Net/Support/Util/ExceptionExtensions.cs +++ b/src/Lucene.Net/Support/Util/ExceptionExtensions.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Linq; +using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util { @@ -40,7 +41,7 @@ public static IList GetSuppressedAsList(this Exception e) IList suppressed; if (!e.Data.Contains(SUPPRESSED_EXCEPTIONS_KEY)) { - suppressed = new List(); + suppressed = new JCG.List(); e.Data.Add(SUPPRESSED_EXCEPTIONS_KEY, suppressed); } else diff --git a/src/Lucene.Net/Support/Util/ListExtensions.cs b/src/Lucene.Net/Support/Util/ListExtensions.cs index 8dd963159b..407de7dbac 100644 --- a/src/Lucene.Net/Support/Util/ListExtensions.cs +++ 
b/src/Lucene.Net/Support/Util/ListExtensions.cs @@ -61,9 +61,9 @@ public static void AddRange(this IList list, IEnumerable collection) /// this public static void Sort(this IList list) { - if (list is List) + if (list is List listToSort) { - ((List)list).Sort(); + listToSort.Sort(); } else { @@ -81,9 +81,9 @@ public static void Sort(this IList list) /// the comparer to use for the sort public static void Sort(this IList list, IComparer comparer) { - if (list is List) + if (list is List listToSort) { - ((List)list).Sort(comparer); + listToSort.Sort(comparer); } else { @@ -159,7 +159,7 @@ public static void IntroSort(this IList list, IComparer comparer) private sealed class FunctorComparer : IComparer { - private Comparison comparison; + private readonly Comparison comparison; // LUCENENET: marked readonly public FunctorComparer(Comparison comparison) { diff --git a/src/Lucene.Net/Support/Util/NamedServiceFactory.cs b/src/Lucene.Net/Support/Util/NamedServiceFactory.cs index 6920bcd09e..4d9325f879 100644 --- a/src/Lucene.Net/Support/Util/NamedServiceFactory.cs +++ b/src/Lucene.Net/Support/Util/NamedServiceFactory.cs @@ -101,8 +101,7 @@ public static string GetServiceName(Type type) var nameAttributes = type.GetCustomAttributes(typeof(ServiceNameAttribute), inherit: true); if (nameAttributes.Length > 0) { - ServiceNameAttribute nameAttribute = nameAttributes[0] as ServiceNameAttribute; - if (nameAttribute != null) + if (nameAttributes[0] is ServiceNameAttribute nameAttribute) { string name = nameAttribute.Name; CheckServiceName(name); @@ -169,16 +168,12 @@ private static bool IsLetterOrDigit(char c) /// /// Gets a value that indicates whether the current application domain executes with full trust. 
/// - protected bool IsFullyTrusted - { - get - { + protected bool IsFullyTrusted => #if FEATURE_APPDOMAIN_ISFULLYTRUSTED - return AppDomain.CurrentDomain.IsFullyTrusted; // Partial trust support + AppDomain.CurrentDomain.IsFullyTrusted; // Partial trust support #else - return true; // Partial trust not supported + true; // Partial trust not supported #endif - } - } + } } diff --git a/src/Lucene.Net/Support/Util/NumberFormat.cs b/src/Lucene.Net/Support/Util/NumberFormat.cs index 311d096091..80ad26e7b4 100644 --- a/src/Lucene.Net/Support/Util/NumberFormat.cs +++ b/src/Lucene.Net/Support/Util/NumberFormat.cs @@ -51,29 +51,29 @@ public virtual string Format(object number) { string format = GetNumberFormat(); - if (number is int) + if (number is int i) { - return ((int)number).ToString(format, culture); + return i.ToString(format, culture); } - else if (number is long) + else if (number is long l) { - return ((long)number).ToString(format, culture); + return l.ToString(format, culture); } - else if (number is short) + else if (number is short s) { - return ((short)number).ToString(format, culture); + return s.ToString(format, culture); } - else if (number is float) + else if (number is float f) { - return ((float)number).ToString(format, culture); + return f.ToString(format, culture); } - else if (number is double) + else if (number is double d) { - return ((double)number).ToString(format, culture); + return d.ToString(format, culture); } - else if (number is decimal) + else if (number is decimal dec) { - return ((decimal)number).ToString(format, culture); + return dec.ToString(format, culture); } throw new ArgumentException("Cannot format given object as a Number"); diff --git a/src/Lucene.Net/Support/Util/ServiceNameAttribute.cs b/src/Lucene.Net/Support/Util/ServiceNameAttribute.cs index dfb292cf53..7e03227483 100644 --- a/src/Lucene.Net/Support/Util/ServiceNameAttribute.cs +++ b/src/Lucene.Net/Support/Util/ServiceNameAttribute.cs @@ -31,7 +31,7 @@ public abstract 
class ServiceNameAttribute : System.Attribute /// Sole constructor. Initializes the service name. /// /// - public ServiceNameAttribute(string name) + protected ServiceNameAttribute(string name) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { if (string.IsNullOrEmpty(name)) throw new ArgumentNullException(nameof(name)); diff --git a/src/Lucene.Net/Support/WeakDictionary.cs b/src/Lucene.Net/Support/WeakDictionary.cs index c8be30cae3..c97dbc9ae2 100644 --- a/src/Lucene.Net/Support/WeakDictionary.cs +++ b/src/Lucene.Net/Support/WeakDictionary.cs @@ -258,7 +258,7 @@ private class WeakKey where T : class public WeakKey(T key) { if (key == null) - throw new ArgumentNullException("key"); + throw new ArgumentNullException(nameof(key)); hashCode = key.GetHashCode(); reference = new WeakReference(key); diff --git a/src/Lucene.Net/Support/WritableArrayAttribute.cs b/src/Lucene.Net/Support/WritableArrayAttribute.cs index 525910dd38..82593af1bc 100644 --- a/src/Lucene.Net/Support/WritableArrayAttribute.cs +++ b/src/Lucene.Net/Support/WritableArrayAttribute.cs @@ -35,7 +35,7 @@ namespace Lucene.Net.Support /// /// [AttributeUsage(AttributeTargets.Method | AttributeTargets.Property, AllowMultiple = false)] - internal class WritableArrayAttribute : Attribute + internal sealed class WritableArrayAttribute : Attribute { } } diff --git a/src/Lucene.Net/Util/ArrayInPlaceMergeSorter.cs b/src/Lucene.Net/Util/ArrayInPlaceMergeSorter.cs index 174099a592..454a06f99d 100644 --- a/src/Lucene.Net/Util/ArrayInPlaceMergeSorter.cs +++ b/src/Lucene.Net/Util/ArrayInPlaceMergeSorter.cs @@ -1,4 +1,5 @@ using System.Collections.Generic; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -37,11 +38,13 @@ public ArrayInPlaceMergeSorter(T[] arr, IComparer comparer) this.comparer = comparer; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int Compare(int i, int j) { return comparer.Compare(arr[i], arr[j]); } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Swap(int i, int j) { ArrayUtil.Swap(arr, i, j); diff --git a/src/Lucene.Net/Util/ArrayIntroSorter.cs b/src/Lucene.Net/Util/ArrayIntroSorter.cs index 5f71168f9f..2fdbfc1939 100644 --- a/src/Lucene.Net/Util/ArrayIntroSorter.cs +++ b/src/Lucene.Net/Util/ArrayIntroSorter.cs @@ -1,4 +1,5 @@ using System.Collections.Generic; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -36,24 +37,28 @@ public ArrayIntroSorter(T[] arr, IComparer comparer) { this.arr = arr; this.comparer = comparer; - pivot = default(T); + pivot = default; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int Compare(int i, int j) { return comparer.Compare(arr[i], arr[j]); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Swap(int i, int j) { ArrayUtil.Swap(arr, i, j); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void SetPivot(int i) { - pivot = (i < arr.Length) ? arr[i] : default(T); + pivot = (i < arr.Length) ? 
arr[i] : default; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int ComparePivot(int i) { return comparer.Compare(pivot, arr[i]); diff --git a/src/Lucene.Net/Util/ArrayTimSorter.cs b/src/Lucene.Net/Util/ArrayTimSorter.cs index 79e1cd895a..04463926b7 100644 --- a/src/Lucene.Net/Util/ArrayTimSorter.cs +++ b/src/Lucene.Net/Util/ArrayTimSorter.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -49,31 +50,37 @@ public ArrayTimSorter(T[] arr, IComparer comparer, int maxTempSlots) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int Compare(int i, int j) { return comparer.Compare(arr[i], arr[j]); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Swap(int i, int j) { ArrayUtil.Swap(arr, i, j); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Copy(int src, int dest) { arr[dest] = arr[src]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Save(int start, int len) { Array.Copy(arr, start, tmp, 0, len); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Restore(int src, int dest) { arr[dest] = tmp[src]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int CompareSaved(int i, int j) { return comparer.Compare(tmp[i], arr[j]); diff --git a/src/Lucene.Net/Util/ArrayUtil.cs b/src/Lucene.Net/Util/ArrayUtil.cs index e04d1c106c..d445237479 100644 --- a/src/Lucene.Net/Util/ArrayUtil.cs +++ b/src/Lucene.Net/Util/ArrayUtil.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -55,6 +56,7 @@ Begin Apache Harmony code /// A string representation of an int quantity. /// The value represented by the argument /// If the argument could not be parsed as an int quantity. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int ParseInt32(char[] chars) { return ParseInt32(chars, 0, chars.Length, 10); @@ -70,6 +72,7 @@ public static int ParseInt32(char[] chars) /// The length /// the /// If it can't parse + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int ParseInt32(char[] chars, int offset, int len) { return ParseInt32(chars, offset, len, 10); @@ -283,6 +286,7 @@ public static short[] Grow(short[] array, int minSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static short[] Grow(short[] array) { return Grow(array, 1 + array.Length); @@ -303,6 +307,7 @@ public static float[] Grow(float[] array, int minSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static float[] Grow(float[] array) { return Grow(array, 1 + array.Length); @@ -323,6 +328,7 @@ public static double[] Grow(double[] array, int minSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static double[] Grow(double[] array) { return Grow(array, 1 + array.Length); @@ -359,6 +365,7 @@ public static int[] Grow(int[] array, int minSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int[] Grow(int[] array) { return Grow(array, 1 + array.Length); @@ -395,6 +402,7 @@ public static long[] Grow(long[] array, int minSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long[] Grow(long[] array) { return Grow(array, 1 + array.Length); @@ -447,6 +455,7 @@ public static byte[] Grow(byte[] array, int minSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static byte[] Grow(byte[] array) { return Grow(array, 1 + array.Length); @@ -483,6 +492,7 @@ public static bool[] Grow(bool[] array, int minSize) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool[] Grow(bool[] array) { return Grow(array, 1 + array.Length); @@ -519,6 +529,7 @@ public static char[] Grow(char[] array, int minSize) } } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public static char[] Grow(char[] array) { return Grow(array, 1 + array.Length); @@ -557,6 +568,7 @@ public static int[][] Grow(int[][] array, int minSize) } [CLSCompliant(false)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int[][] Grow(int[][] array) { return Grow(array, 1 + array.Length); @@ -596,6 +608,7 @@ public static float[][] Grow(float[][] array, int minSize) } [CLSCompliant(false)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static float[][] Grow(float[][] array) { return Grow(array, 1 + array.Length); @@ -798,6 +811,7 @@ private NaturalComparer() public static NaturalComparer Default { get; } = new NaturalComparer(); + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual int Compare(T o1, T o2) { return ((IComparable)o1).CompareTo(o2); @@ -846,6 +860,7 @@ public static IComparer GetNaturalComparer() /// /// Swap values stored in slots and + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Swap(T[] arr, int i, int j) { T tmp = arr[i]; @@ -873,6 +888,7 @@ public static void IntroSort(T[] a, int fromIndex, int toIndex, IComparer /// Sorts the given array using the . This method uses the intro sort /// algorithm, but falls back to insertion sort for small arrays. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IntroSort(T[] a, IComparer comp) { IntroSort(a, 0, a.Length, comp); @@ -896,6 +912,7 @@ public static void IntroSort(T[] a, int fromIndex, int toIndex) //where T : I /// Sorts the given array in natural order. This method uses the intro sort /// algorithm, but falls back to insertion sort for small arrays. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void IntroSort(T[] a) //where T : IComparable // LUCENENET specific: removing constraint because in .NET, it is not needed { IntroSort(a, 0, a.Length); @@ -921,6 +938,7 @@ public static void TimSort(T[] a, int fromIndex, int toIndex, IComparer co /// Sorts the given array using the . this method uses the Tim sort /// algorithm, but falls back to binary sort for small arrays. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void TimSort(T[] a, IComparer comp) { TimSort(a, 0, a.Length, comp); @@ -944,6 +962,7 @@ public static void TimSort(T[] a, int fromIndex, int toIndex) //where T : ICo /// Sorts the given array in natural order. this method uses the Tim sort /// algorithm, but falls back to binary sort for small arrays. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void TimSort(T[] a) //where T : IComparable // LUCENENET specific: removing constraint because in .NET, it is not needed { TimSort(a, 0, a.Length); diff --git a/src/Lucene.Net/Util/AttributeImpl.cs b/src/Lucene.Net/Util/AttributeImpl.cs index 660b69a9e6..9dce417b15 100644 --- a/src/Lucene.Net/Util/AttributeImpl.cs +++ b/src/Lucene.Net/Util/AttributeImpl.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.Reflection; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Util @@ -53,6 +54,7 @@ public StringBuilderAttributeReflector(StringBuilder buffer, bool prependAttClas this.prependAttClass = prependAttClass; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Reflect(string key, object value) where T : IAttribute { @@ -69,7 +71,11 @@ public void Reflect(Type type, string key, object value) { buffer.Append(type.Name).Append('#'); } - buffer.Append(key).Append('=').Append(object.ReferenceEquals(value, null) ? 
(object)"null" : value); + buffer.Append(key).Append('='); + if (value is null) + buffer.Append("null"); + else + buffer.Append(value); } } diff --git a/src/Lucene.Net/Util/AttributeSource.cs b/src/Lucene.Net/Util/AttributeSource.cs index 70417a5f7e..dedba6c788 100644 --- a/src/Lucene.Net/Util/AttributeSource.cs +++ b/src/Lucene.Net/Util/AttributeSource.cs @@ -443,24 +443,23 @@ private State GetCurrentState() return s; } var c = s = currentState[0] = new State(); - using (var it = attributeImpls.Values.GetEnumerator()) + using var it = attributeImpls.Values.GetEnumerator(); + it.MoveNext(); + c.attribute = it.Current; + while (it.MoveNext()) { - it.MoveNext(); + c.next = new State(); + c = c.next; c.attribute = it.Current; - while (it.MoveNext()) - { - c.next = new State(); - c = c.next; - c.attribute = it.Current; - } - return s; } + return s; } /// /// Resets all s in this by calling /// on each implementation. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void ClearAttributes() { for (State state = GetCurrentState(); state != null; state = state.next) @@ -473,6 +472,7 @@ public void ClearAttributes() /// Captures the state of all s. The return value can be passed to /// to restore the state of this or another . 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual State CaptureState() { State state = this.GetCurrentState(); @@ -578,23 +578,21 @@ public override bool Equals(object obj) /// /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public string ReflectAsString(bool prependAttClass) { StringBuilder buffer = new StringBuilder(); - ReflectWith(new AttributeReflectorAnonymousInnerClassHelper(this, prependAttClass, buffer)); + ReflectWith(new AttributeReflectorAnonymousInnerClassHelper(prependAttClass, buffer)); return buffer.ToString(); } private class AttributeReflectorAnonymousInnerClassHelper : IAttributeReflector { - private readonly AttributeSource outerInstance; + private readonly bool prependAttClass; + private readonly StringBuilder buffer; - private bool prependAttClass; - private StringBuilder buffer; - - public AttributeReflectorAnonymousInnerClassHelper(AttributeSource outerInstance, bool prependAttClass, StringBuilder buffer) + public AttributeReflectorAnonymousInnerClassHelper(bool prependAttClass, StringBuilder buffer) { - this.outerInstance = outerInstance; this.prependAttClass = prependAttClass; this.buffer = buffer; } @@ -615,7 +613,11 @@ public void Reflect(Type attClass, string key, object value) { buffer.Append(attClass.Name).Append('#'); } - buffer.Append(key).Append('=').Append(object.ReferenceEquals(value, null) ? "null" : value); + buffer.Append(key).Append('='); + if (value is null) + buffer.Append("null"); + else + buffer.Append(value); } } @@ -627,6 +629,7 @@ public void Reflect(Type attClass, string key, object value) /// corresponding method. 
/// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void ReflectWith(IAttributeReflector reflector) { for (State state = GetCurrentState(); state != null; state = state.next) diff --git a/src/Lucene.Net/Util/Automaton/Automaton.cs b/src/Lucene.Net/Util/Automaton/Automaton.cs index 23e7e5ec91..cd258ee8b3 100644 --- a/src/Lucene.Net/Util/Automaton/Automaton.cs +++ b/src/Lucene.Net/Util/Automaton/Automaton.cs @@ -3,6 +3,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; using System.Text; using JCG = J2N.Collections.Generic; @@ -142,6 +143,7 @@ public Automaton() /// Selects minimization algorithm (default: MINIMIZE_HOPCROFT). /// /// minimization algorithm + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void SetMinimization(int algorithm) { minimization = algorithm; @@ -154,6 +156,7 @@ public static void SetMinimization(int algorithm) /// automata. By default, the flag is not set. /// /// if true, the flag is set + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void SetMinimizeAlways(bool flag) { minimize_always = flag; @@ -167,6 +170,7 @@ public static void SetMinimizeAlways(bool flag) /// /// if true, the flag is set /// previous value of the flag + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool SetAllowMutate(bool flag) { bool b = allow_mutation; @@ -183,6 +187,7 @@ public static bool SetAllowMutate(bool flag) /// current value of the flag internal static bool AllowMutate => allow_mutation; + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void CheckMinimizeAlways() { if (minimize_always) @@ -216,6 +221,7 @@ public void setInitialState(State s) { /// Gets initial state. 
/// /// state + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual State GetInitialState() { ExpandSingleton(); @@ -292,6 +298,7 @@ public virtual State[] GetNumberedStates() return numberedStates; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void SetNumberedStates(State[] states) { SetNumberedStates(states, states.Length); @@ -313,6 +320,7 @@ public virtual void SetNumberedStates(State[] states, int count) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void ClearNumberedStates() { numberedStates = null; @@ -386,6 +394,7 @@ internal virtual void Totalize() /// are manipulated manually. /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void RestoreInvariant() { RemoveDeadTransitions(); @@ -608,6 +617,7 @@ public virtual int GetNumberOfTransitions() return c; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool Equals(object obj) { var other = obj as Automaton; @@ -743,6 +753,7 @@ public virtual string ToDot() /// /// Returns a clone of this automaton, expands if singleton. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual Automaton CloneExpanded() { Automaton a = (Automaton)Clone(); @@ -754,6 +765,7 @@ internal virtual Automaton CloneExpanded() /// Returns a clone of this automaton unless is /// set, expands if singleton. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual Automaton CloneExpandedIfRequired() { if (allow_mutation) @@ -803,6 +815,7 @@ public virtual object Clone() /// Returns a clone of this automaton, or this automaton itself if /// flag is set. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual Automaton CloneIfRequired() { if (allow_mutation) @@ -818,6 +831,7 @@ internal virtual Automaton CloneIfRequired() /// /// See . 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Concatenate(Automaton a) { return BasicOperations.Concatenate(this, a); @@ -826,6 +840,7 @@ public virtual Automaton Concatenate(Automaton a) /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Automaton Concatenate(IList l) { return BasicOperations.Concatenate(l); @@ -834,6 +849,7 @@ public static Automaton Concatenate(IList l) /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Optional() { return BasicOperations.Optional(this); @@ -842,6 +858,7 @@ public virtual Automaton Optional() /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Repeat() { return BasicOperations.Repeat(this); @@ -850,6 +867,7 @@ public virtual Automaton Repeat() /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Repeat(int min) { return BasicOperations.Repeat(this, min); @@ -858,6 +876,7 @@ public virtual Automaton Repeat(int min) /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Repeat(int min, int max) { return BasicOperations.Repeat(this, min, max); @@ -866,6 +885,7 @@ public virtual Automaton Repeat(int min, int max) /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Complement() { return BasicOperations.Complement(this); @@ -874,6 +894,7 @@ public virtual Automaton Complement() /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Minus(Automaton a) { return BasicOperations.Minus(this, a); @@ -882,6 +903,7 @@ public virtual Automaton Minus(Automaton a) /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Intersection(Automaton a) { return BasicOperations.Intersection(this, a); @@ -890,6 +912,7 @@ public virtual Automaton Intersection(Automaton a) /// /// See . 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual bool SubsetOf(Automaton a) { return BasicOperations.SubsetOf(this, a); @@ -898,6 +921,7 @@ public virtual bool SubsetOf(Automaton a) /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton Union(Automaton a) { return BasicOperations.Union(this, a); @@ -906,6 +930,7 @@ public virtual Automaton Union(Automaton a) /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Automaton Union(ICollection l) { return BasicOperations.Union(l); @@ -914,6 +939,7 @@ public static Automaton Union(ICollection l) /// /// See . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Determinize() { BasicOperations.Determinize(this); @@ -928,6 +954,7 @@ public virtual void Determinize() /// See . Returns the /// automaton being given as argument. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Automaton Minimize(Automaton a) { MinimizationOperations.Minimize(a); diff --git a/src/Lucene.Net/Util/Automaton/BasicAutomata.cs b/src/Lucene.Net/Util/Automaton/BasicAutomata.cs index 50068b54e4..86ab82a66c 100644 --- a/src/Lucene.Net/Util/Automaton/BasicAutomata.cs +++ b/src/Lucene.Net/Util/Automaton/BasicAutomata.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Globalization; +using System.Runtime.CompilerServices; using System.Text; /* @@ -57,6 +58,7 @@ public static Automaton MakeEmpty() /// /// Returns a new (deterministic) automaton that accepts only the empty string. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Automaton MakeEmptyString() { return new Automaton @@ -83,6 +85,7 @@ public static Automaton MakeAnyString() /// /// Returns a new (deterministic) automaton that accepts any single codepoint. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Automaton MakeAnyChar() { return MakeCharRange(Character.MinCodePoint, Character.MaxCodePoint); @@ -92,6 +95,7 @@ public static Automaton MakeAnyChar() /// Returns a new (deterministic) automaton that accepts a single codepoint of /// the given value. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Automaton MakeChar(int c) { return new Automaton @@ -128,6 +132,7 @@ public static Automaton MakeCharRange(int min, int max) /// Constructs sub-automaton corresponding to decimal numbers of length /// x.Substring(n).Length. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static State AnyOfRightLength(string x, int n) { State s = new State(); @@ -146,6 +151,7 @@ private static State AnyOfRightLength(string x, int n) /// Constructs sub-automaton corresponding to decimal numbers of value at least /// x.Substring(n) and length x.Substring(n).Length. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static State AtLeast(string x, int n, ICollection initials, bool zeros) { State s = new State(); @@ -173,6 +179,7 @@ private static State AtLeast(string x, int n, ICollection initials, bool /// Constructs sub-automaton corresponding to decimal numbers of value at most /// x.Substring(n) and length x.Substring(n).Length. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static State AtMost(string x, int n) { State s = new State(); @@ -197,6 +204,7 @@ private static State AtMost(string x, int n) /// x.Substring(n) and y.Substring(n) and of length x.Substring(n).Length /// (which must be equal to y.Substring(n).Length). 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static State Between(string x, string y, int n, ICollection initials, bool zeros) { State s = new State(); @@ -302,6 +310,7 @@ public static Automaton MakeInterval(int min, int max, int digits) /// Returns a new (deterministic) automaton that accepts the single given /// string. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Automaton MakeString(string s) { return new Automaton @@ -341,6 +350,7 @@ public static Automaton MakeString(int[] word, int offset, int length) /// An accepting all input strings. The resulting /// automaton is codepoint based (full unicode codepoints on /// transitions). + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Automaton MakeStringUnion(ICollection utf8Strings) { if (utf8Strings.Count == 0) diff --git a/src/Lucene.Net/Util/Automaton/BasicOperations.cs b/src/Lucene.Net/Util/Automaton/BasicOperations.cs index ddbc4e98a5..f59208a64f 100644 --- a/src/Lucene.Net/Util/Automaton/BasicOperations.cs +++ b/src/Lucene.Net/Util/Automaton/BasicOperations.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Runtime.CompilerServices; using System.Text; using JCG = J2N.Collections.Generic; @@ -626,6 +627,7 @@ private sealed class TransitionList internal Transition[] transitions = new Transition[2]; internal int count; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Add(Transition t) { if (transitions.Length == count) @@ -645,11 +647,13 @@ private sealed class PointTransitions : IComparable internal readonly TransitionList ends = new TransitionList(); internal readonly TransitionList starts = new TransitionList(); + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int CompareTo(PointTransitions other) { return point - other.point; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Reset(int point) { this.point = point; @@ -657,11 +661,13 @@ public void 
Reset(int point) starts.count = 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool Equals(object other) { return ((PointTransitions)other).point == point; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return point; @@ -677,6 +683,7 @@ private sealed class PointTransitionSet private readonly Dictionary map = new Dictionary(); private bool useHash = false; + [MethodImpl(MethodImplOptions.AggressiveInlining)] private PointTransitions Next(int point) { // 1st time we are seeing this point @@ -695,6 +702,7 @@ private PointTransitions Next(int point) return points0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private PointTransitions Find(int point) { if (useHash) @@ -732,6 +740,7 @@ private PointTransitions Find(int point) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Reset() { if (useHash) @@ -742,6 +751,7 @@ public void Reset() count = 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Sort() { // Tim sort performs well on already sorted arrays: @@ -751,6 +761,7 @@ public void Sort() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Add(Transition t) { Find(t.min).starts.Add(t); @@ -991,6 +1002,7 @@ public static void AddEpsilons(Automaton a, ICollection pairs) /// Returns true if the given automaton accepts the empty string and nothing /// else. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool IsEmptyString(Automaton a) { if (a.IsSingleton) @@ -1006,6 +1018,7 @@ public static bool IsEmptyString(Automaton a) /// /// Returns true if the given automaton accepts no strings. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool IsEmpty(Automaton a) { if (a.IsSingleton) diff --git a/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs b/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs index 811bf7b234..e333b8aa75 100644 --- a/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs +++ b/src/Lucene.Net/Util/Automaton/CompiledAutomaton.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Util.Automaton @@ -215,6 +216,7 @@ public CompiledAutomaton(Automaton automaton, bool? finite, bool simplify) //private static final boolean DEBUG = BlockTreeTermsWriter.DEBUG; + [MethodImpl(MethodImplOptions.AggressiveInlining)] private BytesRef AddTail(int state, BytesRef term, int idx, int leadLabel) { // Find biggest transition that's < label diff --git a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs index 66eae076d5..e95abbab14 100644 --- a/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs +++ b/src/Lucene.Net/Util/Automaton/DaciukMihovAutomatonBuilder.cs @@ -4,6 +4,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; using Arrays = Lucene.Net.Support.Arrays; using JCG = J2N.Collections.Generic; @@ -129,6 +130,7 @@ public override int GetHashCode() /// Create a new outgoing transition labeled and return /// the newly created target state for this transition. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal State NewState(int label) { if (Debugging.AssertsEnabled) Debugging.Assert(Array.BinarySearch(labels, label) < 0, "State already has transition labeled: {0}", label); @@ -143,6 +145,7 @@ internal State NewState(int label) /// /// Return the most recent transitions's target state. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal State LastChild() // LUCENENET NOTE: Kept this a method because there is another overload { if (Debugging.AssertsEnabled) Debugging.Assert(HasChildren, "No outgoing transitions."); @@ -153,6 +156,7 @@ internal State LastChild() // LUCENENET NOTE: Kept this a method because there i /// Return the associated state if the most recent transition is labeled with /// . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal State LastChild(int label) { int index = labels.Length - 1; @@ -169,6 +173,7 @@ internal State LastChild(int label) /// Replace the last added outgoing transition's target state with the given /// . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void ReplaceLastChild(State state) { if (Debugging.AssertsEnabled) Debugging.Assert(HasChildren, "No outgoing transitions."); @@ -178,6 +183,7 @@ internal void ReplaceLastChild(State state) /// /// Compare two lists of objects for reference-equality. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static bool ReferenceEquals(object[] a1, object[] a2) { if (a1.Length != a2.Length) @@ -278,6 +284,7 @@ public State Complete() /// /// /// Must use a dictionary with passed into its constructor. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static Util.Automaton.State Convert(State s, IDictionary visited) { if (visited.TryGetValue(s, out Util.Automaton.State converted) && converted != null) @@ -326,6 +333,7 @@ public static Automaton Build(ICollection input) /// /// Copy into an internal buffer. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool SetPrevious(CharsRef current) { // don't need to copy, once we fix https://issues.apache.org/jira/browse/LUCENE-3277 @@ -361,7 +369,8 @@ private void ReplaceOrRegister(State state) /// Add a suffix of starting at /// (inclusive) to state . 
/// - private void AddSuffix(State state, ICharSequence current, int fromIndex) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static void AddSuffix(State state, ICharSequence current, int fromIndex) // LUCENENET: CA1822: Mark members as static { int len = current.Length; while (fromIndex < len) diff --git a/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs b/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs index 1edcf5976a..44902dcdad 100644 --- a/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs +++ b/src/Lucene.Net/Util/Automaton/LevenshteinAutomata.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util.Automaton @@ -121,6 +122,7 @@ public LevenshteinAutomata(int[] word, int alphaMax, bool withTranspositions) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int[] CodePoints(string input) { int length = Character.CodePointCount(input, 0, input.Length); @@ -222,6 +224,7 @@ public virtual Automaton ToAutomaton(int n) /// Get the characteristic vector X(x, V) /// where V is Substring(pos, end - pos). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int GetVector(int x, int pos, int end) { int vector = 0; @@ -271,6 +274,7 @@ internal ParametricDescription(int w, int n, int[] minErrors) /// /// Returns true if the state in any Levenshtein DFA is an accept state (final state). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual bool IsAccept(int absState) { // decode absState -> state, offset @@ -284,6 +288,7 @@ internal virtual bool IsAccept(int absState) /// Returns the position in the input word for a given state. /// this is the minimal boundary for the state. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int GetPosition(int absState) { return absState % (m_w + 1); diff --git a/src/Lucene.Net/Util/Automaton/RegExp.cs b/src/Lucene.Net/Util/Automaton/RegExp.cs index 98c18b06f7..c3d7406aa1 100644 --- a/src/Lucene.Net/Util/Automaton/RegExp.cs +++ b/src/Lucene.Net/Util/Automaton/RegExp.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Globalization; using System.IO; +using System.Runtime.CompilerServices; using System.Text; using JCG = J2N.Collections.Generic; @@ -462,6 +463,7 @@ public RegExp(string s, RegExpSyntax syntax_flags) /// Constructs new from this . Same /// as ToAutomaton(null) (empty automaton map). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton ToAutomaton() { return ToAutomatonAllowMutate(null, null); @@ -475,6 +477,7 @@ public virtual Automaton ToAutomaton() /// Provider of automata for named identifiers. /// If this regular expression uses a named /// identifier that is not available from the automaton provider. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton ToAutomaton(IAutomatonProvider automaton_provider) { return ToAutomatonAllowMutate(null, automaton_provider); @@ -489,6 +492,7 @@ public virtual Automaton ToAutomaton(IAutomatonProvider automaton_provider) /// ). /// If this regular expression uses a named /// identifier that does not occur in the automaton map. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Automaton ToAutomaton(IDictionary automata) { return ToAutomatonAllowMutate(automata, null); @@ -501,6 +505,7 @@ public virtual Automaton ToAutomaton(IDictionary automata) /// /// If true, the flag is set /// Previous value of the flag. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual bool SetAllowMutate(bool flag) { bool b = allow_mutation; @@ -651,6 +656,7 @@ public override string ToString() return ToStringBuilder(new StringBuilder()).ToString(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual StringBuilder ToStringBuilder(StringBuilder b) { switch (kind) @@ -769,6 +775,7 @@ public virtual ISet GetIdentifiers() return set; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void GetIdentifiers(ISet set) { switch (kind) @@ -797,6 +804,7 @@ internal virtual void GetIdentifiers(ISet set) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeUnion(RegExp exp1, RegExp exp2) { return new RegExp @@ -807,6 +815,7 @@ internal static RegExp MakeUnion(RegExp exp1, RegExp exp2) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeConcatenation(RegExp exp1, RegExp exp2) { if ((exp1.kind == Kind.REGEXP_CHAR || exp1.kind == Kind.REGEXP_STRING) && (exp2.kind == Kind.REGEXP_CHAR || exp2.kind == Kind.REGEXP_STRING)) @@ -835,6 +844,7 @@ internal static RegExp MakeConcatenation(RegExp exp1, RegExp exp2) return r; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static RegExp MakeString(RegExp exp1, RegExp exp2) { StringBuilder b = new StringBuilder(); @@ -857,6 +867,7 @@ private static RegExp MakeString(RegExp exp1, RegExp exp2) return MakeString(b.ToString()); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeIntersection(RegExp exp1, RegExp exp2) { return new RegExp @@ -867,6 +878,7 @@ internal static RegExp MakeIntersection(RegExp exp1, RegExp exp2) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeOptional(RegExp exp) { return new RegExp @@ -876,6 +888,7 @@ internal static RegExp MakeOptional(RegExp exp) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeRepeat(RegExp exp) { 
return new RegExp @@ -885,6 +898,7 @@ internal static RegExp MakeRepeat(RegExp exp) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeRepeat(RegExp exp, int min) { return new RegExp @@ -895,6 +909,7 @@ internal static RegExp MakeRepeat(RegExp exp, int min) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeRepeat(RegExp exp, int min, int max) { return new RegExp @@ -906,6 +921,7 @@ internal static RegExp MakeRepeat(RegExp exp, int min, int max) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeComplement(RegExp exp) { return new RegExp @@ -915,6 +931,7 @@ internal static RegExp MakeComplement(RegExp exp) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeChar(int c) { return new RegExp @@ -924,6 +941,7 @@ internal static RegExp MakeChar(int c) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeCharRange(int from, int to) { if (from > to) @@ -938,6 +956,7 @@ internal static RegExp MakeCharRange(int from, int to) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeAnyChar() { return new RegExp @@ -946,6 +965,7 @@ internal static RegExp MakeAnyChar() }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeEmpty() { return new RegExp @@ -963,6 +983,7 @@ internal static RegExp MakeString(string s) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeAnyString() { return new RegExp @@ -971,6 +992,7 @@ internal static RegExp MakeAnyString() }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeAutomaton(string s) { return new RegExp @@ -980,6 +1002,7 @@ internal static RegExp MakeAutomaton(string s) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static RegExp MakeInterval(int min, int max, int digits) { return new RegExp @@ -991,11 +1014,13 @@ internal static 
RegExp MakeInterval(int min, int max, int digits) }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool Peek(string s) { return More() && s.IndexOf(b.CodePointAt(pos)) != -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool Match(int c) { if (pos >= b.Length) @@ -1010,11 +1035,13 @@ private bool Match(int c) return false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool More() { return pos < b.Length; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int Next() { if (!More()) @@ -1026,11 +1053,13 @@ private int Next() return ch; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool Check(RegExpSyntax flag) { return (flags & flag) != 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseUnionExp() { RegExp e = ParseInterExp(); @@ -1041,6 +1070,7 @@ internal RegExp ParseUnionExp() return e; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseInterExp() { RegExp e = ParseConcatExp(); @@ -1051,6 +1081,7 @@ internal RegExp ParseInterExp() return e; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseConcatExp() { RegExp e = ParseRepeatExp(); @@ -1061,6 +1092,7 @@ internal RegExp ParseConcatExp() return e; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseRepeatExp() { RegExp e = ParseComplExp(); @@ -1124,6 +1156,7 @@ internal RegExp ParseRepeatExp() return e; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseComplExp() { if (Check(RegExpSyntax.COMPLEMENT) && Match('~')) @@ -1136,6 +1169,7 @@ internal RegExp ParseComplExp() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseCharClassExp() { if (Match('[')) @@ -1162,6 +1196,7 @@ internal RegExp ParseCharClassExp() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseCharClasses() { RegExp e = ParseCharClass(); @@ -1172,6 +1207,7 @@ internal RegExp ParseCharClasses() 
return e; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseCharClass() { int c = ParseCharExp(); @@ -1185,6 +1221,7 @@ internal RegExp ParseCharClass() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal RegExp ParseSimpleExp() { if (Match('.')) @@ -1293,6 +1330,7 @@ internal RegExp ParseSimpleExp() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal int ParseCharExp() { Match('\\'); diff --git a/src/Lucene.Net/Util/Automaton/RunAutomaton.cs b/src/Lucene.Net/Util/Automaton/RunAutomaton.cs index 268934e404..af378bbb09 100644 --- a/src/Lucene.Net/Util/Automaton/RunAutomaton.cs +++ b/src/Lucene.Net/Util/Automaton/RunAutomaton.cs @@ -1,5 +1,6 @@ using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; using System.Text; /* @@ -108,6 +109,7 @@ public override string ToString() /// /// Returns acceptance status for given state. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool IsAccept(int state) { return m_accept[state]; @@ -122,6 +124,7 @@ public bool IsAccept(int state) /// Returns array of codepoint class interval start points. The array should /// not be modified by the caller. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int[] GetCharIntervals() { return (int[])(Array)_points.Clone(); @@ -130,6 +133,7 @@ public int[] GetCharIntervals() /// /// Gets character class of given codepoint. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal int GetCharClass(int c) { return SpecialOperations.FindIndex(c, _points); diff --git a/src/Lucene.Net/Util/Automaton/SortedIntSet.cs b/src/Lucene.Net/Util/Automaton/SortedIntSet.cs index 39ecd152b3..4ae1bf2efa 100644 --- a/src/Lucene.Net/Util/Automaton/SortedIntSet.cs +++ b/src/Lucene.Net/Util/Automaton/SortedIntSet.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; using System.Text; using JCG = J2N.Collections.Generic; @@ -54,6 +55,7 @@ public SortedInt32Set(int capacity) } // Adds this state to the set + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Incr(int num) { if (useTreeMap) @@ -116,6 +118,7 @@ public void Incr(int num) } // Removes this state from the set, if count decrs to 0 + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Decr(int num) { if (useTreeMap) @@ -160,6 +163,7 @@ public void Decr(int num) if (Debugging.AssertsEnabled) Debugging.Assert(false); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void ComputeHash() { if (useTreeMap) @@ -188,6 +192,7 @@ public void ComputeHash() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public FrozenInt32Set ToFrozenInt32Set() // LUCENENET specific { int[] c = new int[upto]; @@ -195,6 +200,7 @@ public FrozenInt32Set ToFrozenInt32Set() // LUCENENET specific return new FrozenInt32Set(c, this.hashCode, this.state); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public FrozenInt32Set Freeze(State state) { int[] c = new int[upto]; @@ -202,6 +208,7 @@ public FrozenInt32Set Freeze(State state) return new FrozenInt32Set(c, hashCode, state); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return hashCode; @@ -274,6 +281,7 @@ public FrozenInt32Set(int num, State state) this.hashCode = 683 + num; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override 
int GetHashCode() { return hashCode; diff --git a/src/Lucene.Net/Util/Automaton/SpecialOperations.cs b/src/Lucene.Net/Util/Automaton/SpecialOperations.cs index f9e0f3f653..b2e9dcdc1d 100644 --- a/src/Lucene.Net/Util/Automaton/SpecialOperations.cs +++ b/src/Lucene.Net/Util/Automaton/SpecialOperations.cs @@ -2,6 +2,7 @@ using J2N.Text; using System.Collections.Generic; using System.Linq; +using System.Runtime.CompilerServices; using System.Text; using JCG = J2N.Collections.Generic; @@ -49,6 +50,7 @@ public static class SpecialOperations // LUCENENET specific - made static since /// Finds the largest entry whose value is less than or equal to , or 0 if /// there is no such entry. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int FindIndex(int c, int[] points) { int a = 0; @@ -90,6 +92,7 @@ public static bool IsFinite(Automaton a) /// // TODO: not great that this is recursive... in theory a // large automata could exceed java's stack + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static bool IsFinite(State s, OpenBitSet path, OpenBitSet visited) { path.Set(s.number); @@ -206,6 +209,7 @@ public static BytesRef GetCommonSuffixBytesRef(Automaton a) return @ref; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void ReverseBytes(BytesRef @ref) { if (@ref.Length <= 1) @@ -300,6 +304,7 @@ public static ISet GetFiniteStrings(Automaton a, int limit) /// false if more than strings are found. /// <0 means "infinite". 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static bool GetFiniteStrings(State s, JCG.HashSet pathstates, JCG.HashSet strings, Int32sRef path, int limit) { pathstates.Add(s); diff --git a/src/Lucene.Net/Util/Automaton/State.cs b/src/Lucene.Net/Util/Automaton/State.cs index f175d36a74..36f08d0f26 100644 --- a/src/Lucene.Net/Util/Automaton/State.cs +++ b/src/Lucene.Net/Util/Automaton/State.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; using System.Text; /* @@ -220,6 +221,7 @@ public virtual void Step(int c, ICollection dest) /// transitions from to this state, and if /// is an accept state then set accept for this state. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void AddEpsilon(State to) { if (to.accept) @@ -350,6 +352,7 @@ public override string ToString() /// Compares this object with the specified object for order. States are /// ordered by the time of construction. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual int CompareTo(State s) { return s.id - id; @@ -362,6 +365,7 @@ public virtual int CompareTo(State s) // IndexOutOfRangeExceptions when using FuzzyTermsEnum. // See GH-296. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return id; diff --git a/src/Lucene.Net/Util/Automaton/StatePair.cs b/src/Lucene.Net/Util/Automaton/StatePair.cs index 0e41d8e6f7..f6e06c1f3d 100644 --- a/src/Lucene.Net/Util/Automaton/StatePair.cs +++ b/src/Lucene.Net/Util/Automaton/StatePair.cs @@ -27,6 +27,8 @@ * this SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util.Automaton { /// @@ -92,6 +94,7 @@ public override bool Equals(object obj) /// Returns hash code. /// /// Hash code. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return s1.GetHashCode() + s2.GetHashCode(); diff --git a/src/Lucene.Net/Util/Automaton/Transition.cs b/src/Lucene.Net/Util/Automaton/Transition.cs index 0d08537587..de719e7955 100644 --- a/src/Lucene.Net/Util/Automaton/Transition.cs +++ b/src/Lucene.Net/Util/Automaton/Transition.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using System.Collections.Generic; using System.Globalization; +using System.Runtime.CompilerServices; using System.Text; /* @@ -127,6 +128,7 @@ public override bool Equals(object obj) /// the destination state). /// /// Hash code. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return min * 2 + max * 3; @@ -136,11 +138,13 @@ public override int GetHashCode() /// Clones this transition. /// /// Clone with same character interval and destination state. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual object Clone() { return (Transition)base.MemberwiseClone(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void AppendCharString(int c, StringBuilder b) { if (c >= 0x21 && c <= 0x7e && c != '\\' && c != '"') @@ -203,6 +207,7 @@ public override string ToString() return b.ToString(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void AppendDot(StringBuilder b) { b.Append(" -> ").Append(to.number).Append(" [label=\""); diff --git a/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs b/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs index a0b8867820..2ccfabec75 100644 --- a/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs +++ b/src/Lucene.Net/Util/Automaton/UTF32ToUTF8.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Util.Automaton @@ -77,11 +78,13 @@ public UTF8Sequence() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual 
int ByteAt(int idx) { return bytes[idx].Value; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual int NumBits(int idx) { return bytes[idx].Bits; @@ -122,6 +125,7 @@ internal virtual void Set(int code) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SetRest(int code, int numBytes) { for (int i = 0; i < numBytes; i++) @@ -154,6 +158,7 @@ public override string ToString() private readonly UTF8Sequence tmpUTF8b = new UTF8Sequence(); // Builds necessary utf8 edges between start & end + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void ConvertOneEdge(State start, State end, int startCodePoint, int endCodePoint) { startUTF8.Set(startCodePoint); @@ -163,6 +168,7 @@ internal void ConvertOneEdge(State start, State end, int startCodePoint, int end Build(start, end, startUTF8, endUTF8, 0); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void Build(State start, State end, UTF8Sequence startUTF8, UTF8Sequence endUTF8, int upto) { // Break into start, middle, end: @@ -231,6 +237,7 @@ private void Build(State start, State end, UTF8Sequence startUTF8, UTF8Sequence } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void Start(State start, State end, UTF8Sequence utf8, int upto, bool doAll) { if (upto == utf8.len - 1) @@ -251,6 +258,7 @@ private void Start(State start, State end, UTF8Sequence utf8, int upto, bool doA } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void End(State start, State end, UTF8Sequence utf8, int upto, bool doAll) { if (upto == utf8.len - 1) @@ -282,6 +290,7 @@ private void End(State start, State end, UTF8Sequence utf8, int upto, bool doAll } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void All(State start, State end, int startCode, int endCode, int left) { if (left == 0) @@ -365,6 +374,7 @@ public Automaton Convert(Automaton utf32) return utf8; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private State NewUTF8State() { State s = 
new State(); diff --git a/src/Lucene.Net/Util/BitUtil.cs b/src/Lucene.Net/Util/BitUtil.cs index afcfc70f65..37e54e10eb 100644 --- a/src/Lucene.Net/Util/BitUtil.cs +++ b/src/Lucene.Net/Util/BitUtil.cs @@ -1,4 +1,5 @@ using J2N.Numerics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util // from org.apache.solr.util rev 555343 { @@ -99,6 +100,7 @@ public static class BitUtil // LUCENENET specific - made static /// /// Return the number of bits sets in . + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int BitCount(byte b) { return BYTE_COUNTS[b & 0xFF]; @@ -115,6 +117,7 @@ public static int BitCount(byte b) /// (0x43 >>> 8) & 0x0F is 0, meaning there is no more bit set in this byte. /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int BitList(byte b) { return BIT_LISTS[b & 0xFF]; @@ -126,6 +129,7 @@ public static int BitList(byte b) /// /// Returns the number of set bits in an array of s. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long Pop_Array(long[] arr, int wordOffset, int numWords) { long popCount = 0; @@ -140,6 +144,7 @@ public static long Pop_Array(long[] arr, int wordOffset, int numWords) /// Returns the popcount or cardinality of the two sets after an intersection. /// Neither array is modified. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long Pop_Intersect(long[] arr1, long[] arr2, int wordOffset, int numWords) { long popCount = 0; @@ -154,6 +159,7 @@ public static long Pop_Intersect(long[] arr1, long[] arr2, int wordOffset, int n /// Returns the popcount or cardinality of the union of two sets. /// Neither array is modified. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long Pop_Union(long[] arr1, long[] arr2, int wordOffset, int numWords) { long popCount = 0; @@ -168,6 +174,7 @@ public static long Pop_Union(long[] arr1, long[] arr2, int wordOffset, int numWo /// Returns the popcount or cardinality of A & ~B. 
/// Neither array is modified. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long Pop_AndNot(long[] arr1, long[] arr2, int wordOffset, int numWords) { long popCount = 0; @@ -182,6 +189,7 @@ public static long Pop_AndNot(long[] arr1, long[] arr2, int wordOffset, int numW /// Returns the popcount or cardinality of A ^ B /// Neither array is modified. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long Pop_Xor(long[] arr1, long[] arr2, int wordOffset, int numWords) { long popCount = 0; @@ -194,6 +202,7 @@ public static long Pop_Xor(long[] arr1, long[] arr2, int wordOffset, int numWord /// /// Returns the next highest power of two, or the current value if it's already a power of two or zero + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int NextHighestPowerOfTwo(int v) { v--; @@ -208,6 +217,7 @@ public static int NextHighestPowerOfTwo(int v) /// /// Returns the next highest power of two, or the current value if it's already a power of two or zero + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long NextHighestPowerOfTwo(long v) { v--; diff --git a/src/Lucene.Net/Util/Bits.cs b/src/Lucene.Net/Util/Bits.cs index fa46d1d504..8db1de16ee 100644 --- a/src/Lucene.Net/Util/Bits.cs +++ b/src/Lucene.Net/Util/Bits.cs @@ -1,4 +1,5 @@ using Lucene.Net.Support; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -56,6 +57,7 @@ public MatchAllBits(int len) _len = len; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Get(int index) { return true; @@ -76,6 +78,7 @@ public MatchNoBits(int len) _len = len; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Get(int index) { return false; diff --git a/src/Lucene.Net/Util/BroadWord.cs b/src/Lucene.Net/Util/BroadWord.cs index 430c02ea8c..3269054214 100644 --- a/src/Lucene.Net/Util/BroadWord.cs +++ b/src/Lucene.Net/Util/BroadWord.cs @@ -1,6 +1,6 @@ using J2N.Numerics; using Lucene.Net.Diagnostics; -using 
System.Globalization; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -90,6 +90,7 @@ public static int Select(long x, int r) /// A signed bytewise smaller <8 operator, for operands 0L<= x, y <=0x7L. /// This uses the following numbers of basic operations: 1 or, 2 and, 2 xor, 1 minus, 1 not. /// A with bits set in the positions corresponding to each input signed byte pair that compares smaller. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SmallerUpTo7_8(long x, long y) { // See section 4, page 5, line 14 of the Vigna article: @@ -100,6 +101,7 @@ public static long SmallerUpTo7_8(long x, long y) /// An unsigned bytewise smaller <8 operator. /// This uses the following numbers of basic operations: 3 or, 2 and, 2 xor, 1 minus, 1 not. /// A with bits set in the positions corresponding to each input unsigned byte pair that compares smaller. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long Smalleru_8(long x, long y) { // See section 4, 8th line from the bottom of the page 5, of the Vigna article: @@ -110,6 +112,7 @@ public static long Smalleru_8(long x, long y) /// An unsigned bytewise not equals 0 operator. /// This uses the following numbers of basic operations: 2 or, 1 and, 1 minus. /// A with bits set in the positions corresponding to each unsigned byte that does not equal 0. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long NotEquals0_8(long x) { // See section 4, line 6-8 on page 6, of the Vigna article: @@ -120,6 +123,7 @@ public static long NotEquals0_8(long x) /// A bytewise smaller <16 operator. /// This uses the following numbers of basic operations: 1 or, 2 and, 2 xor, 1 minus, 1 not. /// A with bits set in the positions corresponding to each input signed short pair that compares smaller. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SmallerUpto15_16(long x, long y) { return (((x | H16_L) - (y & (~H16_L))) ^ x ^ ~y) & H16_L; diff --git a/src/Lucene.Net/Util/ByteBlockPool.cs b/src/Lucene.Net/Util/ByteBlockPool.cs index 093e651414..29ece09c16 100644 --- a/src/Lucene.Net/Util/ByteBlockPool.cs +++ b/src/Lucene.Net/Util/ByteBlockPool.cs @@ -5,6 +5,7 @@ using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -71,6 +72,7 @@ public virtual void RecycleByteBlocks(IList blocks) RecycleByteBlocks(b, 0, b.Length); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual byte[] GetByteBlock() { return new byte[m_blockSize]; @@ -91,6 +93,7 @@ public DirectAllocator(int blockSize) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void RecycleByteBlocks(byte[][] blocks, int start, int end) { } @@ -115,12 +118,14 @@ public DirectTrackingAllocator(int blockSize, Counter bytesUsed) this.bytesUsed = bytesUsed; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override byte[] GetByteBlock() { bytesUsed.AddAndGet(m_blockSize); return new byte[m_blockSize]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void RecycleByteBlocks(byte[][] blocks, int start, int end) { bytesUsed.AddAndGet(-((end - start) * m_blockSize)); @@ -185,6 +190,7 @@ public ByteBlockPool(Allocator allocator) /// . Calling /// is not needed after reset. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Reset() { Reset(true, true); diff --git a/src/Lucene.Net/Util/BytesRef.cs b/src/Lucene.Net/Util/BytesRef.cs index b3db4c8c3b..cb976225e6 100644 --- a/src/Lucene.Net/Util/BytesRef.cs +++ b/src/Lucene.Net/Util/BytesRef.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; using System.Text; using WritableArrayAttribute = Lucene.Net.Support.WritableArrayAttribute; @@ -138,6 +139,7 @@ public BytesRef(string text) /// /// Must be well-formed unicode text, with no /// unpaired surrogates or invalid UTF16 code units. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void CopyChars(ICharSequence text) { if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); // TODO broken if offset != 0 @@ -149,6 +151,7 @@ public void CopyChars(ICharSequence text) /// /// Must be well-formed unicode text, with no /// unpaired surrogates or invalid UTF16 code units. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void CopyChars(string text) { if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); // TODO broken if offset != 0 @@ -191,6 +194,7 @@ public bool BytesEquals(BytesRef other) /// object. /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { return new BytesRef(bytes, Offset, Length); @@ -203,6 +207,7 @@ public object Clone() /// , but is subject to /// change from release to release. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return StringHelper.Murmurhash3_x86_32(this, StringHelper.GOOD_FAST_HASH_SEED); @@ -219,6 +224,7 @@ public override bool Equals(object other) return false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] bool IEquatable.Equals(BytesRef other) // LUCENENET specific - implemented IEquatable => BytesEquals(other); @@ -226,6 +232,7 @@ bool IEquatable.Equals(BytesRef other) // LUCENENET specific - impleme /// Interprets stored bytes as UTF8 bytes, returning the /// resulting . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public string Utf8ToString() { CharsRef @ref = new CharsRef(Length); @@ -296,6 +303,7 @@ public void Append(BytesRef other) /// /// @lucene.internal /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Grow(int newLength) { if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); // NOTE: senseless if offset != 0 @@ -304,6 +312,7 @@ public void Grow(int newLength) /// /// Unsigned byte order comparison + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int CompareTo(object other) // LUCENENET specific: Implemented IComparable for FieldComparer { BytesRef br = other as BytesRef; @@ -313,6 +322,7 @@ public void Grow(int newLength) /// /// Unsigned byte order comparison + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int CompareTo(BytesRef other) { return utf8SortedAsUnicodeSortOrder.Compare(this, other); @@ -343,6 +353,7 @@ public int CompareTo(BytesRef other) /// The returned will have a length of other.Length /// and an offset of zero. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static BytesRef DeepCopyOf(BytesRef other) { BytesRef copy = new BytesRef(); diff --git a/src/Lucene.Net/Util/BytesRefArray.cs b/src/Lucene.Net/Util/BytesRefArray.cs index 3c96604ead..71e2134d2e 100644 --- a/src/Lucene.Net/Util/BytesRefArray.cs +++ b/src/Lucene.Net/Util/BytesRefArray.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -121,8 +122,8 @@ private class IntroSorterAnonymousInnerClassHelper : IntroSorter { private readonly BytesRefArray outerInstance; - private IComparer comp; - private int[] orderedEntries; + private readonly IComparer comp; + private readonly int[] orderedEntries; public IntroSorterAnonymousInnerClassHelper(BytesRefArray outerInstance, IComparer comp, int[] orderedEntries) { @@ -134,6 +135,7 @@ public IntroSorterAnonymousInnerClassHelper(BytesRefArray outerInstance, ICompar scratch2 = new BytesRef(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Swap(int i, int j) { int o = orderedEntries[i]; @@ -141,18 +143,21 @@ protected override void Swap(int i, int j) orderedEntries[j] = o; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int Compare(int i, int j) { int idx1 = orderedEntries[i], idx2 = orderedEntries[j]; return comp.Compare(outerInstance.Get(scratch1, idx1), outerInstance.Get(scratch2, idx2)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void SetPivot(int i) { int index = orderedEntries[i]; outerInstance.Get(pivot, index); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int ComparePivot(int j) { int index = orderedEntries[j]; @@ -233,6 +238,7 @@ public virtual BytesRef Next() /// /// Sugar for with a null comparer. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public IBytesRefEnumerator GetEnumerator() => GetEnumerator(null); @@ -251,6 +257,7 @@ public IBytesRefEnumerator GetEnumerator() /// This is a non-destructive operation. /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public IBytesRefEnumerator GetEnumerator(IComparer comparer) { int[] indices = comparer == null ? null : Sort(comparer); diff --git a/src/Lucene.Net/Util/BytesRefHash.cs b/src/Lucene.Net/Util/BytesRefHash.cs index d0c6c244a2..433cdd3001 100644 --- a/src/Lucene.Net/Util/BytesRefHash.cs +++ b/src/Lucene.Net/Util/BytesRefHash.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; #if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; #endif @@ -61,7 +62,7 @@ namespace Lucene.Net.Util private int lastCount = -1; private int[] ids; private readonly BytesStartArray bytesStartArray; - private Counter bytesUsed; + private readonly Counter bytesUsed; // LUCENENET: marked readonly /// /// Creates a new with a using a @@ -93,7 +94,7 @@ public BytesRefHash(ByteBlockPool pool, int capacity, BytesStartArray bytesStart Arrays.Fill(ids, -1); this.bytesStartArray = bytesStartArray; bytesStart = bytesStartArray.Init(); - bytesUsed = bytesStartArray.BytesUsed() == null ? Counter.NewCounter() : bytesStartArray.BytesUsed(); + bytesUsed = bytesStartArray.BytesUsed() ?? 
Counter.NewCounter(); bytesUsed.AddAndGet(hashSize * RamUsageEstimator.NUM_BYTES_INT32); } @@ -169,6 +170,7 @@ public int[] Compact() /// /// /// The used for sorting + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int[] Sort(IComparer comp) { int[] compact = Compact(); @@ -178,11 +180,11 @@ public int[] Sort(IComparer comp) private class IntroSorterAnonymousInnerClassHelper : IntroSorter { - private BytesRefHash outerInstance; + private readonly BytesRefHash outerInstance; - private IComparer comp; - private int[] compact; - private readonly BytesRef pivot = new BytesRef(), scratch1 = new BytesRef(), scratch2 = new BytesRef(); + private readonly IComparer comp; + private readonly int[] compact; + private readonly BytesRef pivot = new BytesRef(), /*scratch1 = new BytesRef(), // LUCENENET: Never read */ scratch2 = new BytesRef(); public IntroSorterAnonymousInnerClassHelper(BytesRefHash outerInstance, IComparer comp, int[] compact) { @@ -202,11 +204,13 @@ protected override int Compare(int i, int j) { int id1 = compact[i], id2 = compact[j]; if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.bytesStart.Length > id1 && outerInstance.bytesStart.Length > id2); + // LUCENENET NOTE: It is critical that this be outerInstance.scratch1 instead of scratch1 outerInstance.pool.SetBytesRef(outerInstance.scratch1, outerInstance.bytesStart[id1]); outerInstance.pool.SetBytesRef(scratch2, outerInstance.bytesStart[id2]); return comp.Compare(outerInstance.scratch1, scratch2); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void SetPivot(int i) { int id = compact[i]; @@ -223,6 +227,7 @@ protected override int ComparePivot(int j) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool Equals(int id, BytesRef b) { pool.SetBytesRef(scratch1, bytesStart[id]); @@ -274,6 +279,7 @@ public void Clear(bool resetPool) Arrays.Fill(ids, -1); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Clear() { Clear(true); @@ 
-373,6 +379,7 @@ public int Add(BytesRef bytes) /// /// The id of the given bytes, or -1 if there is no mapping for the /// given bytes. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int Find(BytesRef bytes) { return ids[FindHash(bytes)]; @@ -515,6 +522,7 @@ private void Rehash(int newSize, bool hashOnData) } // TODO: maybe use long? But our keys are typically short... + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int DoHash(byte[] bytes, int offset, int length) { return StringHelper.Murmurhash3_x86_32(bytes, offset, length, StringHelper.GOOD_FAST_HASH_SEED); @@ -547,6 +555,7 @@ public void Reinit() /// The id to look up /// The bytesStart offset into the internally used /// for the given id + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int ByteStart(int bytesID) { if (Debugging.AssertsEnabled) @@ -644,22 +653,26 @@ public DirectBytesStartArray(int initSize) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int[] Clear() { return bytesStart = null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int[] Grow() { if (Debugging.AssertsEnabled) Debugging.Assert(bytesStart != null); return bytesStart = ArrayUtil.Grow(bytesStart, bytesStart.Length + 1); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int[] Init() { return bytesStart = new int[ArrayUtil.Oversize(m_initSize, RamUsageEstimator.NUM_BYTES_INT32)]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Counter BytesUsed() { return bytesUsed; diff --git a/src/Lucene.Net/Util/CharsRef.cs b/src/Lucene.Net/Util/CharsRef.cs index 7c76a91d6f..a8eb3d0785 100644 --- a/src/Lucene.Net/Util/CharsRef.cs +++ b/src/Lucene.Net/Util/CharsRef.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; using WritableArrayAttribute = Lucene.Net.Support.WritableArrayAttribute; namespace Lucene.Net.Util @@ -112,6 
+113,7 @@ public CharsRef(string @string) /// object. /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { return new CharsRef(chars, Offset, Length); @@ -131,14 +133,14 @@ public override int GetHashCode() public override bool Equals(object other) { - if (other == null) + if (other is null) { return false; } - if (other is CharsRef) + if (other is CharsRef charsRef) { - return this.CharsEquals(((CharsRef)other)); + return this.CharsEquals(charsRef); } return false; } @@ -207,6 +209,7 @@ public int CompareTo(CharsRef other) /// /// /// The to copy. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void CopyChars(CharsRef other) { CopyChars(other.chars, other.Offset, other.Length); @@ -259,6 +262,7 @@ public void Append(char[] otherChars, int otherOffset, int otherLength) Length = newLen; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return new string(chars, Offset, Length); @@ -279,6 +283,7 @@ public override string ToString() // LUCENENET specific - added to .NETify public char this[int index] { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { // NOTE: must do a real check here to meet the specs of CharSequence @@ -388,6 +393,7 @@ public virtual int Compare(CharsRef a, CharsRef b) /// The returned will have a Length of other.Length /// and an offset of zero. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static CharsRef DeepCopyOf(CharsRef other) { CharsRef clone = new CharsRef(); diff --git a/src/Lucene.Net/Util/CloseableThreadLocal.cs b/src/Lucene.Net/Util/CloseableThreadLocal.cs index c394cb7f39..e819252c39 100644 --- a/src/Lucene.Net/Util/CloseableThreadLocal.cs +++ b/src/Lucene.Net/Util/CloseableThreadLocal.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; +using System.Runtime.CompilerServices; using System.Threading; namespace Lucene.Net.Util @@ -87,6 +88,7 @@ public DisposableThreadLocal(Func valueFactory) /// The instance has been disposed. public ICollection Values { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { if (_disposed) @@ -101,6 +103,7 @@ public ICollection Values /// The instance has been disposed. public bool IsValueCreated { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { if (_disposed) @@ -236,6 +239,7 @@ private sealed class WeakReferenceCompareValue : IEquatable _weak; private readonly int _hashCode; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool TryGetTarget(out TK target) { return _weak.TryGetTarget(out target); @@ -281,6 +285,7 @@ public override bool Equals(object obj) return Equals((WeakReferenceCompareValue)obj); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return _hashCode; diff --git a/src/Lucene.Net/Util/CollectionUtil.cs b/src/Lucene.Net/Util/CollectionUtil.cs index da0a77896e..50d64fc976 100644 --- a/src/Lucene.Net/Util/CollectionUtil.cs +++ b/src/Lucene.Net/Util/CollectionUtil.cs @@ -1,6 +1,7 @@ using J2N.Collections.Generic.Extensions; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -50,21 +51,25 @@ internal ListIntroSorter(IList list, IComparer comp) this.comp = comp; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void 
SetPivot(int i) { pivot = (i < list.Count) ? list[i] : default; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Swap(int i, int j) { list.Swap(i, j); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int Compare(int i, int j) { return comp.Compare(list[i], list[j]); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int ComparePivot(int j) { return comp.Compare(pivot, list[j]); @@ -97,16 +102,19 @@ internal ListTimSorter(IList list, IComparer comp, int maxTempSlots) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Swap(int i, int j) { list.Swap(i, j); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Copy(int src, int dest) { list[dest] = list[src]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Save(int i, int len) { for (int j = 0; j < len; ++j) @@ -115,16 +123,19 @@ protected override void Save(int i, int len) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Restore(int i, int j) { list[j] = tmp[i]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int Compare(int i, int j) { return comp.Compare(list[i], list[j]); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override int CompareSaved(int i, int j) { return comp.Compare(tmp[i], list[j]); diff --git a/src/Lucene.Net/Util/CommandLineUtil.cs b/src/Lucene.Net/Util/CommandLineUtil.cs index bf7367b818..6aae655a8b 100644 --- a/src/Lucene.Net/Util/CommandLineUtil.cs +++ b/src/Lucene.Net/Util/CommandLineUtil.cs @@ -1,6 +1,7 @@ using System; using System.IO; using System.Reflection; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -64,6 +65,7 @@ public static FSDirectory NewFSDirectory(string clazzName, DirectoryInfo dir) /// The name of the class to load. /// The class loaded. /// If the specified class cannot be found. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Type LoadDirectoryClass(string clazzName) { return Type.GetType(AdjustDirectoryClassName(clazzName)); @@ -74,6 +76,7 @@ public static Type LoadDirectoryClass(string clazzName) /// The name of the class to load. /// The class loaded. /// If the specified class cannot be found. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Type LoadFSDirectoryClass(string clazzName) { return Type.GetType(AdjustDirectoryClassName(clazzName)); @@ -106,6 +109,7 @@ private static string AdjustDirectoryClassName(string clazzName) /// If the class is abstract or an interface. /// If the constructor does not have public visibility. /// If the constructor throws an exception + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static FSDirectory NewFSDirectory(Type clazz, DirectoryInfo dir) { // Assuming every FSDirectory has a ctor(File): diff --git a/src/Lucene.Net/Util/Constants.cs b/src/Lucene.Net/Util/Constants.cs index f48bcdee6e..60addcacbd 100644 --- a/src/Lucene.Net/Util/Constants.cs +++ b/src/Lucene.Net/Util/Constants.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; using System.Runtime.InteropServices; #if NETFRAMEWORK using Microsoft.Win32; @@ -89,6 +90,7 @@ public static class Constants // LUCENENET specific - made static because all me #endif public static readonly string RUNTIME_VERSION = LoadRuntimeVersion(); + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static string LoadRuntimeVersion() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) { #if NETFRAMEWORK @@ -104,6 +106,7 @@ private static string LoadRuntimeVersion() // LUCENENET: Avoid static constructo /// NOTE: This was JRE_IS_64BIT in Lucene /// public static readonly bool RUNTIME_IS_64BIT = LoadRuntimeIs64Bit(); // LUCENENET NOTE: We still need this constant to indicate 64 bit runtime. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] private static bool LoadRuntimeIs64Bit() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) { // LUCENENET NOTE: In Java, the check is for sun.misc.Unsafe.addressSize, @@ -118,6 +121,7 @@ private static bool LoadRuntimeIs64Bit() // LUCENENET: Avoid static constructors // this method prevents inlining the final version constant in compiled classes, // see: http://www.javaworld.com/community/node/3400 + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static string Ident(string s) { return s.ToString(); @@ -171,18 +175,17 @@ private static string GetFramework45PlusFromRegistry() // As an alternative, if you know the computers you will query are running .NET Framework 4.5 // or later, you can use: - using (RegistryKey ndpKey = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32).OpenSubKey(subkey)) + using RegistryKey ndpKey = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32).OpenSubKey(subkey); + object releaseValue; + if (ndpKey != null && (releaseValue = ndpKey.GetValue("Release")) != null) { - if (ndpKey != null && ndpKey.GetValue("Release") != null) - { - return CheckFor45PlusVersion((int)ndpKey.GetValue("Release")); - } - else - { - // Fall back to Environment.Version (probably wrong, but this is our best guess if the registry check fails) - return Environment.Version.ToString(); - //Console.WriteLine(".NET Framework Version 4.5 or later is not detected."); - } + return CheckFor45PlusVersion((int)releaseValue); + } + else + { + // Fall back to Environment.Version (probably wrong, but this is our best guess if the registry check fails) + return Environment.Version.ToString(); + //Console.WriteLine(".NET Framework Version 4.5 or later is not detected."); } } @@ -226,6 +229,7 @@ private static string CheckFor45PlusVersion(int releaseKey) /// /// The string to examine /// A regex object 
to use to extract the string + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static string ExtractString(string input, Regex pattern) { Match m = pattern.Match(input); diff --git a/src/Lucene.Net/Util/Counter.cs b/src/Lucene.Net/Util/Counter.cs index 8c72ac61e1..16f5466e59 100644 --- a/src/Lucene.Net/Util/Counter.cs +++ b/src/Lucene.Net/Util/Counter.cs @@ -1,5 +1,6 @@ using J2N.Threading.Atomic; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -51,6 +52,7 @@ public abstract class Counter /// /// Returns a new counter. The returned counter is not thread-safe. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Counter NewCounter() { return NewCounter(false); @@ -63,6 +65,7 @@ public static Counter NewCounter() /// true if the returned counter can be used by multiple /// threads concurrently. /// A new counter. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Counter NewCounter(bool threadSafe) { return threadSafe ? 
(Counter)new AtomicCounter() : new SerialCounter(); @@ -78,6 +81,7 @@ private sealed class SerialCounter : Counter { private long count = 0; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long AddAndGet(long delta) { return count += delta; @@ -90,6 +94,7 @@ private sealed class AtomicCounter : Counter { private readonly AtomicInt64 count = new AtomicInt64(); + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long AddAndGet(long delta) { return count.AddAndGet(delta); diff --git a/src/Lucene.Net/Util/DocIdBitSet.cs b/src/Lucene.Net/Util/DocIdBitSet.cs index 440848a00d..4b1040a65a 100644 --- a/src/Lucene.Net/Util/DocIdBitSet.cs +++ b/src/Lucene.Net/Util/DocIdBitSet.cs @@ -1,3 +1,4 @@ +using System.Runtime.CompilerServices; using BitSet = J2N.Collections.BitSet; namespace Lucene.Net.Util @@ -50,6 +51,7 @@ public override DocIdSetIterator GetIterator() /// public virtual BitSet BitSet => this.bitSet; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool Get(int index) { return bitSet.Get(index); @@ -89,6 +91,7 @@ public override int Advance(int target) return docId; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { // upper bound diff --git a/src/Lucene.Net/Util/DoubleBarrelLRUCache.cs b/src/Lucene.Net/Util/DoubleBarrelLRUCache.cs index dc9c7095e0..bc3669df27 100644 --- a/src/Lucene.Net/Util/DoubleBarrelLRUCache.cs +++ b/src/Lucene.Net/Util/DoubleBarrelLRUCache.cs @@ -75,8 +75,7 @@ public TValue Get(TKey key) } // Try primary first - TValue result; - if (!primary.TryGetValue(key, out result)) + if (!primary.TryGetValue(key, out TValue result)) { // Not found -- try secondary if (secondary.TryGetValue(key, out result)) diff --git a/src/Lucene.Net/Util/FieldCacheSanityChecker.cs b/src/Lucene.Net/Util/FieldCacheSanityChecker.cs index 9198c96b9d..784fcfeebd 100644 --- a/src/Lucene.Net/Util/FieldCacheSanityChecker.cs +++ b/src/Lucene.Net/Util/FieldCacheSanityChecker.cs @@ -79,6 +79,7 @@ 
public FieldCacheSanityChecker(bool estimateRam) /// /// Quick and dirty convenience method /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Insanity[] CheckSanity(IFieldCache cache) { return CheckSanity(cache.GetCacheEntries()); @@ -88,6 +89,7 @@ public static Insanity[] CheckSanity(IFieldCache cache) /// Quick and dirty convenience method that instantiates an instance with /// "good defaults" and uses it to test the s /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Insanity[] CheckSanity(params FieldCache.CacheEntry[] cacheEntries) { FieldCacheSanityChecker sanityChecker = new FieldCacheSanityChecker(estimateRam: true); @@ -171,7 +173,10 @@ public Insanity[] Check(params FieldCache.CacheEntry[] cacheEntries) /// instances accordingly. The are used to populate /// the objects. /// - private ICollection CheckValueMismatch(MapOfSets valIdToItems, MapOfSets readerFieldToValIds, ISet valMismatchKeys) + private static ICollection CheckValueMismatch( // LUCENENET: CA1822: Mark members as static + MapOfSets valIdToItems, + MapOfSets readerFieldToValIds, + ISet valMismatchKeys) { List insanity = new List(valMismatchKeys.Count * 3); @@ -208,7 +213,7 @@ private ICollection CheckValueMismatch(MapOfSets /// - private ICollection CheckSubreaders(MapOfSets valIdToItems, MapOfSets readerFieldToValIds) + private static ICollection CheckSubreaders(MapOfSets valIdToItems, MapOfSets readerFieldToValIds) // LUCENENET: CA1822: Mark members as static { List insanity = new List(23); @@ -289,7 +294,7 @@ private ICollection CheckSubreaders(MapOfSetsseed.CoreCacheKey /// - private IList GetAllDescendantReaderKeys(object seed) + private static IList GetAllDescendantReaderKeys(object seed) // LUCENENET: CA1822: Mark members as static { var all = new List(17) {seed}; // will grow as we iter for (var i = 0; i < all.Count; i++) @@ -297,8 +302,7 @@ private IList GetAllDescendantReaderKeys(object seed) var obj = all[i]; // TODO: We don't check closed 
readers here (as getTopReaderContext // throws ObjectDisposedException), what should we do? Reflection? - var reader = obj as IndexReader; - if (reader != null) + if (obj is IndexReader reader) { try { @@ -336,6 +340,7 @@ public ReaderField(object readerKey, string fieldName) this.FieldName = fieldName; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return RuntimeHelpers.GetHashCode(readerKey) * FieldName.GetHashCode(); @@ -441,6 +446,7 @@ private InsanityType(string label) this.label = label; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return label; diff --git a/src/Lucene.Net/Util/FilterIterator.cs b/src/Lucene.Net/Util/FilterIterator.cs index 7742977f7b..059caa795c 100644 --- a/src/Lucene.Net/Util/FilterIterator.cs +++ b/src/Lucene.Net/Util/FilterIterator.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -57,6 +58,7 @@ public bool MoveNext() } // LUCENENET specific - seems logical to call reset on the underlying implementation + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Reset() { current = default; @@ -67,6 +69,7 @@ public void Reset() object System.Collections.IEnumerator.Current => current; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Dispose() => iter.Dispose(); } @@ -77,15 +80,15 @@ public void Reset() public abstract class FilterIterator : IEnumerator { private readonly IEnumerator iter; - private T next = default(T); + private T next = default; private bool nextIsSet = false; - private T current = default(T); + private T current = default; /// /// Returns true, if this element should be set to by . 
protected abstract bool PredicateFunction(T @object); - public FilterIterator(IEnumerator baseIterator) + protected FilterIterator(IEnumerator baseIterator) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.iter = baseIterator; } @@ -105,7 +108,7 @@ public bool MoveNext() finally { nextIsSet = false; - next = default(T); + next = default; } return true; } diff --git a/src/Lucene.Net/Util/FixedBitSet.cs b/src/Lucene.Net/Util/FixedBitSet.cs index 002e9cae32..58d1bd331f 100644 --- a/src/Lucene.Net/Util/FixedBitSet.cs +++ b/src/Lucene.Net/Util/FixedBitSet.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -90,6 +91,7 @@ public override int NextDoc() public override int DocID => doc; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return numBits; @@ -167,6 +169,7 @@ public static int Bits2words(int numBits) /// Returns the popcount or cardinality of the intersection of the two sets. /// Neither set is modified. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long IntersectionCount(FixedBitSet a, FixedBitSet b) { return BitUtil.Pop_Intersect(a.bits, b.bits, 0, Math.Min(a.numWords, b.numWords)); @@ -242,6 +245,7 @@ public override DocIdSetIterator GetIterator() /// /// Expert. [WritableArray] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public long[] GetBits() { return bits; @@ -252,6 +256,7 @@ public long[] GetBits() /// in the backing bits array, and the result is not /// internally cached! 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int Cardinality() { return (int)BitUtil.Pop_Array(bits, 0, bits.Length); @@ -370,17 +375,15 @@ public int PrevSetBit(int index) /// public void Or(DocIdSetIterator iter) { - if (iter is OpenBitSetIterator && iter.DocID == -1) + if (iter.DocID == -1 && iter is OpenBitSetIterator obs) { - OpenBitSetIterator obs = (OpenBitSetIterator)iter; Or(obs.arr, obs.words); // advance after last doc that would be accepted if standard // iteration is used (to exhaust it): obs.Advance(numBits); } - else if (iter is FixedBitSetIterator && iter.DocID == -1) + else if (iter.DocID == -1 && iter is FixedBitSetIterator fbs) { - FixedBitSetIterator fbs = (FixedBitSetIterator)iter; Or(fbs.bits, fbs.numWords); // advance after last doc that would be accepted if standard // iteration is used (to exhaust it): @@ -398,6 +401,7 @@ public void Or(DocIdSetIterator iter) /// /// this = this OR other + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Or(FixedBitSet other) { Or(other.bits, other.numWords); @@ -445,17 +449,15 @@ public void Xor(DocIdSetIterator iter) /// public void And(DocIdSetIterator iter) { - if (iter is OpenBitSetIterator && iter.DocID == -1) + if (iter.DocID == -1 && iter is OpenBitSetIterator obs) { - OpenBitSetIterator obs = (OpenBitSetIterator)iter; And(obs.arr, obs.words); // advance after last doc that would be accepted if standard // iteration is used (to exhaust it): obs.Advance(numBits); } - else if (iter is FixedBitSetIterator && iter.DocID == -1) + else if (iter.DocID == -1 && iter is FixedBitSetIterator fbs) { - FixedBitSetIterator fbs = (FixedBitSetIterator)iter; And(fbs.bits, fbs.numWords); // advance after last doc that would be accepted if standard // iteration is used (to exhaust it): @@ -498,6 +500,7 @@ public bool Intersects(FixedBitSet other) /// /// this = this AND other + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void And(FixedBitSet other) { And(other.bits, 
other.numWords); @@ -523,17 +526,15 @@ private void And(long[] otherArr, int otherNumWords) /// public void AndNot(DocIdSetIterator iter) { - if (iter is OpenBitSetIterator && iter.DocID == -1) + if (iter.DocID == -1 && iter is OpenBitSetIterator obs) { - OpenBitSetIterator obs = (OpenBitSetIterator)iter; AndNot(obs.arr, obs.words); // advance after last doc that would be accepted if standard // iteration is used (to exhaust it): obs.Advance(numBits); } - else if (iter is FixedBitSetIterator && iter.DocID == -1) + else if (iter.DocID == -1 && iter is FixedBitSetIterator fbs) { - FixedBitSetIterator fbs = (FixedBitSetIterator)iter; AndNot(fbs.bits, fbs.numWords); // advance after last doc that would be accepted if standard // iteration is used (to exhaust it): @@ -551,6 +552,7 @@ public void AndNot(DocIdSetIterator iter) /// /// this = this AND NOT other + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void AndNot(FixedBitSet other) { AndNot(other.bits, other.bits.Length); @@ -694,6 +696,7 @@ public void Clear(int startIndex, int endIndex) bits[endWord] &= endmask; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public FixedBitSet Clone() { long[] bits = new long[this.bits.Length]; diff --git a/src/Lucene.Net/Util/Fst/Builder.cs b/src/Lucene.Net/Util/Fst/Builder.cs index 3662c3572a..96198cd7f4 100644 --- a/src/Lucene.Net/Util/Fst/Builder.cs +++ b/src/Lucene.Net/Util/Fst/Builder.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -486,7 +487,7 @@ public virtual void Add(Int32sRef input, T output) //System.out.println(" count[0]=" + frontier[0].inputCount); } - internal bool ValidOutput(T output) + internal bool ValidOutput(T output) // Only called from assert { return output.Equals(NO_OUTPUT) || !output.Equals(NO_OUTPUT); } @@ -533,6 +534,7 @@ public virtual FST Finish() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] 
private void CompileAllTargets(UnCompiledNode node, int tailLength) { for (int arcIdx = 0; arcIdx < node.NumArcs; arcIdx++) @@ -560,6 +562,7 @@ private void CompileAllTargets(UnCompiledNode node, int tailLength) // LUCENENET specific: moved INode to Builder type + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual long GetFstSizeInBytes() { return fst.GetSizeInBytes(); diff --git a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs index 48e7f5a63b..57f1479450 100644 --- a/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/ByteSequenceOutputs.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -162,6 +163,7 @@ public override BytesRef Read(DataInput @in) public override BytesRef NoOutput => NO_OUTPUT; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string OutputToString(BytesRef output) { return output.ToString(); diff --git a/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs b/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs index 5fdcb13c58..9ed7e7f325 100644 --- a/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/BytesRefFSTEnum.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -136,6 +137,7 @@ protected override int CurrentLabel set => current.Bytes[m_upto] = (byte)value; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Grow() { current.Bytes = ArrayUtil.Grow(current.Bytes, m_upto + 1); diff --git a/src/Lucene.Net/Util/Fst/BytesStore.cs b/src/Lucene.Net/Util/Fst/BytesStore.cs index 6d58829dbb..8543233db4 100644 --- a/src/Lucene.Net/Util/Fst/BytesStore.cs +++ b/src/Lucene.Net/Util/Fst/BytesStore.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace 
Lucene.Net.Util.Fst @@ -426,6 +427,7 @@ public ForwardBytesReaderAnonymousInner(BytesStore outerInstance) private int nextBuffer; private int nextRead; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override byte ReadByte() { if (nextRead == outerInstance.blockSize) @@ -436,6 +438,7 @@ public override byte ReadByte() return current[nextRead++]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void SkipBytes(int count) { Position += count; @@ -482,11 +485,13 @@ public override long Position public override bool IsReversed => false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual FST.BytesReader GetReverseReader() { return GetReverseReader(true); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual FST.BytesReader GetReverseReader(bool allowSingle) { if (allowSingle && blocks.Count == 1) @@ -512,6 +517,7 @@ public ReverseBytesReaderAnonymousInner(BytesStore outerInstance) private int nextBuffer; private int nextRead; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override byte ReadByte() { if (nextRead == -1) @@ -522,11 +528,13 @@ public override byte ReadByte() return current[nextRead--]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void SkipBytes(int count) { Position -= count; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void ReadBytes(byte[] b, int offset, int len) { for (int i = 0; i < len; i++) diff --git a/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs index 062b0b50fe..4c5cdb91cc 100644 --- a/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/CharSequenceOutputs.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -172,6 +173,7 @@ public override CharsRef Read(DataInput @in) public override CharsRef NoOutput => NO_OUTPUT; + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public override string OutputToString(CharsRef output) { return output.ToString(); diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs index 380a425f94..fe0379c469 100644 --- a/src/Lucene.Net/Util/Fst/FST.cs +++ b/src/Lucene.Net/Util/Fst/FST.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Runtime.CompilerServices; using System.Text; using JCG = J2N.Collections.Generic; @@ -340,6 +341,7 @@ public long GetSizeInBytes() return size; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void Finish(long newStartNode) { if (startNode != -1) @@ -356,6 +358,7 @@ internal void Finish(long newStartNode) CacheRootArcs(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private long GetNodeAddress(long node) { if (nodeAddress != null) @@ -371,6 +374,7 @@ private long GetNodeAddress(long node) } // Caches first 128 labels + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void CacheRootArcs() { cachedRootArcs = (FST.Arc[])new FST.Arc[0x80]; @@ -383,6 +387,7 @@ private void CacheRootArcs() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void ReadRootArcs(FST.Arc[] arcs) { FST.Arc arc = new FST.Arc(); @@ -582,6 +587,7 @@ public void Save(FileInfo file) } // LUCENENET NOTE: static Read() was moved into the FST class + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void WriteLabel(DataOutput @out, int v) { if (Debugging.AssertsEnabled) Debugging.Assert(v >= 0,"v={0}", v); @@ -601,6 +607,7 @@ private void WriteLabel(DataOutput @out, int v) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal int ReadLabel(DataInput @in) { int v; @@ -625,6 +632,7 @@ internal int ReadLabel(DataInput @in) /// returns true if the node at this address has any /// outgoing arcs /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool TargetHasArcs(FST.Arc arc) { return arc.Target > 0; @@ -965,6 
+973,7 @@ public FST.Arc ReadLastTargetArc(FST.Arc follow, FST.Arc arc, FST.Bytes } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private long ReadUnpackedNodeTarget(FST.BytesReader @in) { long target; @@ -1056,6 +1065,7 @@ public FST.Arc ReadFirstRealTargetArc(long node, FST.Arc arc, FST.BytesRea /// /// Returns true if arc points to a state in an /// expanded array format. + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal bool IsExpandedTarget(FST.Arc follow, FST.BytesReader @in) { if (!TargetHasArcs(follow)) @@ -1397,6 +1407,7 @@ public FST.Arc FindTargetArc(int labelToMatch, FST.Arc follow, FST.Arc } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SeekToNextNode(FST.BytesReader @in) { while (true) @@ -1455,6 +1466,7 @@ private void SeekToNextNode(FST.BytesReader @in) /// /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool ShouldExpand(Builder.UnCompiledNode node) { return allowArrayArcs && ((node.Depth <= FST.FIXED_ARRAY_SHALLOW_DISTANCE && node.NumArcs >= FST.FIXED_ARRAY_NUM_ARCS_SHALLOW) || node.NumArcs >= FST.FIXED_ARRAY_NUM_ARCS_DEEP); @@ -1610,6 +1622,7 @@ public void countSingleChains() throws IOException { /// /// Creates a packed FST /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private FST(FST.INPUT_TYPE inputType, Outputs outputs, int bytesPageBits) { version = FST.VERSION_CURRENT; @@ -2267,6 +2280,7 @@ public Arc CopyFrom(Arc other) return this; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual bool Flag(int flag) { return FST.Flag(Flags, flag); @@ -2340,6 +2354,7 @@ public NodeQueue(int topN) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal override bool LessThan(NodeAndInCount a, NodeAndInCount b) { int cmp = a.CompareTo(b); diff --git a/src/Lucene.Net/Util/Fst/FSTEnum.cs b/src/Lucene.Net/Util/Fst/FSTEnum.cs index dd5e9f7b9f..5293480865 100644 --- a/src/Lucene.Net/Util/Fst/FSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/FSTEnum.cs @@ 
-1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -627,6 +628,7 @@ private void PushLast() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private FST.Arc GetArc(int idx) { if (m_arcs[idx] == null) diff --git a/src/Lucene.Net/Util/Fst/ForwardBytesReader.cs b/src/Lucene.Net/Util/Fst/ForwardBytesReader.cs index acfa2d8baa..a10c9135fb 100644 --- a/src/Lucene.Net/Util/Fst/ForwardBytesReader.cs +++ b/src/Lucene.Net/Util/Fst/ForwardBytesReader.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -34,17 +35,20 @@ public ForwardBytesReader(byte[] bytes) this.bytes = bytes; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override byte ReadByte() { return bytes[pos++]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void ReadBytes(byte[] b, int offset, int len) { Array.Copy(bytes, pos, b, offset, len); pos += len; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void SkipBytes(int count) { pos += count; diff --git a/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs b/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs index 465b3c1f7d..90d4d84a0d 100644 --- a/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs +++ b/src/Lucene.Net/Util/Fst/IntSequenceOutputs.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -173,6 +174,7 @@ public override Int32sRef Read(DataInput @in) public override Int32sRef NoOutput => NO_OUTPUT; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string OutputToString(Int32sRef output) { return output.ToString(); diff --git a/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs b/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs index 206831bd33..ef97580e94 100644 --- a/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs +++ b/src/Lucene.Net/Util/Fst/IntsRefFSTEnum.cs @@ -1,4 +1,5 @@ using 
Lucene.Net.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -118,6 +119,7 @@ protected override int CurrentLabel set => current.Int32s[m_upto] = value; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override void Grow() { current.Int32s = ArrayUtil.Grow(current.Int32s, m_upto + 1); diff --git a/src/Lucene.Net/Util/Fst/NoOutputs.cs b/src/Lucene.Net/Util/Fst/NoOutputs.cs index c74d42638f..02b0f3324b 100644 --- a/src/Lucene.Net/Util/Fst/NoOutputs.cs +++ b/src/Lucene.Net/Util/Fst/NoOutputs.cs @@ -43,11 +43,13 @@ public ObjectAnonymousInnerClassHelper() /// NodeHash calls hashCode for this output; we fix this /// so we get deterministic hashing. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return 42; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool Equals(object other) { return other == this; @@ -62,6 +64,7 @@ private NoOutputs() public static NoOutputs Singleton => singleton; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Common(object output1, object output2) { if (Debugging.AssertsEnabled) @@ -72,6 +75,7 @@ public override object Common(object output1, object output2) return NO_OUTPUT; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Subtract(object output, object inc) { if (Debugging.AssertsEnabled) @@ -82,6 +86,7 @@ public override object Subtract(object output, object inc) return NO_OUTPUT; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Add(object prefix, object output) { if (Debugging.AssertsEnabled) @@ -103,11 +108,13 @@ public override object Merge(object first, object second) return NO_OUTPUT; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Write(object prefix, DataOutput @out) { //assert false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object Read(DataInput @in) { //assert false; @@ 
-117,6 +124,7 @@ public override object Read(DataInput @in) public override object NoOutput => NO_OUTPUT; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string OutputToString(object output) { return ""; diff --git a/src/Lucene.Net/Util/Fst/NodeHash.cs b/src/Lucene.Net/Util/Fst/NodeHash.cs index caa7937e8a..cb0469c6c9 100644 --- a/src/Lucene.Net/Util/Fst/NodeHash.cs +++ b/src/Lucene.Net/Util/Fst/NodeHash.cs @@ -1,5 +1,6 @@ using J2N.Collections; using Lucene.Net.Diagnostics; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util.Fst @@ -48,6 +49,7 @@ public NodeHash(FST fst, FST.BytesReader input) this.input = input; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool NodesEqual(Builder.UnCompiledNode node, long address) { fst.ReadFirstRealTargetArc(address, scratchArc, input); @@ -89,7 +91,8 @@ private bool NodesEqual(Builder.UnCompiledNode node, long address) /// hash code for an unfrozen node. this must be identical /// to the frozen case (below)!! 
/// - private long Hash(Builder.UnCompiledNode node) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static long Hash(Builder.UnCompiledNode node) // LUCENENET: CA1822: Mark members as static { const int PRIME = 31; //System.out.println("hash unfrozen"); @@ -118,6 +121,7 @@ private long Hash(Builder.UnCompiledNode node) /// /// hash code for a frozen node /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private long Hash(long node) { const int PRIME = 31; @@ -191,6 +195,7 @@ public long Add(Builder.UnCompiledNode nodeIn) /// /// called only by rehash /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void AddNew(long address) { long pos = Hash(address) & mask; @@ -208,6 +213,7 @@ private void AddNew(long address) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void Rehash() { PagedGrowableWriter oldTable = table; diff --git a/src/Lucene.Net/Util/Fst/Outputs.cs b/src/Lucene.Net/Util/Fst/Outputs.cs index 26dfa67597..634faf54c8 100644 --- a/src/Lucene.Net/Util/Fst/Outputs.cs +++ b/src/Lucene.Net/Util/Fst/Outputs.cs @@ -65,6 +65,7 @@ public abstract class Outputs /// . By default this just calls /// . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void WriteFinalOutput(T output, DataOutput @out) { Write(output, @out); @@ -81,6 +82,7 @@ public virtual void WriteFinalOutput(T output, DataOutput @out) /// . By default this /// just calls . 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual T ReadFinalOutput(DataInput @in) { return Read(@in); diff --git a/src/Lucene.Net/Util/Fst/PairOutputs.cs b/src/Lucene.Net/Util/Fst/PairOutputs.cs index 664dd88aa3..5eb14eb50f 100644 --- a/src/Lucene.Net/Util/Fst/PairOutputs.cs +++ b/src/Lucene.Net/Util/Fst/PairOutputs.cs @@ -1,4 +1,5 @@ using Lucene.Net.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -154,6 +155,7 @@ public override Pair Add(Pair prefix, Pair output) return NewPair(outputs1.Add(prefix.Output1, output.Output1), outputs2.Add(prefix.Output2, output.Output2)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Write(Pair output, DataOutput writer) { if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); @@ -161,6 +163,7 @@ public override void Write(Pair output, DataOutput writer) outputs2.Write(output.Output2, writer); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Pair Read(DataInput @in) { A output1 = outputs1.Read(@in); @@ -170,6 +173,7 @@ public override Pair Read(DataInput @in) public override Pair NoOutput => NO_OUTPUT; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string OutputToString(Pair output) { if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); diff --git a/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs b/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs index 8a523dfc5c..107c09b840 100644 --- a/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs +++ b/src/Lucene.Net/Util/Fst/PositiveIntOutputs.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { @@ -109,12 +110,14 @@ private PositiveInt32Outputs() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Write(long? 
output, DataOutput @out) { if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); @out.WriteVInt64(output.Value); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long? Read(DataInput @in) { long v = @in.ReadVInt64(); @@ -137,11 +140,13 @@ private bool Valid(long? o) public override long? NoOutput => NO_OUTPUT; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string OutputToString(long? output) { return output.ToString(); // LUCENENET TODO: Invariant Culture? } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string ToString() { return "PositiveIntOutputs"; diff --git a/src/Lucene.Net/Util/Fst/ReverseBytesReader.cs b/src/Lucene.Net/Util/Fst/ReverseBytesReader.cs index 92b98e552f..e6e6b9f326 100644 --- a/src/Lucene.Net/Util/Fst/ReverseBytesReader.cs +++ b/src/Lucene.Net/Util/Fst/ReverseBytesReader.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util.Fst { /* @@ -29,11 +31,13 @@ public ReverseBytesReader(byte[] bytes) this.bytes = bytes; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override byte ReadByte() { return bytes[pos--]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void ReadBytes(byte[] b, int offset, int len) { for (int i = 0; i < len; i++) @@ -42,6 +46,7 @@ public override void ReadBytes(byte[] b, int offset, int len) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void SkipBytes(int count) { pos -= count; diff --git a/src/Lucene.Net/Util/Fst/Util.cs b/src/Lucene.Net/Util/Fst/Util.cs index 4e5bcced65..40835da314 100644 --- a/src/Lucene.Net/Util/Fst/Util.cs +++ b/src/Lucene.Net/Util/Fst/Util.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Globalization; using System.IO; +using System.Runtime.CompilerServices; using BitSet = Lucene.Net.Util.OpenBitSet; using JCG = J2N.Collections.Generic; @@ -635,6 +636,7 @@ public virtual TopResults Search() return new 
TopResults(rejectCount + topN <= maxQueueDepth, results); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual bool AcceptResult(Int32sRef input, T output) { return true; @@ -955,6 +957,7 @@ public static void ToDot(FST fst, TextWriter @out, bool sameRank, bool lab /// /// Emit a single state in the dot language. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void EmitDotState(TextWriter @out, string name, string shape, string color, string label) { @out.Write(" " + name @@ -968,6 +971,7 @@ private static void EmitDotState(TextWriter @out, string name, string shape, str /// /// Ensures an arc's label is indeed printable (dot uses US-ASCII). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static string PrintableLabel(int label) { // Any ordinary ascii character, except for " or \, are diff --git a/src/Lucene.Net/Util/GrowableByteArrayDataOutput.cs b/src/Lucene.Net/Util/GrowableByteArrayDataOutput.cs index 6e68e4d551..20a3981704 100644 --- a/src/Lucene.Net/Util/GrowableByteArrayDataOutput.cs +++ b/src/Lucene.Net/Util/GrowableByteArrayDataOutput.cs @@ -1,5 +1,6 @@ using Lucene.Net.Support; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -52,6 +53,7 @@ public GrowableByteArrayDataOutput(int cp) this.Length = 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void WriteByte(byte b) { if (Length >= bytes.Length) @@ -61,6 +63,7 @@ public override void WriteByte(byte b) bytes[Length++] = b; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void WriteBytes(byte[] b, int off, int len) { int newLength = Length + len; diff --git a/src/Lucene.Net/Util/IOUtils.cs b/src/Lucene.Net/Util/IOUtils.cs index 45094a990b..1d16281d16 100644 --- a/src/Lucene.Net/Util/IOUtils.cs +++ b/src/Lucene.Net/Util/IOUtils.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.IO; +using 
System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Util @@ -164,6 +165,7 @@ public static void CloseWhileHandlingException(IEnumerable objects) /// /// null or an exception that will be rethrown after method completion. /// Objects to call on. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void DisposeWhileHandlingException(Exception priorException, params IDisposable[] objects) { Exception th = null; @@ -200,6 +202,7 @@ public static void DisposeWhileHandlingException(Exception priorException, param /// /// Disposes all given s, suppressing all thrown exceptions. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void DisposeWhileHandlingException(Exception priorException, IEnumerable objects) { Exception th = null; @@ -242,6 +245,7 @@ public static void DisposeWhileHandlingException(Exception priorException, IEnum /// /// /// Objects to call on + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Dispose(params IDisposable[] objects) { Exception th = null; @@ -271,6 +275,7 @@ public static void Dispose(params IDisposable[] objects) /// /// Disposes all given s. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Dispose(IEnumerable objects) { Exception th = null; @@ -303,6 +308,7 @@ public static void Dispose(IEnumerable objects) /// /// /// Objects to call on + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void DisposeWhileHandlingException(params IDisposable[] objects) { foreach (var o in objects) @@ -324,6 +330,7 @@ public static void DisposeWhileHandlingException(params IDisposable[] objects) /// /// Disposes all given s, suppressing all thrown exceptions. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void DisposeWhileHandlingException(IEnumerable objects) { foreach (IDisposable @object in objects) @@ -352,6 +359,7 @@ public static void DisposeWhileHandlingException(IEnumerable object /// /// this exception should get the suppressed one added /// the suppressed exception + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void AddSuppressed(Exception exception, Exception suppressed) { if (exception != null && suppressed != null) @@ -372,6 +380,7 @@ private static void AddSuppressed(Exception exception, Exception suppressed) /// The stream to wrap in a reader /// The expected charset /// A wrapping reader + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static TextReader GetDecodingReader(Stream stream, Encoding charSet) { return new StreamReader(stream, charSet); diff --git a/src/Lucene.Net/Util/InPlaceMergeSorter.cs b/src/Lucene.Net/Util/InPlaceMergeSorter.cs index 31bda9583a..6423f59203 100644 --- a/src/Lucene.Net/Util/InPlaceMergeSorter.cs +++ b/src/Lucene.Net/Util/InPlaceMergeSorter.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util { /* @@ -28,7 +30,7 @@ public abstract class InPlaceMergeSorter : Sorter { /// /// Create a new - public InPlaceMergeSorter() + protected InPlaceMergeSorter() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } @@ -36,6 +38,7 @@ public InPlaceMergeSorter() /// Sort the slice which starts at (inclusive) and ends at /// (exclusive). 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override sealed void Sort(int from, int to) { CheckRange(from, to); diff --git a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs index 19d876a354..a7849d6749 100644 --- a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs +++ b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -63,7 +64,11 @@ public static class IndexableBinaryStringTools // LUCENENET specific - made stat /// Number of bytes in /// The number of chars required to encode the number of s. // LUCENENET specific overload for CLS compliance + + [MethodImpl(MethodImplOptions.AggressiveInlining)] +#pragma warning disable IDE0060 // Remove unused parameter public static int GetEncodedLength(byte[] inputArray, int inputOffset, int inputLength) +#pragma warning restore IDE0060 // Remove unused parameter { // Use long for intermediaries to protect against overflow return (int)((8L * inputLength + 14L) / 15L) + 1; @@ -77,7 +82,10 @@ public static int GetEncodedLength(byte[] inputArray, int inputOffset, int input /// Number of sbytes in /// The number of chars required to encode the number of s. 
[CLSCompliant(false)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] +#pragma warning disable IDE0060 // Remove unused parameter public static int GetEncodedLength(sbyte[] inputArray, int inputOffset, int inputLength) +#pragma warning restore IDE0060 // Remove unused parameter { // Use long for intermediaries to protect against overflow return (int)((8L * inputLength + 14L) / 15L) + 1; @@ -118,6 +126,7 @@ public static int GetDecodedLength(char[] encoded, int offset, int length) /// Initial offset into outputArray /// Length of output, must be GetEncodedLength(inputArray, inputOffset, inputLength) // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Encode(byte[] inputArray, int inputOffset, int inputLength, char[] outputArray, int outputOffset, int outputLength) { Encode((sbyte[])(Array)inputArray, inputOffset, inputLength, outputArray, outputOffset, outputLength); @@ -197,6 +206,7 @@ public static void Encode(sbyte[] inputArray, int inputOffset, int inputLength, /// Length of output, must be /// GetDecodedLength(inputArray, inputOffset, inputLength) // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void Decode(char[] inputArray, int inputOffset, int inputLength, byte[] outputArray, int outputOffset, int outputLength) { Decode(inputArray, inputOffset, inputLength, (sbyte[])(Array)outputArray, outputOffset, outputLength); diff --git a/src/Lucene.Net/Util/IntBlockPool.cs b/src/Lucene.Net/Util/IntBlockPool.cs index b26582b6dc..36574628eb 100644 --- a/src/Lucene.Net/Util/IntBlockPool.cs +++ b/src/Lucene.Net/Util/IntBlockPool.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -54,7 +55,7 @@ public abstract class Allocator { protected readonly int m_blockSize; - public Allocator(int blockSize) + protected 
Allocator(int blockSize) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { this.m_blockSize = blockSize; } @@ -67,6 +68,7 @@ public Allocator(int blockSize) /// /// NOTE: This was getIntBlock() in Lucene /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual int[] GetInt32Block() { return new int[m_blockSize]; @@ -159,6 +161,7 @@ public Int32BlockPool(Allocator allocator) /// Resets the pool to its initial state reusing the first buffer. Calling /// is not needed after reset. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Reset() { this.Reset(true, true); @@ -324,6 +327,7 @@ public SliceWriter(Int32BlockPool pool) } /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Reset(int sliceOffset) { this.offset = sliceOffset; @@ -354,6 +358,7 @@ public virtual void WriteInt32(int value) /// Starts a new slice and returns the start offset. The returned value /// should be used as the start offset to initialize a . 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual int StartNewSlice() { return offset = pool.NewSlice(FIRST_LEVEL_SIZE) + pool.Int32Offset; @@ -425,6 +430,7 @@ public void Reset(int startOffset, int endOffset) /// public bool IsEndOfSlice { + [MethodImpl(MethodImplOptions.AggressiveInlining)] get { if (Debugging.AssertsEnabled) Debugging.Assert(upto + bufferOffset <= end); diff --git a/src/Lucene.Net/Util/IntroSorter.cs b/src/Lucene.Net/Util/IntroSorter.cs index 4f2bff3852..e06bf4b278 100644 --- a/src/Lucene.Net/Util/IntroSorter.cs +++ b/src/Lucene.Net/Util/IntroSorter.cs @@ -1,4 +1,5 @@ using J2N.Numerics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -31,6 +32,7 @@ namespace Lucene.Net.Util /// public abstract class IntroSorter : Sorter { + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int CeilLog2(int n) { //8bits in a byte @@ -39,7 +41,7 @@ internal static int CeilLog2(int n) /// /// Create a new . - public IntroSorter() + protected IntroSorter() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected) { } @@ -53,6 +55,7 @@ public override sealed void Sort(int from, int to) Quicksort(from, to, CeilLog2(to - from)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void Quicksort(int from, int to, int maxDepth) { if (to - from < THRESHOLD) diff --git a/src/Lucene.Net/Util/IntsRef.cs b/src/Lucene.Net/Util/IntsRef.cs index 204195148c..76b47dc07a 100644 --- a/src/Lucene.Net/Util/IntsRef.cs +++ b/src/Lucene.Net/Util/IntsRef.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Util @@ -103,6 +104,7 @@ public Int32sRef(int[] ints, int offset, int length) /// object. 
/// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { return new Int32sRef(ints, Offset, Length); @@ -120,15 +122,15 @@ public override int GetHashCode() return result; } - public override bool Equals(object other) + public override bool Equals(object obj) { - if (other == null) + if (obj is null) { return false; } - if (other is Int32sRef) + if (obj is Int32sRef other) { - return this.Int32sEquals((Int32sRef)other); + return this.Int32sEquals(other); } return false; } @@ -213,6 +215,7 @@ public void CopyInt32s(Int32sRef other) /// /// @lucene.internal /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Grow(int newLength) { if (Debugging.AssertsEnabled) Debugging.Assert(Offset == 0); @@ -246,6 +249,7 @@ public override string ToString() /// The returned will have a length of other.Length /// and an offset of zero. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Int32sRef DeepCopyOf(Int32sRef other) { Int32sRef clone = new Int32sRef(); diff --git a/src/Lucene.Net/Util/LongBitSet.cs b/src/Lucene.Net/Util/LongBitSet.cs index ae4f116219..7295513919 100644 --- a/src/Lucene.Net/Util/LongBitSet.cs +++ b/src/Lucene.Net/Util/LongBitSet.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -105,6 +106,7 @@ public Int64BitSet(long[] storedBits, long numBits) /// /// Expert. [WritableArray] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public long[] GetBits() { return bits; @@ -115,6 +117,7 @@ public long[] GetBits() /// long in the backing bits array, and the result is not /// internally cached! 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public long Cardinality() { return BitUtil.Pop_Array(bits, 0, bits.Length); @@ -419,6 +422,7 @@ public void Clear(long startIndex, long endIndex) bits[endWord] &= endmask; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public Int64BitSet Clone() { long[] bits = new long[this.bits.Length]; diff --git a/src/Lucene.Net/Util/LongValues.cs b/src/Lucene.Net/Util/LongValues.cs index 93e5873517..3408da1b9b 100644 --- a/src/Lucene.Net/Util/LongValues.cs +++ b/src/Lucene.Net/Util/LongValues.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util { /* @@ -38,6 +40,7 @@ public abstract class Int64Values : NumericDocValues /// /// Get value at . + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int idx) { return Get((long)idx); diff --git a/src/Lucene.Net/Util/LongsRef.cs b/src/Lucene.Net/Util/LongsRef.cs index 04f77cfc6f..90f07e0c7e 100644 --- a/src/Lucene.Net/Util/LongsRef.cs +++ b/src/Lucene.Net/Util/LongsRef.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Util @@ -57,10 +58,7 @@ public sealed class Int64sRef : IComparable public long[] Int64s { get => longs; - set - { - longs = value ?? throw new ArgumentNullException(nameof(value)); - } + set => longs = value ?? throw new ArgumentNullException(nameof(value)); } private long[] longs; @@ -106,6 +104,7 @@ public Int64sRef(long[] longs, int offset, int length) /// object. 
/// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { return new Int64sRef(longs, Offset, Length); @@ -123,15 +122,15 @@ public override int GetHashCode() return result; } - public override bool Equals(object other) + public override bool Equals(object obj) { - if (other == null) + if (obj == null) { return false; } - if (other is Int64sRef) + if (obj is Int64sRef other) { - return this.Int64sEquals((Int64sRef)other); + return Int64sEquals(other); } return false; } @@ -249,6 +248,7 @@ public override string ToString() /// The returned will have a length of other.Length /// and an offset of zero. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Int64sRef DeepCopyOf(Int64sRef other) { Int64sRef clone = new Int64sRef(); diff --git a/src/Lucene.Net/Util/MergedIterator.cs b/src/Lucene.Net/Util/MergedIterator.cs index 0ef0e4262a..2216c7563d 100644 --- a/src/Lucene.Net/Util/MergedIterator.cs +++ b/src/Lucene.Net/Util/MergedIterator.cs @@ -142,7 +142,7 @@ private void PushTop() } else { - top[i].Current = default(T); + top[i].Current = default; } } numTop = 0; @@ -304,7 +304,7 @@ private void PushTop() } else { - top[i].Current = default(T); + top[i].Current = default; } } numTop = 0; diff --git a/src/Lucene.Net/Util/Mutable/MutableValueBool.cs b/src/Lucene.Net/Util/Mutable/MutableValueBool.cs index a50f2bf1d3..5096914625 100644 --- a/src/Lucene.Net/Util/Mutable/MutableValueBool.cs +++ b/src/Lucene.Net/Util/Mutable/MutableValueBool.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util.Mutable { /* @@ -25,11 +27,13 @@ public class MutableValueBool : MutableValue { public bool Value { get; set; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object ToObject() { return Exists ? 
(object)Value : null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Copy(MutableValue source) { MutableValueBool s = (MutableValueBool)source; @@ -37,6 +41,7 @@ public override void Copy(MutableValue source) Exists = s.Exists; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override MutableValue Duplicate() { return new MutableValueBool @@ -46,6 +51,7 @@ public override MutableValue Duplicate() }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool EqualsSameType(object other) { MutableValueBool b = (MutableValueBool)other; @@ -66,6 +72,7 @@ public override int CompareSameType(object other) return Exists ? 1 : -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return Value ? 2 : (Exists ? 1 : 0); diff --git a/src/Lucene.Net/Util/Mutable/MutableValueDate.cs b/src/Lucene.Net/Util/Mutable/MutableValueDate.cs index 2da80d51e7..db75b6535b 100644 --- a/src/Lucene.Net/Util/Mutable/MutableValueDate.cs +++ b/src/Lucene.Net/Util/Mutable/MutableValueDate.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Mutable { @@ -25,11 +26,13 @@ namespace Lucene.Net.Util.Mutable /// public class MutableValueDate : MutableValueInt64 { + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object ToObject() { return Exists ? 
new DateTime(Value) as object : null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override MutableValue Duplicate() { return new MutableValueDate diff --git a/src/Lucene.Net/Util/Mutable/MutableValueDouble.cs b/src/Lucene.Net/Util/Mutable/MutableValueDouble.cs index 746ddbf0d3..f61494a025 100644 --- a/src/Lucene.Net/Util/Mutable/MutableValueDouble.cs +++ b/src/Lucene.Net/Util/Mutable/MutableValueDouble.cs @@ -1,4 +1,4 @@ -using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Mutable { @@ -27,11 +27,13 @@ public class MutableValueDouble : MutableValue { public double Value { get; set; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object ToObject() { return Exists ? (object)Value : null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Copy(MutableValue source) { MutableValueDouble s = (MutableValueDouble)source; @@ -39,6 +41,7 @@ public override void Copy(MutableValue source) Exists = s.Exists; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override MutableValue Duplicate() { return new MutableValueDouble @@ -48,6 +51,7 @@ public override MutableValue Duplicate() }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool EqualsSameType(object other) { MutableValueDouble b = (MutableValueDouble)other; @@ -73,6 +77,7 @@ public override int CompareSameType(object other) return 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { long x = J2N.BitConversion.DoubleToInt64Bits(Value); diff --git a/src/Lucene.Net/Util/Mutable/MutableValueFloat.cs b/src/Lucene.Net/Util/Mutable/MutableValueFloat.cs index bac0040f6f..59ff786c56 100644 --- a/src/Lucene.Net/Util/Mutable/MutableValueFloat.cs +++ b/src/Lucene.Net/Util/Mutable/MutableValueFloat.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util.Mutable { /* @@ -26,11 +28,13 @@ public class MutableValueSingle : MutableValue { 
public float Value { get; set; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object ToObject() { return Exists ? (object)Value : null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Copy(MutableValue source) { MutableValueSingle s = (MutableValueSingle)source; @@ -38,6 +42,7 @@ public override void Copy(MutableValue source) Exists = s.Exists; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override MutableValue Duplicate() { return new MutableValueSingle @@ -47,6 +52,7 @@ public override MutableValue Duplicate() }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool EqualsSameType(object other) { MutableValueSingle b = (MutableValueSingle)other; @@ -68,6 +74,7 @@ public override int CompareSameType(object other) return Exists ? 1 : -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return J2N.BitConversion.SingleToInt32Bits(Value); diff --git a/src/Lucene.Net/Util/Mutable/MutableValueInt.cs b/src/Lucene.Net/Util/Mutable/MutableValueInt.cs index 9380a7ced3..84dc7f3891 100644 --- a/src/Lucene.Net/Util/Mutable/MutableValueInt.cs +++ b/src/Lucene.Net/Util/Mutable/MutableValueInt.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util.Mutable { /* @@ -26,11 +28,13 @@ public class MutableValueInt32 : MutableValue { public int Value { get; set; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object ToObject() { return Exists ? 
(object)Value : null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Copy(MutableValue source) { MutableValueInt32 s = (MutableValueInt32)source; @@ -38,6 +42,7 @@ public override void Copy(MutableValue source) Exists = s.Exists; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override MutableValue Duplicate() { return new MutableValueInt32 @@ -47,6 +52,7 @@ public override MutableValue Duplicate() }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool EqualsSameType(object other) { MutableValueInt32 b = (MutableValueInt32)other; @@ -74,6 +80,7 @@ public override int CompareSameType(object other) return Exists ? 1 : -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { // TODO: if used in HashMap, it already mixes the value... maybe use a straight value? diff --git a/src/Lucene.Net/Util/Mutable/MutableValueLong.cs b/src/Lucene.Net/Util/Mutable/MutableValueLong.cs index 01e33e917c..af09d716d8 100644 --- a/src/Lucene.Net/Util/Mutable/MutableValueLong.cs +++ b/src/Lucene.Net/Util/Mutable/MutableValueLong.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util.Mutable { /* @@ -26,11 +28,13 @@ public class MutableValueInt64 : MutableValue { public long Value { get; set; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object ToObject() { return Exists ? 
(object)Value : null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Copy(MutableValue source) { MutableValueInt64 s = (MutableValueInt64)source; @@ -38,6 +42,7 @@ public override void Copy(MutableValue source) Value = s.Value; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override MutableValue Duplicate() { return new MutableValueInt64 @@ -47,6 +52,7 @@ public override MutableValue Duplicate() }; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool EqualsSameType(object other) { MutableValueInt64 b = (MutableValueInt64)other; @@ -72,6 +78,7 @@ public override int CompareSameType(object other) return Exists ? 1 : -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return (int)Value + (int)(Value >> 32); diff --git a/src/Lucene.Net/Util/Mutable/MutableValueStr.cs b/src/Lucene.Net/Util/Mutable/MutableValueStr.cs index 7ededd1d70..90e2bdbb77 100644 --- a/src/Lucene.Net/Util/Mutable/MutableValueStr.cs +++ b/src/Lucene.Net/Util/Mutable/MutableValueStr.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util.Mutable { /* @@ -30,11 +32,13 @@ public MutableValueStr() Value = new BytesRef(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object ToObject() { return Exists ? 
Value.Utf8ToString() : null; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Copy(MutableValue source) { MutableValueStr s = (MutableValueStr)source; @@ -42,6 +46,7 @@ public override void Copy(MutableValue source) Value.CopyBytes(s.Value); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override MutableValue Duplicate() { MutableValueStr v = new MutableValueStr(); @@ -50,6 +55,7 @@ public override MutableValue Duplicate() return v; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool EqualsSameType(object other) { MutableValueStr b = (MutableValueStr)other; @@ -71,6 +77,7 @@ public override int CompareSameType(object other) return Exists ? 1 : -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return Value.GetHashCode(); diff --git a/src/Lucene.Net/Util/NumericUtils.cs b/src/Lucene.Net/Util/NumericUtils.cs index d49a1514aa..10e92f78f5 100644 --- a/src/Lucene.Net/Util/NumericUtils.cs +++ b/src/Lucene.Net/Util/NumericUtils.cs @@ -562,6 +562,7 @@ public FilteredTermsEnumAnonymousInnerClassHelper(TermsEnum termsEnum) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override AcceptStatus Accept(BytesRef term) { return NumericUtils.GetPrefixCodedInt64Shift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END; @@ -578,6 +579,7 @@ protected override AcceptStatus Accept(BytesRef term) /// The terms enum to filter /// A filtered that only returns prefix coded 32 bit /// terms with a shift value of 0. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static TermsEnum FilterPrefixCodedInt32s(TermsEnum termsEnum) { return new FilteredTermsEnumAnonymousInnerClassHelper2(termsEnum); @@ -590,6 +592,7 @@ public FilteredTermsEnumAnonymousInnerClassHelper2(TermsEnum termsEnum) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override AcceptStatus Accept(BytesRef term) { return NumericUtils.GetPrefixCodedInt32Shift(term) == 0 ? 
AcceptStatus.YES : AcceptStatus.END; diff --git a/src/Lucene.Net/Util/OfflineSorter.cs b/src/Lucene.Net/Util/OfflineSorter.cs index 459f12a268..f9cffae113 100644 --- a/src/Lucene.Net/Util/OfflineSorter.cs +++ b/src/Lucene.Net/Util/OfflineSorter.cs @@ -6,6 +6,7 @@ using System.Diagnostics; using System.Globalization; using System.IO; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Util @@ -337,6 +338,7 @@ public SortInfo Sort(FileInfo input, FileInfo output) /// Returns the default temporary directory. By default, the System's temp folder. If not accessible /// or not available, an is thrown. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static DirectoryInfo DefaultTempDir() { return new DirectoryInfo(Path.GetTempPath()); @@ -345,6 +347,7 @@ public static DirectoryInfo DefaultTempDir() /// /// Copies one file to another. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void Copy(FileInfo file, FileInfo output) { using Stream inputStream = file.OpenRead(); @@ -447,6 +450,7 @@ public PriorityQueueAnonymousInnerClassHelper(OfflineSorter outerInstance, int s this.outerInstance = outerInstance; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal override bool LessThan(FileAndTop a, FileAndTop b) { return outerInstance.comparer.Compare(a.Current, b.Current) < 0; @@ -539,6 +543,7 @@ public virtual void Write(BytesRef @ref) /// /// Writes a byte array. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Write(byte[] bytes) { Write(bytes, 0, bytes.Length); diff --git a/src/Lucene.Net/Util/OpenBitSet.cs b/src/Lucene.Net/Util/OpenBitSet.cs index e31fed41d8..48e5ec6086 100644 --- a/src/Lucene.Net/Util/OpenBitSet.cs +++ b/src/Lucene.Net/Util/OpenBitSet.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -168,6 +169,7 @@ public override DocIdSetIterator GetIterator() /// /// Expert: returns the storing the bits. [WritableArray] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual long[] GetBits() { return m_bits; @@ -651,6 +653,7 @@ public static int pop(long v0, long v1, long v2, long v3) { /// Get the number of set bits. /// /// The number of set bits. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual long Cardinality() { return BitUtil.Pop_Array(m_bits, 0, m_wlen); @@ -660,6 +663,7 @@ public virtual long Cardinality() /// Returns the popcount or cardinality of the intersection of the two sets. /// Neither set is modified. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long IntersectionCount(OpenBitSet a, OpenBitSet b) { return BitUtil.Pop_Intersect(a.m_bits, b.m_bits, 0, Math.Min(a.m_wlen, b.m_wlen)); @@ -872,6 +876,7 @@ public virtual long PrevSetBit(long index) return -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { //OpenBitSet obs = (OpenBitSet)base.Clone(); @@ -971,18 +976,21 @@ public virtual void Xor(OpenBitSet other) // some BitSet compatability methods /// see + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void And(OpenBitSet other) { Intersect(other); } /// see + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Or(OpenBitSet other) { Union(other); } /// see + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void AndNot(OpenBitSet other) { Remove(other); @@ -1018,6 +1026,7 @@ public virtual void EnsureCapacityWords(int numWords) /// Ensure that the is big enough to hold numBits, expanding it if /// necessary. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void EnsureCapacity(long numBits) { EnsureCapacityWords(Bits2words(numBits)); @@ -1042,6 +1051,7 @@ public virtual void TrimTrailingZeros() /// /// Returns the number of 64 bit words it would take to hold . 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int Bits2words(long numBits) { return (int)(((numBits - 1) >> 6) + 1); diff --git a/src/Lucene.Net/Util/OpenBitSetIterator.cs b/src/Lucene.Net/Util/OpenBitSetIterator.cs index ca8d9cc73c..a09cfe5dec 100644 --- a/src/Lucene.Net/Util/OpenBitSetIterator.cs +++ b/src/Lucene.Net/Util/OpenBitSetIterator.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util { /* @@ -164,6 +166,7 @@ public override int Advance(int target) public override int DocID => curDocId; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return words / 64; diff --git a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs index f59debef32..e98da7cf61 100644 --- a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs +++ b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -57,6 +58,7 @@ private static MonotonicAppendingInt64Buffer LoadSingleZeroBuffer() return buffer; } + [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1810:Initialize reference type static fields inline", Justification = "Complexity")] static PForDeltaDocIdSet() { int maxByteBLockCount = 0; @@ -148,6 +150,7 @@ public virtual Builder Add(DocIdSetIterator it) return this; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void ComputeFreqs() { Arrays.Fill(freqs, 0); @@ -172,6 +175,7 @@ internal virtual int PforBlockSize(int bitsPerValue, int numExceptions, int bits return (int)blockSize; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int UnaryBlockSize() { int deltaSum = 0; @@ -188,6 +192,7 @@ internal virtual int UnaryBlockSize() return blockSize; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int ComputeOptimalNumberOfBits() { ComputeFreqs(); @@ -216,6 
+221,7 @@ internal virtual int ComputeOptimalNumberOfBits() return blockSize; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void PforEncode() { if (numExceptions > 0) @@ -259,6 +265,7 @@ internal virtual void PforEncode() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void UnaryEncode() { int current = 0; @@ -279,6 +286,7 @@ internal virtual void UnaryEncode() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void EncodeBlock() { int originalLength = data.Length; @@ -467,6 +475,7 @@ internal virtual void PforDecompress(byte token) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void UnaryDecompress(byte token) { if (Debugging.AssertsEnabled) Debugging.Assert((token & HAS_EXCEPTIONS) == 0); @@ -482,6 +491,7 @@ internal virtual void UnaryDecompress(byte token) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void DecompressBlock() { var token = data[offset++]; @@ -503,6 +513,7 @@ internal virtual void DecompressBlock() ++blockIdx; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void SkipBlock() { if (Debugging.AssertsEnabled) Debugging.Assert(i == BLOCK_SIZE); @@ -520,6 +531,7 @@ public override int NextDoc() return docID = nextDocs[i++]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int ForwardBinarySearch(int target) { // advance forward and double the window at each step @@ -596,6 +608,7 @@ public override int Advance(int target) return SlowAdvance(target); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return cardinality; @@ -604,6 +617,7 @@ public override long GetCost() /// /// Return the number of documents in this in constant time. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int Cardinality() { return cardinality; @@ -611,6 +625,7 @@ public int Cardinality() /// /// Return the memory usage of this instance. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public long RamBytesUsed() { return RamUsageEstimator.AlignObjectSize(3 * RamUsageEstimator.NUM_BYTES_OBJECT_REF) + docIDs.RamBytesUsed() + offsets.RamBytesUsed(); diff --git a/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs index d6345143ca..d96859a50a 100644 --- a/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs +++ b/src/Lucene.Net/Util/Packed/AbstractAppendingLongBuffer.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -101,6 +102,7 @@ public void Add(long l) pending[pendingOff++] = l; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void Grow(int newBlockCount) { Array.Resize(ref values, newBlockCount); @@ -215,6 +217,7 @@ public long Next() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual long BaseRamBytesUsed() { return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER diff --git a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs index 388bc4de39..294e5149d9 100644 --- a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs @@ -31,6 +31,7 @@ public abstract class AbstractBlockPackedWriter // LUCENENET NOTE: made public r internal const int MIN_VALUE_EQUALS_0 = 1 << 0; internal const int BPV_SHIFT = 1; + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static long ZigZagEncode(long n) { return (n >> 63) ^ (n << 1); @@ -40,6 +41,7 @@ internal static long ZigZagEncode(long n) /// /// NOTE: This was writeVLong() in Lucene. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void WriteVInt64(DataOutput @out, long i) { int k = 0; @@ -70,6 +72,7 @@ protected AbstractBlockPackedWriter(DataOutput @out, int blockSize) // LUCENENET /// /// Reset this writer to wrap . The block size remains unchanged. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Reset(DataOutput @out) { if (Debugging.AssertsEnabled) Debugging.Assert(@out != null); @@ -79,6 +82,7 @@ public virtual void Reset(DataOutput @out) m_finished = false; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void CheckNotFinished() { if (m_finished) diff --git a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs index 874ff9a6c9..0a9a670959 100644 --- a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs +++ b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -59,6 +60,7 @@ protected void FillPages() protected abstract PackedInt32s.Mutable NewMutable(int valueCount, int bitsPerValue); + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal int LastPageSize(long size) { int sz = IndexInPage(size); @@ -74,11 +76,13 @@ internal int LastPageSize(long size) /// public long Count => size; + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal int PageIndex(long index) { return (int)((long)((ulong)index >> pageShift)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal int IndexInPage(long index) { return (int)index & pageMask; @@ -167,6 +171,7 @@ public T Grow(long minSize) /// /// Similar to . 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public T Grow() { return Grow(Count + 1); diff --git a/src/Lucene.Net/Util/Packed/AppendingDeltaPackedLongBuffer.cs b/src/Lucene.Net/Util/Packed/AppendingDeltaPackedLongBuffer.cs index aa38829b33..108012781a 100644 --- a/src/Lucene.Net/Util/Packed/AppendingDeltaPackedLongBuffer.cs +++ b/src/Lucene.Net/Util/Packed/AppendingDeltaPackedLongBuffer.cs @@ -1,5 +1,6 @@ using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -133,17 +134,20 @@ internal override void PackPendingValues() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal override void Grow(int newBlockCount) { base.Grow(newBlockCount); this.minValues = Arrays.CopyOf(minValues, newBlockCount); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal override long BaseRamBytesUsed() { return base.BaseRamBytesUsed() + RamUsageEstimator.NUM_BYTES_OBJECT_REF; // additional array } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return base.RamBytesUsed() + RamUsageEstimator.SizeOf(minValues); diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs index 2e3803bd87..1b43ac8e7b 100644 --- a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs +++ b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -33,6 +34,7 @@ namespace Lucene.Net.Util.Packed /// public sealed class BlockPackedReaderIterator { + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static long ZigZagDecode(long n) { return (((long)((ulong)n >> 1)) ^ -(n & 1)); @@ -42,6 +44,7 @@ internal static long ZigZagDecode(long n) /// /// NOTE: This was readVLong() in Lucene. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static long ReadVInt64(DataInput @in) { byte b = @in.ReadByte(); @@ -186,6 +189,7 @@ public void Skip(long count) off += (int)count; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SkipBytes(long count) { if (@in is IndexInput input) diff --git a/src/Lucene.Net/Util/Packed/Direct16.cs b/src/Lucene.Net/Util/Packed/Direct16.cs index b65f8e3ca5..2418337894 100644 --- a/src/Lucene.Net/Util/Packed/Direct16.cs +++ b/src/Lucene.Net/Util/Packed/Direct16.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; // this file has been automatically generated, DO NOT EDIT @@ -55,11 +56,13 @@ internal Direct16(int packedIntsVersion, DataInput @in, int valueCount) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int index) { return values[index] & 0xFFFFL; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Set(int index, long value) { values[index] = (short)(value); @@ -74,11 +77,13 @@ public override long RamBytesUsed() + RamUsageEstimator.SizeOf(values); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { Arrays.Fill(values, (short)0L); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object GetArray() { return values; @@ -120,6 +125,7 @@ public override int Set(int index, long[] arr, int off, int len) return sets; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Fill(int fromIndex, int toIndex, long val) { if (Debugging.AssertsEnabled) Debugging.Assert(val == (val & 0xFFFFL)); diff --git a/src/Lucene.Net/Util/Packed/Direct32.cs b/src/Lucene.Net/Util/Packed/Direct32.cs index a4d3f1e6ee..9dd7dc6670 100644 --- a/src/Lucene.Net/Util/Packed/Direct32.cs +++ b/src/Lucene.Net/Util/Packed/Direct32.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using 
System.Runtime.CompilerServices; // this file has been automatically generated, DO NOT EDIT @@ -55,11 +56,13 @@ internal Direct32(int packedIntsVersion, DataInput @in, int valueCount) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int index) { return values[index] & 0xFFFFFFFFL; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Set(int index, long value) { values[index] = (int)(value); @@ -74,11 +77,13 @@ public override long RamBytesUsed() + RamUsageEstimator.SizeOf(values); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { Arrays.Fill(values, (int)0L); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object GetArray() { return values; @@ -120,6 +125,7 @@ public override int Set(int index, long[] arr, int off, int len) return sets; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Fill(int fromIndex, int toIndex, long val) { if (Debugging.AssertsEnabled) Debugging.Assert(val == (val & 0xFFFFFFFFL)); diff --git a/src/Lucene.Net/Util/Packed/Direct64.cs b/src/Lucene.Net/Util/Packed/Direct64.cs index e32c15048d..c32b08b631 100644 --- a/src/Lucene.Net/Util/Packed/Direct64.cs +++ b/src/Lucene.Net/Util/Packed/Direct64.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; // this file has been automatically generated, DO NOT EDIT @@ -49,11 +50,13 @@ internal Direct64(/*int packedIntsVersion,*/ DataInput @in, int valueCount) // L } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int index) { return values[index]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Set(int index, long value) { values[index] = (value); @@ -68,11 +71,13 @@ public override long RamBytesUsed() + RamUsageEstimator.SizeOf(values); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { 
Arrays.Fill(values, 0L); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object GetArray() { return values; @@ -108,6 +113,7 @@ public override int Set(int index, long[] arr, int off, int len) return sets; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Fill(int fromIndex, int toIndex, long val) { Arrays.Fill(values, fromIndex, toIndex, val); diff --git a/src/Lucene.Net/Util/Packed/Direct8.cs b/src/Lucene.Net/Util/Packed/Direct8.cs index cd464c446e..220bae7e3e 100644 --- a/src/Lucene.Net/Util/Packed/Direct8.cs +++ b/src/Lucene.Net/Util/Packed/Direct8.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; // this file has been automatically generated, DO NOT EDIT @@ -32,7 +33,7 @@ namespace Lucene.Net.Util.Packed /// internal sealed class Direct8 : PackedInt32s.MutableImpl { - readonly byte[] values; + private readonly byte[] values; internal Direct8(int valueCount) : base(valueCount, 8) @@ -52,11 +53,13 @@ internal Direct8(int packedIntsVersion, DataInput @in, int valueCount) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int index) { return values[index] & 0xFFL; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Set(int index, long value) { values[index] = (byte)(value); @@ -71,11 +74,13 @@ public override long RamBytesUsed() + RamUsageEstimator.SizeOf(values); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { Arrays.Fill(values, (byte)0L); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object GetArray() { return values; @@ -117,6 +122,7 @@ public override int Set(int index, long[] arr, int off, int len) return sets; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Fill(int fromIndex, int toIndex, long val) { if (Debugging.AssertsEnabled) Debugging.Assert(val == (val & 0xFFL)); diff --git 
a/src/Lucene.Net/Util/Packed/DirectPacked64SingleBlockReader.cs b/src/Lucene.Net/Util/Packed/DirectPacked64SingleBlockReader.cs index 1d773b4bbb..b4ecd33fa6 100644 --- a/src/Lucene.Net/Util/Packed/DirectPacked64SingleBlockReader.cs +++ b/src/Lucene.Net/Util/Packed/DirectPacked64SingleBlockReader.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -56,6 +57,7 @@ public override long Get(int index) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return 0; diff --git a/src/Lucene.Net/Util/Packed/DirectPackedReader.cs b/src/Lucene.Net/Util/Packed/DirectPackedReader.cs index 31f59f52c9..6829ed6b4c 100644 --- a/src/Lucene.Net/Util/Packed/DirectPackedReader.cs +++ b/src/Lucene.Net/Util/Packed/DirectPackedReader.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -113,6 +114,7 @@ public override long Get(int index) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return 0; diff --git a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs index 50921328ad..12a00a3a2f 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs @@ -2,6 +2,7 @@ using Lucene.Net.Diagnostics; using System; using System.Globalization; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -92,12 +93,14 @@ public virtual long CurrentIndex() /// /// This is only intended for use after returned true. /// The value encoded at . + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual long CurrentValue() { return CombineHighLowValues(CurrentHighValue(), CurrentLowValue()); } /// The high value for the current decoding index. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] private long CurrentHighValue() { return setBitForIndex - efIndex; // sequence of unary gaps @@ -124,6 +127,7 @@ private static long UnPackValue(long[] longArray, int numBits, long packIndex, l } /// The low value for the current decoding index. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private long CurrentLowValue() { if (Debugging.AssertsEnabled) Debugging.Assert(((efIndex >= 0) && (efIndex < numEncoded)), "efIndex {0}", efIndex); @@ -132,6 +136,7 @@ private long CurrentLowValue() /// The given shifted left by the number of low bits from by the EliasFanoSequence, /// logically OR-ed with the given . + [MethodImpl(MethodImplOptions.AggressiveInlining)] private long CombineHighLowValues(long highValue, long lowValue) { return (highValue << efEncoder.numLowBits) | lowValue; @@ -156,6 +161,7 @@ private long CombineHighLowValues(long highValue, long lowValue) /// /// Set the decoding index to just before the first encoded value. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void ToBeforeSequence() { efIndex = -1; @@ -220,6 +226,7 @@ private void ToNextHighValue() /// and have just been incremented, scan to the next high set bit /// by incrementing , and by setting accordingly. /// The next encoded high value. + [MethodImpl(MethodImplOptions.AggressiveInlining)] private long NextHighValue() { ToNextHighValue(); @@ -414,6 +421,7 @@ public virtual long AdvanceToValue(long target) /// /// Set the decoding index to just after the last encoded value. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void ToAfterSequence() { efIndex = numEncoded; // just after last index diff --git a/src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs b/src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs index 0a8276ad4e..ddcd907370 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -46,6 +47,7 @@ public EliasFanoDocIdSet(int numValues, int upperBound) /// The number of document identifiers that is to be encoded. Should be non negative. /// The maximum possible value for a document identifier. Should be at least . /// See + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool SufficientlySmallerThanBitSet(long numValues, long upperBound) { return EliasFanoEncoder.SufficientlySmallerThanBitSet(numValues, upperBound); @@ -93,22 +95,26 @@ public DocIdSetIteratorAnonymousInnerClassHelper(EliasFanoDocIdSet outerInstance public override int DocID => curDocId; + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int SetCurDocID(long value) { curDocId = (value == EliasFanoDecoder.NO_MORE_VALUES) ? 
NO_MORE_DOCS : (int)value; return curDocId; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int NextDoc() { return SetCurDocID(efDecoder.NextValue()); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Advance(int target) { return SetCurDocID(efDecoder.AdvanceToValue(target)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return efDecoder.NumEncoded; @@ -120,11 +126,13 @@ public override long GetCost() /// true public override bool IsCacheable => true; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool Equals(object other) { return (other is EliasFanoDocIdSet otherEncoder) && efEncoder.Equals(otherEncoder.efEncoder); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int GetHashCode() { return efEncoder.GetHashCode() ^ this.GetType().GetHashCode(); diff --git a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs index 3cb2699db9..6457e84613 100644 --- a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs +++ b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs @@ -4,6 +4,7 @@ using System; using System.Diagnostics.CodeAnalysis; using System.Globalization; +using System.Runtime.CompilerServices; using System.Text; namespace Lucene.Net.Util.Packed @@ -218,6 +219,7 @@ public EliasFanoEncoder(long numValues, long upperBound) /// /// NOTE: This was numLongsForBits() in Lucene. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static long NumInt64sForBits(long numBits) // Note: int version in FixedBitSet.bits2words() { if (Debugging.AssertsEnabled) Debugging.Assert(numBits >= 0, "{0}", numBits); @@ -263,17 +265,20 @@ public virtual void EncodeNext(long x) numEncoded++; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EncodeUpperBits(long highValue) { long nextHighBitNum = numEncoded + highValue; // sequence of unary gaps upperLongs[(int)((long)((ulong)nextHighBitNum >> LOG2_INT64_SIZE))] |= (1L << (int)(nextHighBitNum & ((sizeof(long) * 8) - 1))); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void EncodeLowerBits(long lowValue) { PackValue(lowValue, lowerLongs, numLowBits, numEncoded); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void PackValue(long value, long[] longArray, int numBits, long packIndex) { if (numBits != 0) @@ -318,6 +323,7 @@ public static bool SufficientlySmallerThanBitSet(long numValues, long upperBound /// Returns an to access the encoded values. /// Perform all calls to before calling . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual EliasFanoDecoder GetDecoder() { // decode as far as currently encoded as determined by numEncoded. diff --git a/src/Lucene.Net/Util/Packed/GrowableWriter.cs b/src/Lucene.Net/Util/Packed/GrowableWriter.cs index 4b4cc2a27d..ffdbea002b 100644 --- a/src/Lucene.Net/Util/Packed/GrowableWriter.cs +++ b/src/Lucene.Net/Util/Packed/GrowableWriter.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -46,11 +47,13 @@ public GrowableWriter(int startBitsPerValue, int valueCount, float acceptableOve currentMask = Mask(current.BitsPerValue); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static long Mask(int bitsPerValue) { return bitsPerValue == 64 ? 
~0L : PackedInt32s.MaxValue(bitsPerValue); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int index) { return current.Get(index); @@ -62,6 +65,7 @@ public override long Get(int index) public virtual PackedInt32s.Mutable Mutable => current; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object GetArray() { return current.GetArray(); @@ -84,12 +88,14 @@ private void EnsureCapacity(long value) currentMask = Mask(current.BitsPerValue); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Set(int index, long value) { EnsureCapacity(value); current.Set(index, value); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { current.Clear(); @@ -103,6 +109,7 @@ public virtual GrowableWriter Resize(int newSize) return next; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Get(int index, long[] arr, int off, int len) { return current.Get(index, arr, off, len); @@ -123,6 +130,7 @@ public override int Set(int index, long[] arr, int off, int len) return current.Set(index, arr, off, len); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Fill(int fromIndex, int toIndex, long val) { EnsureCapacity(val); @@ -139,6 +147,7 @@ public override long RamBytesUsed() + current.RamBytesUsed(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Save(DataOutput @out) { current.Save(@out); diff --git a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs index 869d78a2d1..ffb8afb7fb 100644 --- a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs +++ b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -32,11 +33,13 @@ namespace Lucene.Net.Util.Packed /// public sealed 
class MonotonicAppendingInt64Buffer : AbstractAppendingInt64Buffer { + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static long ZigZagDecode(long n) { return (((long)((ulong)n >> 1)) ^ -(n & 1)); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static long ZigZagEncode(long n) { return (n >> 63) ^ (n << 1); @@ -128,6 +131,7 @@ internal override int Get(int block, int element, long[] arr, int off, int len) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal override void Grow(int newBlockCount) { base.Grow(newBlockCount); @@ -175,11 +179,13 @@ internal override void PackPendingValues() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal override long BaseRamBytesUsed() { return base.BaseRamBytesUsed() + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF; // 2 additional arrays } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return base.RamBytesUsed() + RamUsageEstimator.SizeOf(averages) + RamUsageEstimator.SizeOf(minValues); diff --git a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs index 5176fbd9a2..6ba982b565 100644 --- a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedWriter.cs @@ -64,6 +64,7 @@ public MonotonicBlockPackedWriter(DataOutput @out, int blockSize) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Add(long l) { if (Debugging.AssertsEnabled) Debugging.Assert(l >= 0); diff --git a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs index 77b2af7c16..a19570ee0a 100644 --- a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs +++ b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; // this file has been automatically generated, DO NOT 
EDIT @@ -61,6 +62,7 @@ internal Packed16ThreeBlocks(int packedIntsVersion, DataInput @in, int valueCoun } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int index) { int o = index * 3; @@ -125,6 +127,7 @@ public override void Fill(int fromIndex, int toIndex, long val) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { Arrays.Fill(blocks, (short)0); diff --git a/src/Lucene.Net/Util/Packed/Packed64.cs b/src/Lucene.Net/Util/Packed/Packed64.cs index 3d62878ef0..9c21f70765 100644 --- a/src/Lucene.Net/Util/Packed/Packed64.cs +++ b/src/Lucene.Net/Util/Packed/Packed64.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -338,6 +339,7 @@ private static int Gcd(int a, int b) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { Arrays.Fill(blocks, 0L); diff --git a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs index 30e5362fda..29de9190b5 100644 --- a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs +++ b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; // this file has been automatically generated, DO NOT EDIT @@ -35,11 +36,13 @@ internal abstract class Packed64SingleBlock : PackedInt32s.MutableImpl public const int MAX_SUPPORTED_BITS_PER_VALUE = 32; private static readonly int[] SUPPORTED_BITS_PER_VALUE = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 16, 21, 32 }; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool IsSupported(int bitsPerValue) { return Array.BinarySearch(SUPPORTED_BITS_PER_VALUE, bitsPerValue) >= 0; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int RequiredCapacity(int valueCount, int valuesPerBlock) { return valueCount / 
valuesPerBlock + (valueCount % valuesPerBlock == 0 ? 0 : 1); @@ -55,6 +58,7 @@ internal Packed64SingleBlock(int valueCount, int bitsPerValue) blocks = new long[RequiredCapacity(valueCount, valuesPerBlock)]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { Arrays.Fill(blocks, 0L); diff --git a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs index 72f4da6672..4ea8619304 100644 --- a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs +++ b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs @@ -1,6 +1,7 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; using System; +using System.Runtime.CompilerServices; // this file has been automatically generated, DO NOT EDIT @@ -32,7 +33,7 @@ namespace Lucene.Net.Util.Packed /// internal sealed class Packed8ThreeBlocks : PackedInt32s.MutableImpl { - readonly byte[] blocks; + private readonly byte[] blocks; public const int MAX_SIZE = int.MaxValue / 3; @@ -58,6 +59,7 @@ internal Packed8ThreeBlocks(int packedIntsVersion, DataInput @in, int valueCount } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int index) { int o = index * 3; @@ -122,6 +124,7 @@ public override void Fill(int fromIndex, int toIndex, long val) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Clear() { Arrays.Fill(blocks, (byte)0); diff --git a/src/Lucene.Net/Util/Packed/PackedDataInput.cs b/src/Lucene.Net/Util/Packed/PackedDataInput.cs index 005fe259f5..3a64ffd039 100644 --- a/src/Lucene.Net/Util/Packed/PackedDataInput.cs +++ b/src/Lucene.Net/Util/Packed/PackedDataInput.cs @@ -1,6 +1,6 @@ using Lucene.Net.Diagnostics; using System; -using System.Globalization; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -74,6 +74,7 @@ public long ReadInt64(int bitsPerValue) /// If there are pending bits (at most 7), they will be ignored and the next /// value will be read starting at the next 
byte. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void SkipToNextByte() { remainingBits = 0; diff --git a/src/Lucene.Net/Util/Packed/PackedInts.cs b/src/Lucene.Net/Util/Packed/PackedInts.cs index 7f4dd71070..579d963147 100644 --- a/src/Lucene.Net/Util/Packed/PackedInts.cs +++ b/src/Lucene.Net/Util/Packed/PackedInts.cs @@ -3,8 +3,8 @@ using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Globalization; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -41,7 +41,7 @@ namespace Lucene.Net.Util.Packed /// /// @lucene.internal /// - public class PackedInt32s + public static class PackedInt32s // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// /// At most 700% memory overhead, always select a direct implementation. @@ -122,6 +122,7 @@ public PackedSingleBlockFormat() /// /// NOTE: This was longCount() in Lucene. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override int Int64Count(int packedIntsVersion, int valueCount, int bitsPerValue) { int valuesPerBlock = 64 / bitsPerValue; @@ -132,6 +133,7 @@ public override int Int64Count(int packedIntsVersion, int valueCount, int bitsPe /// Tests whether the provided number of bits per value is supported by the /// format. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool IsSupported(int bitsPerValue) { return Packed64SingleBlock.IsSupported(bitsPerValue); @@ -189,6 +191,7 @@ public static Format ById(int id) throw new ArgumentException("Unknown format id: " + id); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal Format(int id) { this.Id = id; @@ -203,6 +206,7 @@ internal Format(int id) /// Computes how many blocks are needed to store /// values of size . 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual long ByteCount(int packedIntsVersion, int valueCount, int bitsPerValue) { if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerValue >= 0 && bitsPerValue <= 64, "{0}", bitsPerValue); @@ -231,6 +235,7 @@ public virtual int Int64Count(int packedIntsVersion, int valueCount, int bitsPer /// Tests whether the provided number of bits per value is supported by the /// format. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual bool IsSupported(int bitsPerValue) { return bitsPerValue >= 1 && bitsPerValue <= 64; @@ -239,6 +244,7 @@ public virtual bool IsSupported(int bitsPerValue) /// /// Returns the overhead per value, in bits. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual float OverheadPerValue(int bitsPerValue) { if (Debugging.AssertsEnabled) Debugging.Assert(IsSupported(bitsPerValue)); @@ -248,6 +254,7 @@ public virtual float OverheadPerValue(int bitsPerValue) /// /// Returns the overhead ratio (overhead per value / bits per value). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual float OverheadRatio(int bitsPerValue) { if (Debugging.AssertsEnabled) Debugging.Assert(IsSupported(bitsPerValue)); @@ -707,6 +714,7 @@ public virtual void Fill(int fromIndex, int toIndex, long val) /// /// Sets all values to 0. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Clear() { Fill(0, Count, 0); @@ -787,6 +795,7 @@ public NullReader(int valueCount) this.valueCount = valueCount; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long Get(int index) { return 0; @@ -808,6 +817,7 @@ public override int Get(int index, long[] arr, int off, int len) public override int Count => valueCount; + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long RamBytesUsed() { return RamUsageEstimator.AlignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT32); @@ -876,6 +886,7 @@ internal virtual void WriteHeader() /// The compatibility version. /// The number of bits per value. /// A decoder. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static IDecoder GetDecoder(Format format, int version, int bitsPerValue) { CheckVersion(version); @@ -889,6 +900,7 @@ public static IDecoder GetDecoder(Format format, int version, int bitsPerValue) /// The compatibility version. /// The number of bits per value. /// An encoder. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static IEncoder GetEncoder(Format format, int version, int bitsPerValue) { CheckVersion(version); @@ -969,6 +981,7 @@ public static Reader GetReaderNoHeader(DataInput @in, Format format, int version /// A . /// If there is a low-level I/O error. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Reader GetReaderNoHeader(DataInput @in, Header header) { return GetReaderNoHeader(@in, header.format, header.version, header.valueCount, header.bitsPerValue); @@ -1009,6 +1022,7 @@ public static Reader GetReader(DataInput @in) /// How much memory the iterator is allowed to use to read-ahead (likely to speed up iteration). /// A . 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static IReaderIterator GetReaderIteratorNoHeader(DataInput @in, Format format, int version, int valueCount, int bitsPerValue, int mem) { CheckVersion(version); @@ -1129,6 +1143,7 @@ public override long Get(int index) /// A . /// If there is a low-level I/O error. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Reader GetDirectReaderNoHeader(IndexInput @in, Header header) { return GetDirectReaderNoHeader(@in, header.format, header.version, header.valueCount, header.bitsPerValue); @@ -1176,6 +1191,7 @@ public static Reader GetDirectReader(IndexInput @in) /// An acceptable overhead /// ratio per value. /// A mutable packed integer array. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Mutable GetMutable(int valueCount, int bitsPerValue, float acceptableOverheadRatio) { FormatAndBits formatAndBits = FastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); @@ -1277,6 +1293,7 @@ public static Mutable GetMutable(int valueCount, int bitsPerValue, PackedInt32s. /// A . /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Writer GetWriterNoHeader(DataOutput @out, Format format, int valueCount, int bitsPerValue, int mem) { return new PackedWriter(format, @out, valueCount, bitsPerValue, mem); @@ -1350,6 +1367,7 @@ public static int BitsRequired(long maxValue) /// /// The number of bits available for any given value. /// The maximum value for the given bits. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long MaxValue(int bitsPerValue) { return bitsPerValue == 64 ? long.MaxValue : ~(~0L << bitsPerValue); @@ -1385,6 +1403,7 @@ public static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int l /// /// Same as but using a pre-allocated buffer. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void Copy(Reader src, int srcPos, Mutable dest, int destPos, int len, long[] buf) { if (Debugging.AssertsEnabled) Debugging.Assert(buf.Length > 0); @@ -1456,6 +1475,7 @@ public Header(Format format, int valueCount, int bitsPerValue, int version) /// Check that the block size is a power of 2, in the right bounds, and return /// its log in base 2. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int CheckBlockSize(int blockSize, int minBlockSize, int maxBlockSize) { if (blockSize < minBlockSize || blockSize > maxBlockSize) @@ -1473,6 +1493,7 @@ internal static int CheckBlockSize(int blockSize, int minBlockSize, int maxBlock /// Return the number of blocks required to store values on /// . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int NumBlocks(long size, int blockSize) { int numBlocks = (int)(size / blockSize) + (size % blockSize == 0 ? 0 : 1); diff --git a/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs index 71d388d529..e87c020cf5 100644 --- a/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs +++ b/src/Lucene.Net/Util/Packed/PackedReaderIterator.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -48,6 +49,7 @@ internal PackedReaderIterator(PackedInt32s.Format format, int packedIntsVersion, position = -1; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int Iterations(int mem) { int iterations = bulkOperation.ComputeIterations(m_valueCount, mem); diff --git a/src/Lucene.Net/Util/Packed/PackedWriter.cs b/src/Lucene.Net/Util/Packed/PackedWriter.cs index ee71f34f7b..bafb449a5a 100644 --- a/src/Lucene.Net/Util/Packed/PackedWriter.cs +++ b/src/Lucene.Net/Util/Packed/PackedWriter.cs @@ -1,6 +1,5 @@ using Lucene.Net.Diagnostics; using Lucene.Net.Support; -using 
System.Globalization; using System.IO; using System.Runtime.CompilerServices; diff --git a/src/Lucene.Net/Util/Packed/PagedGrowableWriter.cs b/src/Lucene.Net/Util/Packed/PagedGrowableWriter.cs index fbc85ffe2c..e0622cad9c 100644 --- a/src/Lucene.Net/Util/Packed/PagedGrowableWriter.cs +++ b/src/Lucene.Net/Util/Packed/PagedGrowableWriter.cs @@ -1,3 +1,5 @@ +using System.Runtime.CompilerServices; + namespace Lucene.Net.Util.Packed { /* @@ -55,16 +57,19 @@ internal PagedGrowableWriter(long size, int pageSize, int startBitsPerValue, flo } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override Mutable NewMutable(int valueCount, int bitsPerValue) { return new GrowableWriter(bitsPerValue, valueCount, acceptableOverheadRatio); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override PagedGrowableWriter NewUnfilledCopy(long newSize) { return new PagedGrowableWriter(newSize, PageSize, bitsPerValue, acceptableOverheadRatio, false); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override long BaseRamBytesUsed() { return base.BaseRamBytesUsed() + RamUsageEstimator.NUM_BYTES_SINGLE; diff --git a/src/Lucene.Net/Util/Packed/PagedMutable.cs b/src/Lucene.Net/Util/Packed/PagedMutable.cs index 2a08f45df1..40f4649b68 100644 --- a/src/Lucene.Net/Util/Packed/PagedMutable.cs +++ b/src/Lucene.Net/Util/Packed/PagedMutable.cs @@ -1,4 +1,5 @@ using Lucene.Net.Diagnostics; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Packed { @@ -56,17 +57,20 @@ internal PagedMutable(long size, int pageSize, int bitsPerValue, PackedInt32s.Fo this.format = format; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override Mutable NewMutable(int valueCount, int bitsPerValue) { if (Debugging.AssertsEnabled) Debugging.Assert(this.bitsPerValue >= bitsPerValue); return PackedInt32s.GetMutable(valueCount, this.bitsPerValue, format); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override PagedMutable 
NewUnfilledCopy(long newSize) { return new PagedMutable(newSize, PageSize, bitsPerValue, format); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected override long BaseRamBytesUsed() { return base.BaseRamBytesUsed() + RamUsageEstimator.NUM_BYTES_OBJECT_REF; diff --git a/src/Lucene.Net/Util/PagedBytes.cs b/src/Lucene.Net/Util/PagedBytes.cs index ba267c61fc..6436e63ed6 100644 --- a/src/Lucene.Net/Util/PagedBytes.cs +++ b/src/Lucene.Net/Util/PagedBytes.cs @@ -2,7 +2,7 @@ using Lucene.Net.Support; using System; using System.Collections.Generic; -using System.Globalization; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -281,6 +281,7 @@ public long GetPointer() /// /// Return approx RAM usage in bytes. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public long RamBytesUsed() { return (blocks.Count + (currentBlock != null ? 1 : 0)) * bytesUsedPerBlock; @@ -353,6 +354,7 @@ public override object Clone() /// /// Returns the current byte position. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public long GetPosition() { return (long)currentBlockIndex * outerInstance.blockSize + currentBlockUpto; @@ -402,6 +404,7 @@ public override void ReadBytes(byte[] b, int offset, int len) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void NextBlock() { currentBlockIndex++; @@ -479,6 +482,7 @@ public override void WriteBytes(byte[] b, int offset, int length) /// /// Return the current byte position. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public long GetPosition() { return outerInstance.GetPointer(); @@ -504,6 +508,7 @@ public PagedBytesDataInput GetDataInput() /// not call the other writing methods (eg, copy); /// results are undefined. 
/// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public PagedBytesDataOutput GetDataOutput() { if (frozen) diff --git a/src/Lucene.Net/Util/PrintStreamInfoStream.cs b/src/Lucene.Net/Util/PrintStreamInfoStream.cs index d6dcaba514..fb1a1b4b96 100644 --- a/src/Lucene.Net/Util/PrintStreamInfoStream.cs +++ b/src/Lucene.Net/Util/PrintStreamInfoStream.cs @@ -2,7 +2,7 @@ using Lucene.Net.Support.IO; using System; using System.IO; -using System.Reflection; +using System.Runtime.CompilerServices; using System.Threading; using Console = Lucene.Net.Util.SystemConsole; @@ -72,11 +72,13 @@ public TextWriterInfoStream(TextWriter stream, int messageID) this.m_messageID = messageID; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Message(string component, string message) { m_stream.Write(component + " " + m_messageID + " [" + DateTime.Now + "; " + Thread.CurrentThread.Name + "]: " + message); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override bool IsEnabled(string component) { return true; diff --git a/src/Lucene.Net/Util/PriorityQueue.cs b/src/Lucene.Net/Util/PriorityQueue.cs index 7085b501bb..f2270a3966 100644 --- a/src/Lucene.Net/Util/PriorityQueue.cs +++ b/src/Lucene.Net/Util/PriorityQueue.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -145,6 +146,7 @@ protected PriorityQueue(int maxSize, bool prepopulate) // LUCENENET specific - m /// /// The sentinel object to use to pre-populate the queue, or null if /// sentinel objects are not supported. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual T GetSentinelObject() { return default; @@ -260,6 +262,7 @@ public T Pop() /// /// /// The new 'top' element. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public T UpdateTop() { DownHeap(); @@ -274,6 +277,7 @@ public T UpdateTop() /// /// Removes all entries from the . 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Clear() { for (int i = 0; i <= size; i++) diff --git a/src/Lucene.Net/Util/QueryBuilder.cs b/src/Lucene.Net/Util/QueryBuilder.cs index ab9e73059e..85905b6fa5 100644 --- a/src/Lucene.Net/Util/QueryBuilder.cs +++ b/src/Lucene.Net/Util/QueryBuilder.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -70,6 +71,7 @@ public QueryBuilder(Analyzer analyzer) /// Text to be passed to the analyzer. /// or , based on the analysis /// of . + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Query CreateBooleanQuery(string field, string queryText) { return CreateBooleanQuery(field, queryText, Occur.SHOULD); @@ -100,6 +102,7 @@ public virtual Query CreateBooleanQuery(string field, string queryText, Occur @o /// Text to be passed to the analyzer. /// , , , or /// , based on the analysis of . + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Query CreatePhraseQuery(string field, string queryText) { return CreatePhraseQuery(field, queryText, 0); @@ -113,6 +116,7 @@ public virtual Query CreatePhraseQuery(string field, string queryText) /// number of other words permitted between words in query phrase /// , , , or /// , based on the analysis of . 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual Query CreatePhraseQuery(string field, string queryText, int phraseSlop) { return CreateFieldQuery(analyzer, Occur.MUST, field, queryText, true, phraseSlop); @@ -140,9 +144,8 @@ public virtual Query CreateMinShouldMatchQuery(string field, string queryText, f } Query query = CreateFieldQuery(analyzer, Occur.SHOULD, field, queryText, false, 0); - if (query is BooleanQuery) + if (query is BooleanQuery bq) { - BooleanQuery bq = (BooleanQuery)query; bq.MinimumNumberShouldMatch = (int)(fraction * bq.Clauses.Count); } return query; @@ -195,7 +198,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel int numTokens = 0; int positionCount = 0; bool severalTokensAtSamePosition = false; - bool hasMoreTokens = false; + bool hasMoreTokens/* = false*/; // LUCENENET: IDE0059: Remove unnecessary value assignment TokenStream source = null; try @@ -439,6 +442,7 @@ protected Query CreateFieldQuery(Analyzer analyzer, Occur @operator, string fiel /// /// Disable coord. /// New instance. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual BooleanQuery NewBooleanQuery(bool disableCoord) { return new BooleanQuery(disableCoord); @@ -451,6 +455,7 @@ protected virtual BooleanQuery NewBooleanQuery(bool disableCoord) /// /// Term. /// New instance. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual Query NewTermQuery(Term term) { return new TermQuery(term); @@ -462,6 +467,7 @@ protected virtual Query NewTermQuery(Term term) /// This is intended for subclasses that wish to customize the generated queries. /// /// New instance. + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual PhraseQuery NewPhraseQuery() { return new PhraseQuery(); @@ -473,6 +479,7 @@ protected virtual PhraseQuery NewPhraseQuery() /// This is intended for subclasses that wish to customize the generated queries. /// /// New instance. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] protected virtual MultiPhraseQuery NewMultiPhraseQuery() { return new MultiPhraseQuery(); diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs index 4928e99bd6..6252b310a2 100644 --- a/src/Lucene.Net/Util/RamUsageEstimator.cs +++ b/src/Lucene.Net/Util/RamUsageEstimator.cs @@ -283,6 +283,7 @@ public ClassCache(long alignedShallowInstanceSize, FieldInfo[] referenceFields) /// /// Aligns an object size to be the next multiple of . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long AlignObjectSize(long size) { size += (long)NUM_BYTES_OBJECT_ALIGNMENT - 1L; @@ -292,6 +293,7 @@ public static long AlignObjectSize(long size) /// /// Returns the size in bytes of the object. // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(byte[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + arr.Length); @@ -300,6 +302,7 @@ public static long SizeOf(byte[] arr) /// /// Returns the size in bytes of the object. [CLSCompliant(false)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(sbyte[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + arr.Length); @@ -307,6 +310,7 @@ public static long SizeOf(sbyte[] arr) /// /// Returns the size in bytes of the object. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(bool[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + arr.Length); @@ -314,6 +318,7 @@ public static long SizeOf(bool[] arr) /// /// Returns the size in bytes of the object. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(char[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_CHAR * arr.Length); @@ -321,6 +326,7 @@ public static long SizeOf(char[] arr) /// /// Returns the size in bytes of the object. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(short[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT16 * arr.Length); @@ -328,6 +334,7 @@ public static long SizeOf(short[] arr) /// /// Returns the size in bytes of the object. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(int[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT32 * arr.Length); @@ -335,6 +342,7 @@ public static long SizeOf(int[] arr) /// /// Returns the size in bytes of the object. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(float[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_SINGLE * arr.Length); @@ -342,6 +350,7 @@ public static long SizeOf(float[] arr) /// /// Returns the size in bytes of the object. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(long[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT64 * arr.Length); @@ -349,6 +358,7 @@ public static long SizeOf(long[] arr) /// /// Returns the size in bytes of the object. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(double[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_DOUBLE * arr.Length); @@ -357,6 +367,7 @@ public static long SizeOf(double[] arr) /// /// Returns the size in bytes of the object. [CLSCompliant(false)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(ulong[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT64 * arr.Length); @@ -365,6 +376,7 @@ public static long SizeOf(ulong[] arr) /// /// Returns the size in bytes of the object. 
[CLSCompliant(false)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(uint[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT32 * arr.Length); @@ -373,6 +385,7 @@ public static long SizeOf(uint[] arr) /// /// Returns the size in bytes of the object. [CLSCompliant(false)] + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(ushort[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT16 * arr.Length); @@ -387,6 +400,7 @@ public static long SizeOf(ushort[] arr) /// (it isn't side-effect free). After the method exits, this memory /// should be GCed. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static long SizeOf(object obj) { return MeasureObjectSize(obj); @@ -593,6 +607,7 @@ private static long MeasureObjectSize(object root) /// Create a cached information about shallow size and reference fields for /// a given class. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static ClassCache CreateCacheEntry(Type clazz) { ClassCache cachedInfo; @@ -631,6 +646,7 @@ private static ClassCache CreateCacheEntry(Type clazz) /// The returned offset will be the maximum of whatever was measured so far and /// field's offset and representation size (unaligned). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static long AdjustForField(long sizeSoFar, FieldInfo f) { Type type = f.FieldType; @@ -663,6 +679,7 @@ private static long AdjustForField(long sizeSoFar, FieldInfo f) /// /// Returns size in human-readable units (GB, MB, KB or bytes). /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static string HumanReadableUnits(long bytes) { return HumanReadableUnits(bytes, new NumberFormatInfo() { NumberDecimalDigits = 1 }); @@ -695,6 +712,7 @@ public static string HumanReadableUnits(long bytes, IFormatProvider df) /// Return a human-readable size of a given object. 
/// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static string HumanSizeOf(object @object) { return HumanReadableUnits(SizeOf(@object)); @@ -704,6 +722,7 @@ public static string HumanSizeOf(object @object) /// Return a human-readable size of a given object. /// /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static string HumanSizeOf(object @object, IFormatProvider df) { return HumanReadableUnits(SizeOf(@object), df); @@ -895,6 +914,7 @@ private void ExpandAndRehash() /// /// /// New capacity (must be a power of two). + [MethodImpl(MethodImplOptions.AggressiveInlining)] private void AllocateBuffers(int capacity) { this.keys = new object[capacity]; @@ -904,7 +924,7 @@ private void AllocateBuffers(int capacity) /// /// Return the next possible capacity, counting from the current buffers' size. /// - private int NextCapacity(int current) // LUCENENET NOTE: made private, since protected is not valid in a sealed class + private static int NextCapacity(int current) // LUCENENET NOTE: made private, since protected is not valid in a sealed class // LUCENENET: CA1822: Mark members as static { if (Debugging.AssertsEnabled) { @@ -939,6 +959,7 @@ private int RoundCapacity(int requestedCapacity) // LUCENENET NOTE: made private return capacity; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Clear() { Assigned = 0; @@ -955,6 +976,7 @@ public void Clear() // } //} + [MethodImpl(MethodImplOptions.AggressiveInlining)] public IEnumerator GetEnumerator() { return new IteratorAnonymousInnerClassHelper(this); diff --git a/src/Lucene.Net/Util/RefCount.cs b/src/Lucene.Net/Util/RefCount.cs index fdc1e5ad25..9750997fe5 100644 --- a/src/Lucene.Net/Util/RefCount.cs +++ b/src/Lucene.Net/Util/RefCount.cs @@ -1,5 +1,6 @@ using J2N.Threading.Atomic; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -74,6 +75,7 @@ public void DecRef() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public T Get() { 
return m_object; @@ -81,6 +83,7 @@ public T Get() /// /// Returns the current reference count. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int GetRefCount() // LUCENENET NOTE: although this would be a good candidate for a property, doing so would cause a naming conflict { return refCount; @@ -90,6 +93,7 @@ public T Get() /// Increments the reference count. Calls to this method must be matched with /// calls to . /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public void IncRef() { refCount.IncrementAndGet(); diff --git a/src/Lucene.Net/Util/RollingBuffer.cs b/src/Lucene.Net/Util/RollingBuffer.cs index 8ee93d1c7f..84539c2ae3 100644 --- a/src/Lucene.Net/Util/RollingBuffer.cs +++ b/src/Lucene.Net/Util/RollingBuffer.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -97,6 +98,7 @@ private bool InBounds(int pos) return pos < nextPos && pos >= nextPos - count; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private int GetIndex(int pos) { int index = nextWrite - (nextPos - pos); diff --git a/src/Lucene.Net/Util/SPIClassIterator.cs b/src/Lucene.Net/Util/SPIClassIterator.cs index 5439a51ab7..1829817329 100644 --- a/src/Lucene.Net/Util/SPIClassIterator.cs +++ b/src/Lucene.Net/Util/SPIClassIterator.cs @@ -1,7 +1,7 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Reflection; +using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace Lucene.Net.Util @@ -113,21 +113,25 @@ private static JCG.HashSet LoadTypes() // LUCENENET: Avoid static construc return types; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static bool IsInvokableSubclassOf(Type type) { return typeof(T).IsAssignableFrom(type) && !type.IsAbstract && !type.IsInterface; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static SPIClassIterator Get() { return new SPIClassIterator(); } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] public IEnumerator GetEnumerator() { return types.GetEnumerator(); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return GetEnumerator(); diff --git a/src/Lucene.Net/Util/SentinelIntSet.cs b/src/Lucene.Net/Util/SentinelIntSet.cs index 44bca36259..99f2990a02 100644 --- a/src/Lucene.Net/Util/SentinelIntSet.cs +++ b/src/Lucene.Net/Util/SentinelIntSet.cs @@ -2,6 +2,7 @@ using Lucene.Net.Support; using System; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -87,6 +88,7 @@ public SentinelInt32Set(int size, int emptyVal) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual void Clear() { Arrays.Fill(keys, EmptyVal); @@ -97,6 +99,7 @@ public virtual void Clear() /// (internal) Return the hash for the key. The default implementation just returns the key, /// which is not appropriate for general purpose use. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual int Hash(int key) { return key; @@ -163,6 +166,7 @@ public virtual int Find(int key) /// /// Does this set contain the specified integer? + [MethodImpl(MethodImplOptions.AggressiveInlining)] public virtual bool Exists(int key) { return Find(key) >= 0; diff --git a/src/Lucene.Net/Util/SetOnce.cs b/src/Lucene.Net/Util/SetOnce.cs index a0b4317448..1c7bcb1d9a 100644 --- a/src/Lucene.Net/Util/SetOnce.cs +++ b/src/Lucene.Net/Util/SetOnce.cs @@ -1,5 +1,6 @@ using J2N.Threading.Atomic; using System; +using System.Runtime.CompilerServices; #if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; #endif @@ -79,11 +80,13 @@ public void Set(T obj) /// /// Returns the object set by . + [MethodImpl(MethodImplOptions.AggressiveInlining)] public T Get() { return obj; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public object Clone() { return obj == null ? 
new SetOnce() : new SetOnce(obj); diff --git a/src/Lucene.Net/Util/SmallFloat.cs b/src/Lucene.Net/Util/SmallFloat.cs index 30a945fbff..cca427e664 100644 --- a/src/Lucene.Net/Util/SmallFloat.cs +++ b/src/Lucene.Net/Util/SmallFloat.cs @@ -1,4 +1,5 @@ using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -40,6 +41,7 @@ public static class SmallSingle // LUCENENET specific - made static /// The zero-point in the range of exponent values. /// The 8 bit float representation. // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static byte SingleToByte(float f, int numMantissaBits, int zeroExp) { return (byte)SingleToSByte(f, numMantissaBits, zeroExp); @@ -86,6 +88,7 @@ public static sbyte SingleToSByte(float f, int numMantissaBits, int zeroExp) /// NOTE: This was byteToFloat() in Lucene /// // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static float ByteToSingle(byte b, int numMantissaBits, int zeroExp) { return SByteToSingle((sbyte)b, numMantissaBits, zeroExp); @@ -125,6 +128,7 @@ public static float SByteToSingle(sbyte b, int numMantissaBits, int zeroExp) /// NOTE: This was floatToByte315() in Lucene /// // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static byte SingleToByte315(float f) { return (byte)SingleToSByte315(f); @@ -160,6 +164,7 @@ public static sbyte SingleToSByte315(float f) /// NOTE: This was byte315ToFloat() in Lucene /// // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static float Byte315ToSingle(byte b) { return SByte315ToSingle((sbyte)b); @@ -193,6 +198,7 @@ public static float SByte315ToSingle(sbyte b) /// NOTE: This was floatToByte52() in Lucene /// // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static byte 
SingleToByte52(float f) { return (byte)SingleToSByte315(f); @@ -228,6 +234,7 @@ public static sbyte SingleToSByte52(float f) /// NOTE: This was byte52ToFloat() in Lucene /// // LUCENENET specific overload for CLS compliance + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static float Byte52ToSingle(byte b) { return SByte52ToSingle((sbyte)b); diff --git a/src/Lucene.Net/Util/Sorter.cs b/src/Lucene.Net/Util/Sorter.cs index 9511d4fcbc..3c116d0aea 100644 --- a/src/Lucene.Net/Util/Sorter.cs +++ b/src/Lucene.Net/Util/Sorter.cs @@ -1,5 +1,6 @@ using Lucene.Net.Diagnostics; using System; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -52,6 +53,7 @@ protected Sorter() /// public abstract void Sort(int from, int to); + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void CheckRange(int from, int to) { if (to < from) @@ -60,6 +62,7 @@ internal virtual void CheckRange(int from, int to) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void MergeInPlace(int from, int mid, int to) { if (from == mid || mid == to || Compare(mid - 1, mid) <= 0) @@ -101,6 +104,7 @@ internal virtual void MergeInPlace(int from, int mid, int to) MergeInPlace(new_mid, second_cut, to); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int Lower(int from, int to, int val) { int len = to - from; @@ -121,6 +125,7 @@ internal virtual int Lower(int from, int to, int val) return from; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int Upper(int from, int to, int val) { int len = to - from; @@ -142,6 +147,7 @@ internal virtual int Upper(int from, int to, int val) } // faster than lower when val is at the end of [from:to[ + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int Lower2(int from, int to, int val) { int f = to - 1, t = to; @@ -159,6 +165,7 @@ internal virtual int Lower2(int from, int to, int val) } // faster than upper when val is at the beginning of 
[from:to[ + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int Upper2(int from, int to, int val) { int f = from, t = f + 1; @@ -175,6 +182,7 @@ internal virtual int Upper2(int from, int to, int val) return Upper(f, to, val); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void Reverse(int from, int to) { for (--to; from < to; ++from, --to) @@ -183,6 +191,7 @@ internal void Reverse(int from, int to) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal void Rotate(int lo, int mid, int hi) { if (Debugging.AssertsEnabled) Debugging.Assert(lo <= mid && mid <= hi); @@ -193,6 +202,7 @@ internal void Rotate(int lo, int mid, int hi) DoRotate(lo, mid, hi); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void DoRotate(int lo, int mid, int hi) { if (mid - lo == hi - mid) @@ -211,6 +221,7 @@ internal virtual void DoRotate(int lo, int mid, int hi) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void InsertionSort(int from, int to) { for (int i = from + 1; i < to; ++i) @@ -229,11 +240,13 @@ internal virtual void InsertionSort(int from, int to) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void BinarySort(int from, int to) { BinarySort(from, to, from + 1); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void BinarySort(int from, int to, int i) { for (; i < to; ++i) @@ -277,6 +290,7 @@ internal virtual void BinarySort(int from, int to, int i) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void HeapSort(int from, int to) { if (to - from <= 1) @@ -291,6 +305,7 @@ internal virtual void HeapSort(int from, int to) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void Heapify(int from, int to) { for (int i = HeapParent(from, to - 1); i >= from; --i) @@ -299,6 +314,7 @@ internal virtual void Heapify(int from, int to) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal 
virtual void SiftDown(int i, int from, int to) { for (int leftChild = HeapChild(from, i); leftChild < to; leftChild = HeapChild(from, i)) @@ -329,11 +345,13 @@ internal virtual void SiftDown(int i, int from, int to) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int HeapParent(int from, int i) { return ((int)((uint)(i - 1 - from) >> 1)) + from; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int HeapChild(int from, int i) { return ((i - from) << 1) + 1 + from; diff --git a/src/Lucene.Net/Util/StringHelper.cs b/src/Lucene.Net/Util/StringHelper.cs index eb2e500901..862efbe8fa 100644 --- a/src/Lucene.Net/Util/StringHelper.cs +++ b/src/Lucene.Net/Util/StringHelper.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Globalization; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -41,6 +42,7 @@ public static class StringHelper // LUCENENET specific - marked static and remov // denial of service attacks, and to catch any places that // somehow rely on hash function/order across JVM // instances: + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int InitializeHashSeed() { // LUCENENET specific - reformatted with : @@ -128,6 +130,7 @@ public static int BytesDifference(this BytesRef left, BytesRef right) // LUCENEN return 0; }); + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool Equals(string s1, string s2) { if (s1 == null) @@ -150,6 +153,7 @@ public static bool Equals(string s1, string s2) /// The expected prefix /// Returns true if the starts with the given . /// Otherwise false. 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool StartsWith(this BytesRef @ref, BytesRef prefix) // LUCENENET specific - converted to extension method { return SliceEquals(@ref, prefix, 0); @@ -165,11 +169,13 @@ public static bool StartsWith(this BytesRef @ref, BytesRef prefix) // LUCENENET /// The expected suffix /// Returns true if the ends with the given . /// Otherwise false. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool EndsWith(this BytesRef @ref, BytesRef suffix) // LUCENENET specific - converted to extension method { return SliceEquals(@ref, suffix, @ref.Length - suffix.Length); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static bool SliceEquals(this BytesRef sliceToTest, BytesRef other, int pos) // LUCENENET specific - converted to extension method { if (pos < 0 || sliceToTest.Length - pos < other.Length) @@ -255,6 +261,7 @@ public static int Murmurhash3_x86_32(byte[] data, int offset, int len, int seed) /// Returns the MurmurHash3_x86_32 hash. /// Original source/tests at https://github.com/yonik/java_util/. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static int Murmurhash3_x86_32(BytesRef bytes, int seed) { return Murmurhash3_x86_32(bytes.Bytes, bytes.Offset, bytes.Length, seed); diff --git a/src/Lucene.Net/Util/TimSorter.cs b/src/Lucene.Net/Util/TimSorter.cs index f615bea059..c950e3801d 100644 --- a/src/Lucene.Net/Util/TimSorter.cs +++ b/src/Lucene.Net/Util/TimSorter.cs @@ -65,6 +65,7 @@ protected TimSorter(int maxTempSlots) /// /// Minimum run length for an array of length . 
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int MinRun(int length) { if (Debugging.AssertsEnabled) Debugging.Assert(length >= MINRUN); @@ -80,27 +81,32 @@ internal static int MinRun(int length) return minRun; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int RunLen(int i) { int off = stackSize - i; return runEnds[off] - runEnds[off - 1]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int RunBase(int i) { return runEnds[stackSize - i - 1]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int RunEnd(int i) // LUCENENET TODO: API - change to indexer { return runEnds[stackSize - i]; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void SetRunEnd(int i, int runEnd) { runEnds[stackSize - i] = runEnd; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void PushRunLen(int len) { runEnds[stackSize + 1] = runEnds[stackSize] + len; @@ -111,6 +117,7 @@ internal virtual void PushRunLen(int len) /// Compute the length of the next run, make the run sorted and return its /// length. /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int NextRun() { int runBase = RunEnd(0); @@ -142,6 +149,7 @@ internal virtual int NextRun() return runHi - runBase; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void EnsureInvariants() { while (stackSize > 1) @@ -178,6 +186,7 @@ internal virtual void EnsureInvariants() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void ExhaustStack() { while (stackSize > 1) @@ -186,6 +195,7 @@ internal virtual void ExhaustStack() } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void Reset(int from, int to) { stackSize = 0; @@ -196,6 +206,7 @@ internal virtual void Reset(int from, int to) this.minRun = length <= THRESHOLD ? 
length : MinRun(length); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void MergeAt(int n) { if (Debugging.AssertsEnabled) Debugging.Assert(stackSize >= 2); @@ -252,6 +263,7 @@ public override void Sort(int from, int to) if (Debugging.AssertsEnabled) Debugging.Assert(RunEnd(0) == to); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal override void DoRotate(int lo, int mid, int hi) { int len1 = mid - lo; @@ -295,6 +307,7 @@ internal override void DoRotate(int lo, int mid, int hi) } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void MergeLo(int lo, int mid, int hi) { if (Debugging.AssertsEnabled) Debugging.Assert(Compare(lo, mid) > 0); @@ -338,6 +351,7 @@ internal virtual void MergeLo(int lo, int mid, int hi) if (Debugging.AssertsEnabled) Debugging.Assert(j == dest); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual void MergeHi(int lo, int mid, int hi) { if (Debugging.AssertsEnabled) Debugging.Assert(Compare(mid - 1, hi - 1) > 0); @@ -381,6 +395,7 @@ internal virtual void MergeHi(int lo, int mid, int hi) if (Debugging.AssertsEnabled) Debugging.Assert(i == dest); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int LowerSaved(int from, int to, int val) { int len = to - from; @@ -401,6 +416,7 @@ internal virtual int LowerSaved(int from, int to, int val) return from; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int UpperSaved(int from, int to, int val) { int len = to - from; @@ -422,6 +438,7 @@ internal virtual int UpperSaved(int from, int to, int val) } // faster than lowerSaved when val is at the beginning of [from:to[ + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int LowerSaved3(int from, int to, int val) { int f = from, t = f + 1; @@ -439,6 +456,7 @@ internal virtual int LowerSaved3(int from, int to, int val) } //faster than upperSaved when val is at the end of [from:to[ + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] internal virtual int UpperSaved3(int from, int to, int val) { int f = to - 1, t = to; diff --git a/src/Lucene.Net/Util/VirtualMethod.cs b/src/Lucene.Net/Util/VirtualMethod.cs index 4448b48cf9..25511233f2 100644 --- a/src/Lucene.Net/Util/VirtualMethod.cs +++ b/src/Lucene.Net/Util/VirtualMethod.cs @@ -205,7 +205,7 @@ public static int CompareImplementationDistance(Type clazz, VirtualMethod m1, Vi return m1.GetImplementationDistance(clazz).CompareTo(m2.GetImplementationDistance(clazz)); } - private MethodInfo GetMethod(Type clazz, string methodName, BindingFlags bindingFlags, Type[] methodParameters) + private static MethodInfo GetMethod(Type clazz, string methodName, BindingFlags bindingFlags, Type[] methodParameters) // LUCENENET: CA1822: Mark members as static { #if FEATURE_TYPE_GETMETHOD__BINDINGFLAGS_PARAMS return clazz.GetMethod(methodName, bindingFlags, null, methodParameters, null); @@ -217,7 +217,7 @@ private MethodInfo GetMethod(Type clazz, string methodName, BindingFlags binding if (methods.Length == 0) { - return default(MethodInfo); + return default; } else if (methods.Length == 1) { diff --git a/src/Lucene.Net/Util/WAH8DocIdSet.cs b/src/Lucene.Net/Util/WAH8DocIdSet.cs index ac1134ec9f..844aaacb38 100644 --- a/src/Lucene.Net/Util/WAH8DocIdSet.cs +++ b/src/Lucene.Net/Util/WAH8DocIdSet.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Runtime.CompilerServices; namespace Lucene.Net.Util { @@ -91,6 +92,7 @@ public sealed class WAH8DocIdSet : DocIdSet private static readonly MonotonicAppendingInt64Buffer SINGLE_ZERO_BUFFER = LoadSingleZeroBuffer(); // LUCENENET specific - optimized empty array creation private static readonly WAH8DocIdSet EMPTY = new WAH8DocIdSet(Arrays.Empty(), 0, 1, SINGLE_ZERO_BUFFER, SINGLE_ZERO_BUFFER); + [MethodImpl(MethodImplOptions.AggressiveInlining)] private static MonotonicAppendingInt64Buffer LoadSingleZeroBuffer() // LUCENENET: 
Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) { var buffer = new MonotonicAppendingInt64Buffer(1, 64, PackedInt32s.COMPACT); @@ -100,9 +102,10 @@ private static MonotonicAppendingInt64Buffer LoadSingleZeroBuffer() // LUCENENET } private static readonly IComparer SERIALIZED_LENGTH_COMPARER = Comparer.Create((wi1, wi2) => wi1.@in.Length - wi2.@in.Length); - + /// /// Same as with the default index interval. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static WAH8DocIdSet Intersect(ICollection docIdSets) { return Intersect(docIdSets, DEFAULT_INDEX_INTERVAL); @@ -175,6 +178,7 @@ public static WAH8DocIdSet Intersect(ICollection docIdSets, int in /// /// Same as with the default index interval. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public static WAH8DocIdSet Union(ICollection docIdSets) { return Union(docIdSets, DEFAULT_INDEX_INTERVAL); @@ -242,12 +246,14 @@ public PriorityQueueAnonymousInnerClassHelper(int numSets) { } + [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal override bool LessThan(Iterator a, Iterator b) { return a.wordNum < b.wordNum; } } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int WordNum(int docID) { if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0); @@ -561,6 +567,7 @@ public Builder Add(DocIdSetIterator disi) return this; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override object SetIndexInterval(int indexInterval) { return (Builder)base.SetIndexInterval(indexInterval); @@ -601,6 +608,7 @@ public override DocIdSetIterator GetIterator() return new Iterator(data, cardinality, indexInterval, positions, wordNums); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int ReadCleanLength(ByteArrayDataInput @in, int token) { int len = ((int)((uint)token >> 4)) & 0x07; @@ -616,6 +624,7 @@ internal static int ReadCleanLength(ByteArrayDataInput @in, int token) return len; } + 
[MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int ReadDirtyLength(ByteArrayDataInput @in, int token) { int len = token & 0x0F; @@ -883,6 +892,7 @@ public override int Advance(int target) return SlowAdvance(target); } + [MethodImpl(MethodImplOptions.AggressiveInlining)] public override long GetCost() { return cardinality; @@ -891,6 +901,7 @@ public override long GetCost() /// /// Return the number of documents in this in constant time. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public int Cardinality() { return cardinality; @@ -898,6 +909,7 @@ public int Cardinality() /// /// Return the memory usage of this class in bytes. + [MethodImpl(MethodImplOptions.AggressiveInlining)] public long RamBytesUsed() { return RamUsageEstimator.AlignObjectSize(3 * RamUsageEstimator.NUM_BYTES_OBJECT_REF + 2 * RamUsageEstimator.NUM_BYTES_INT32) diff --git a/src/Lucene.Net/Util/WeakIdentityMap.cs b/src/Lucene.Net/Util/WeakIdentityMap.cs index f2f55f4f42..3371207348 100644 --- a/src/Lucene.Net/Util/WeakIdentityMap.cs +++ b/src/Lucene.Net/Util/WeakIdentityMap.cs @@ -147,7 +147,7 @@ // } // else // { -// return default(TValue); +// return default; // } // } diff --git a/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Helpers/DiagnosticResult.cs b/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Helpers/DiagnosticResult.cs index aa19406c32..605e0ba0bf 100644 --- a/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Helpers/DiagnosticResult.cs +++ b/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Helpers/DiagnosticResult.cs @@ -1,5 +1,6 @@ using Microsoft.CodeAnalysis; using System; +using System.Diagnostics.CodeAnalysis; namespace TestHelper { @@ -23,6 +24,7 @@ namespace TestHelper /// /// Location where the diagnostic appears, as determined by path, line number, and column number. 
/// + [SuppressMessage("Performance", "CA1815:Override equals and operator equals on value types", Justification = "Used for testing")] public struct DiagnosticResultLocation { public DiagnosticResultLocation(string path, int line, int column) @@ -50,17 +52,19 @@ public DiagnosticResultLocation(string path, int line, int column) /// /// Struct that stores information about a Diagnostic appearing in a source /// + [SuppressMessage("Performance", "CA1815:Override equals and operator equals on value types", Justification = "Used for testing")] public struct DiagnosticResult { private DiagnosticResultLocation[] locations; + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Used for testing")] public DiagnosticResultLocation[] Locations { get { if (this.locations == null) { - this.locations = new DiagnosticResultLocation[] { }; + this.locations = Array.Empty(); } return this.locations; } diff --git a/src/dotnet/Lucene.Net.Tests.CodeAnalysis/TestLucene1000_SealIncrementTokenMethodCSCodeFixProvider.cs b/src/dotnet/Lucene.Net.Tests.CodeAnalysis/TestLucene1000_SealIncrementTokenMethodCSCodeFixProvider.cs index 5288f7a7c4..f3461e9c2d 100644 --- a/src/dotnet/Lucene.Net.Tests.CodeAnalysis/TestLucene1000_SealIncrementTokenMethodCSCodeFixProvider.cs +++ b/src/dotnet/Lucene.Net.Tests.CodeAnalysis/TestLucene1000_SealIncrementTokenMethodCSCodeFixProvider.cs @@ -73,7 +73,7 @@ public override bool IncrementToken() var expected = new DiagnosticResult { Id = Lucene1000_TokenStreamOrItsIncrementTokenMethodMustBeSealedCSAnalyzer.DiagnosticId, - Message = String.Format("Type name '{0}' or its IncrementToken() method must be marked sealed.", "TypeName"), + Message = string.Format("Type name '{0}' or its IncrementToken() method must be marked sealed.", "TypeName"), Severity = DiagnosticSeverity.Error, Locations = new[] { diff --git a/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Verifiers/CodeFixVerifier.cs 
b/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Verifiers/CodeFixVerifier.cs index 36aaeb9ca3..9c01a0b9ac 100644 --- a/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Verifiers/CodeFixVerifier.cs +++ b/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Verifiers/CodeFixVerifier.cs @@ -88,7 +88,7 @@ protected void VerifyBasicFix(string oldSource, string newSource, int? codeFixIn /// A class in the form of a string after the CodeFix was applied to it /// Index determining which codefix to apply if there are multiple /// A bool controlling whether or not the test will fail if the CodeFix introduces other warnings after being applied - private void VerifyFix(string language, DiagnosticAnalyzer analyzer, CodeFixProvider codeFixProvider, string oldSource, string newSource, int? codeFixIndex, bool allowNewCompilerDiagnostics) + private static void VerifyFix(string language, DiagnosticAnalyzer analyzer, CodeFixProvider codeFixProvider, string oldSource, string newSource, int? codeFixIndex, bool allowNewCompilerDiagnostics) { var document = CreateDocument(oldSource, language); var analyzerDiagnostics = GetSortedDiagnosticsFromDocuments(analyzer, new[] { document }); diff --git a/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Verifiers/DiagnosticVerifier.cs b/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Verifiers/DiagnosticVerifier.cs index 61e8572de2..3fe73c9cf0 100644 --- a/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Verifiers/DiagnosticVerifier.cs +++ b/src/dotnet/Lucene.Net.Tests.CodeAnalysis/Verifiers/DiagnosticVerifier.cs @@ -103,7 +103,7 @@ protected void VerifyBasicDiagnostic(string[] sources, params DiagnosticResult[] /// The language of the classes represented by the source strings /// The analyzer to be run on the source code /// DiagnosticResults that should appear after the analyzer is run on the sources - private void VerifyDiagnostics(string[] sources, string language, DiagnosticAnalyzer analyzer, params DiagnosticResult[] expected) + private static void VerifyDiagnostics(string[] sources, 
string language, DiagnosticAnalyzer analyzer, params DiagnosticResult[] expected) { var diagnostics = GetSortedDiagnostics(sources, language, analyzer); VerifyDiagnosticResults(diagnostics, analyzer, expected); diff --git a/src/dotnet/Lucene.Net.Tests.ICU/Support/JDKBreakIterator.cs b/src/dotnet/Lucene.Net.Tests.ICU/Support/JDKBreakIterator.cs index e023aa244b..bb551d331c 100644 --- a/src/dotnet/Lucene.Net.Tests.ICU/Support/JDKBreakIterator.cs +++ b/src/dotnet/Lucene.Net.Tests.ICU/Support/JDKBreakIterator.cs @@ -31,21 +31,13 @@ namespace Lucene.Net.Support /// public static class JdkBreakIterator { - private static readonly RuleBasedBreakIterator SentenceInstance; - private static readonly RuleBasedBreakIterator WordInstance; + private static readonly RuleBasedBreakIterator SentenceInstance = LoadBreakRules("jdksent.brk"); // LUCENENET: CA1810: Initialize reference type static fields inline + private static readonly RuleBasedBreakIterator WordInstance = LoadBreakRules("jdkword.brk"); // LUCENENET: CA1810: Initialize reference type static fields inline - static JdkBreakIterator() + private static RuleBasedBreakIterator LoadBreakRules(string fileName) { - using (Stream @is = - typeof(JdkBreakIterator).FindAndGetManifestResourceStream("jdksent.brk")) - { - SentenceInstance = RuleBasedBreakIterator.GetInstanceFromCompiledRules(@is); - } - using (Stream @is = - typeof(JdkBreakIterator).FindAndGetManifestResourceStream("jdkword.brk")) - { - WordInstance = RuleBasedBreakIterator.GetInstanceFromCompiledRules(@is); - } + using Stream @is = typeof(JdkBreakIterator).FindAndGetManifestResourceStream(fileName); + return RuleBasedBreakIterator.GetInstanceFromCompiledRules(@is); } /// diff --git a/src/dotnet/Lucene.Net.Tests.ICU/Support/TestJDKBreakIterator.cs b/src/dotnet/Lucene.Net.Tests.ICU/Support/TestJDKBreakIterator.cs index 7279c351ff..95a4e0ad07 100644 --- a/src/dotnet/Lucene.Net.Tests.ICU/Support/TestJDKBreakIterator.cs +++ 
b/src/dotnet/Lucene.Net.Tests.ICU/Support/TestJDKBreakIterator.cs @@ -33,7 +33,7 @@ namespace Lucene.Net.Tests.ICU.Support // + "and http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/7u40-b43/sun/text/resources/BreakIteratorRules_th.java#BreakIteratorRules_th")] public class TestJdkBreakIterator { - static readonly String TEXT = + const String TEXT = "Apache Lucene(TM) is a high-performance, full-featured text search engine library written entirely in Java."; private BreakIterator GetWordInstance(System.Globalization.CultureInfo locale) @@ -194,7 +194,7 @@ public void TestWordIterationThai() } - static readonly String SENTENCE_TEXT = + const String SENTENCE_TEXT = "Apache Lucene(TM) is a high-performance, full-featured text\nsearch engine library written entirely in Java. " + "It is a technology suitable for nearly any application that requires" + "full-text search, especially cross-platform. Apache Lucene is an open source project available for free download.\n" + @@ -283,7 +283,7 @@ public void TestSentenceIteration() // NOTE: This test doesn't pass. We need to customize line iteration in order to get it to. However, // none of the defaults set in lucene use line iteration, so this is low priority. Leaving in place // in case we need to make JDK style line breaks in the future. 
- static readonly String LINE_TEXT = + const String LINE_TEXT = "Apache\tLucene(TM) is a high-\nperformance, full-featured text search engine library written entirely in Java."; private BreakIterator GetLineInstance(System.Globalization.CultureInfo locale) diff --git a/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/CommandTestCase.cs b/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/CommandTestCase.cs index 1221377c88..43c5fb13b0 100644 --- a/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/CommandTestCase.cs +++ b/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/CommandTestCase.cs @@ -2,6 +2,7 @@ using Lucene.Net.Util; using NUnit.Framework; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.IO; using System.Linq; using Assert = Lucene.Net.TestFramework.Assert; @@ -149,7 +150,7 @@ public virtual void TestAllOptionsHaveDescription() public virtual MockConsoleApp RunCommand(string command) { var output = new MockConsoleApp(); - var cmd = CreateConfiguration(output).Execute(command.ToArgs()); + CreateConfiguration(output).Execute(command.ToArgs()); return output; } @@ -161,6 +162,7 @@ public void Main(string[] args) this.CallCount++; } + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Used for testing arguments")] public string[] Args { get; private set; } public int CallCount { get; private set; } } @@ -174,6 +176,8 @@ public Arg(string inputPattern, string[] output) } public string InputPattern { get; private set; } + + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Used for testing output")] public string[] Output { get; private set; } } } diff --git a/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/Index/IndexFixCommandTest.cs b/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/Index/IndexFixCommandTest.cs index ee84856ffc..05692d0e0c 100644 --- a/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/Index/IndexFixCommandTest.cs +++ 
b/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/Index/IndexFixCommandTest.cs @@ -1,5 +1,6 @@ using Lucene.Net.Attributes; using Lucene.Net.Cli.CommandLine; +using Lucene.Net.Support; using NUnit.Framework; using System.Collections.Generic; using System.Linq; @@ -37,7 +38,7 @@ protected override IList GetOptionalArgs() { new Arg[] { new Arg(inputPattern: "", output: new string[] { "-fix" }), - new Arg(inputPattern: "--dry-run", output: new string[0]), + new Arg(inputPattern: "--dry-run", output: Arrays.Empty()), }, new Arg[] { new Arg(inputPattern: "-c|--cross-check-term-vectors", output: new string[] { "-crossCheckTermVectors" }) }, new Arg[] { new Arg(inputPattern: "-v|--verbose", output: new string[] { "-verbose" }) }, @@ -77,7 +78,7 @@ public override void TestAllValidCombinations() string command = string.Join(" ", requiredArg.Select(x => x.InputPattern).Union(optionalArg.Select(x => x.InputPattern).ToArray())); string[] expected = requiredArg.SelectMany(x => x.Output) // Special case: the -fix option must be specified when --dry-run is not - .Concat(command.Contains("--dry-run") ? new string[0] : new string[] { "-fix" }) + .Concat(command.Contains("--dry-run") ? Arrays.Empty() : new string[] { "-fix" }) .Union(optionalArg.SelectMany(x => x.Output)).ToArray(); AssertCommandTranslation(command, expected); } diff --git a/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/Index/IndexSplitCommandTest.cs b/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/Index/IndexSplitCommandTest.cs index efef60765b..f8146e9446 100644 --- a/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/Index/IndexSplitCommandTest.cs +++ b/src/dotnet/tools/Lucene.Net.Tests.Cli/Commands/Index/IndexSplitCommandTest.cs @@ -1,4 +1,5 @@ using Lucene.Net.Attributes; +using Lucene.Net.Support; using NUnit.Framework; using System.Collections.Generic; using System.Linq; @@ -44,7 +45,7 @@ protected override IList GetRequiredArgs() // NOTE: We must order this in the sequence of the expected output. 
return new List() { - new Arg[] { new Arg(inputPattern: @"C:\lucene-temp", output: new string[] { /*"-out", @"C:\lucene-temp"*/ }) }, + new Arg[] { new Arg(inputPattern: @"C:\lucene-temp", output: Arrays.Empty() /*"-out", @"C:\lucene-temp"*/) }, new Arg[] { new Arg(inputPattern: @"C:\lucene-temp2 C:\lucene-temp3", output: new string[] { @"C:\lucene-temp2", @"C:\lucene-temp3" }), new Arg(inputPattern: @"C:\lucene-temp2 C:\lucene-temp3 C:\lucene-temp4", output: new string[] { @"C:\lucene-temp2", @"C:\lucene-temp3", @"C:\lucene-temp4" }), @@ -81,7 +82,7 @@ public override void TestAllValidCombinations() // Special case: the -num option must be specified when -n is not // because in MultiPassIndexSplitter it is not optional, so we are patching // it in our command to make 2 the default. - .Concat(command.Contains("-n") ? new string[0] : new string[] { "-num", "2" }) + .Concat(command.Contains("-n") ? Arrays.Empty() : new string[] { "-num", "2" }) .Union(optionalArg.SelectMany(x => x.Output)).ToArray(); AssertCommandTranslation(command, expected); } diff --git a/src/dotnet/tools/Lucene.Net.Tests.Cli/SourceCode/SourceCodeParserTest.cs b/src/dotnet/tools/Lucene.Net.Tests.Cli/SourceCode/SourceCodeParserTest.cs index edf0703799..a3061d5c43 100644 --- a/src/dotnet/tools/Lucene.Net.Tests.Cli/SourceCode/SourceCodeParserTest.cs +++ b/src/dotnet/tools/Lucene.Net.Tests.Cli/SourceCode/SourceCodeParserTest.cs @@ -32,43 +32,39 @@ public void TestSourceCodeSectionParser() { var parser = new SourceCodeSectionParser(); - using (var output = new MemoryStream()) + using var output = new MemoryStream(); + using (var input = this.GetType().FindAndGetManifestResourceStream("TestInputForParser.cs")) { - using (var input = this.GetType().FindAndGetManifestResourceStream("TestInputForParser.cs")) - { - parser.ParseSourceCodeFiles(input, output); - } + parser.ParseSourceCodeFiles(input, output); + } - output.Seek(0, SeekOrigin.Begin); + output.Seek(0, SeekOrigin.Begin); - using (var reader = 
new StreamReader(output, SourceCodeSectionParser.ENCODING)) - { - Assert.AreEqual("using System;", reader.ReadLine()); - Assert.AreEqual("using System.Collections.Generic;", reader.ReadLine()); - Assert.AreEqual("using System.Linq;", reader.ReadLine()); - Assert.AreEqual("using System.Threading.Tasks;", reader.ReadLine()); - Assert.AreEqual("using System.Reflection;", reader.ReadLine()); - Assert.AreEqual("using System.Xml;", reader.ReadLine()); - Assert.AreEqual("", reader.ReadLine()); - Assert.AreEqual("namespace Lucene.Net.Cli.SourceCode", reader.ReadLine()); - Assert.AreEqual("{", reader.ReadLine()); - Assert.AreEqual(" public class TestInputForParser", reader.ReadLine()); - Assert.AreEqual(" {", reader.ReadLine()); - Assert.AreEqual(" public void Foo()", reader.ReadLine()); - Assert.AreEqual(" {", reader.ReadLine()); - Assert.AreEqual(" Console.WriteLine(\"Foo\");", reader.ReadLine()); - Assert.AreEqual(" }", reader.ReadLine()); - Assert.AreEqual("", reader.ReadLine()); - Assert.AreEqual(" public void Bar()", reader.ReadLine()); - Assert.AreEqual(" {", reader.ReadLine()); - Assert.AreEqual(" Console.WriteLine(\"Bar2\");", reader.ReadLine()); - Assert.AreEqual(" }", reader.ReadLine()); - Assert.AreEqual(" }", reader.ReadLine()); - Assert.AreEqual("}", reader.ReadLine()); - Assert.AreEqual(null, reader.ReadLine()); - Assert.AreEqual(null, reader.ReadLine()); - } - } + using var reader = new StreamReader(output, SourceCodeSectionParser.ENCODING); + Assert.AreEqual("using System;", reader.ReadLine()); + Assert.AreEqual("using System.Collections.Generic;", reader.ReadLine()); + Assert.AreEqual("using System.Linq;", reader.ReadLine()); + Assert.AreEqual("using System.Threading.Tasks;", reader.ReadLine()); + Assert.AreEqual("using System.Reflection;", reader.ReadLine()); + Assert.AreEqual("using System.Xml;", reader.ReadLine()); + Assert.AreEqual("", reader.ReadLine()); + Assert.AreEqual("namespace Lucene.Net.Cli.SourceCode", reader.ReadLine()); + 
Assert.AreEqual("{", reader.ReadLine()); + Assert.AreEqual(" public class TestInputForParser", reader.ReadLine()); + Assert.AreEqual(" {", reader.ReadLine()); + Assert.AreEqual(" public void Foo()", reader.ReadLine()); + Assert.AreEqual(" {", reader.ReadLine()); + Assert.AreEqual(" Console.WriteLine(\"Foo\");", reader.ReadLine()); + Assert.AreEqual(" }", reader.ReadLine()); + Assert.AreEqual("", reader.ReadLine()); + Assert.AreEqual(" public void Bar()", reader.ReadLine()); + Assert.AreEqual(" {", reader.ReadLine()); + Assert.AreEqual(" Console.WriteLine(\"Bar2\");", reader.ReadLine()); + Assert.AreEqual(" }", reader.ReadLine()); + Assert.AreEqual(" }", reader.ReadLine()); + Assert.AreEqual("}", reader.ReadLine()); + Assert.AreEqual(null, reader.ReadLine()); + Assert.AreEqual(null, reader.ReadLine()); } } } diff --git a/src/dotnet/tools/lucene-cli/CommandLineOptions.cs b/src/dotnet/tools/lucene-cli/CommandLineOptions.cs index 93c74551e8..9fb51318b3 100644 --- a/src/dotnet/tools/lucene-cli/CommandLineOptions.cs +++ b/src/dotnet/tools/lucene-cli/CommandLineOptions.cs @@ -1,4 +1,5 @@ using System; +using System.Diagnostics.CodeAnalysis; namespace Lucene.Net.Cli { @@ -19,6 +20,7 @@ namespace Lucene.Net.Cli * limitations under the License. 
*/ + [SuppressMessage("Design", "CA1052:Static holder types should be Static or NotInheritable", Justification = "This class is intended to hold the options that are passed into the app")] public class CommandLineOptions { public static int Parse(string[] args) diff --git a/src/dotnet/tools/lucene-cli/ConfigurationBase.cs b/src/dotnet/tools/lucene-cli/ConfigurationBase.cs index 4f3cb00beb..3d0e68338e 100644 --- a/src/dotnet/tools/lucene-cli/ConfigurationBase.cs +++ b/src/dotnet/tools/lucene-cli/ConfigurationBase.cs @@ -25,7 +25,7 @@ namespace Lucene.Net.Cli public abstract class ConfigurationBase : CommandLineApplication { - private static Assembly thisAssembly = typeof(ConfigurationBase).Assembly; + private static readonly Assembly thisAssembly = typeof(ConfigurationBase).Assembly; protected static string HELP_VALUE_NAME = "help"; protected ConfigurationBase() diff --git a/src/dotnet/tools/lucene-cli/Program.cs b/src/dotnet/tools/lucene-cli/Program.cs index 92616da650..6063cfaad3 100644 --- a/src/dotnet/tools/lucene-cli/Program.cs +++ b/src/dotnet/tools/lucene-cli/Program.cs @@ -20,7 +20,7 @@ namespace Lucene.Net.Cli * See the License for the specific language governing permissions and * limitations under the License. 
*/ - public class Program + public static class Program { public static int Main(string[] args) { diff --git a/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeExporter.cs b/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeExporter.cs index 1e26fc3cf1..cd013859ad 100644 --- a/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeExporter.cs +++ b/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeExporter.cs @@ -46,11 +46,9 @@ public void ExportSourceCodeFiles(IEnumerable files, string outputPath) foreach (var file in files) { - using (var input = typeof(Program).FindAndGetManifestResourceStream(file)) - using (var output = new FileStream(Path.Combine(outputPath, file), FileMode.Create, FileAccess.Write)) - { - sectionParser.ParseSourceCodeFiles(input, output); - } + using var input = typeof(Program).FindAndGetManifestResourceStream(file); + using var output = new FileStream(Path.Combine(outputPath, file), FileMode.Create, FileAccess.Write); + sectionParser.ParseSourceCodeFiles(input, output); } } } diff --git a/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeSectionParser.cs b/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeSectionParser.cs index 9d2abcfc3c..d41305fe80 100644 --- a/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeSectionParser.cs +++ b/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeSectionParser.cs @@ -76,6 +76,7 @@ public class SourceCodeSectionParser { public static readonly Encoding ENCODING = Encoding.UTF8; + /// /// Parses the source code from the and places the /// valid lines (the lines that are not commented with a token, @@ -84,16 +85,15 @@ public class SourceCodeSectionParser /// /// A stream with the input data. This stream will still be open when the call completes. /// A stream where the output data will be sent. This stream will still be open when the call completes. 
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "Instance method allows swapping implementation at some point")] public void ParseSourceCodeFiles(Stream input, Stream output) { - using (var reader = new SourceCodeSectionReader(new StreamReader(input, ENCODING, false, 1024, true))) - using (TextWriter writer = new StreamWriter(output, ENCODING, 1024, true)) + using var reader = new SourceCodeSectionReader(new StreamReader(input, ENCODING, false, 1024, true)); + using TextWriter writer = new StreamWriter(output, ENCODING, 1024, true); + string line; + while ((line = reader.ReadLine()) != null) { - string line; - while ((line = reader.ReadLine()) != null) - { - writer.WriteLine(line); - } + writer.WriteLine(line); } } } diff --git a/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeSectionReader.cs b/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeSectionReader.cs index 6812037611..c2927b3565 100644 --- a/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeSectionReader.cs +++ b/src/dotnet/tools/lucene-cli/SourceCode/SourceCodeSectionReader.cs @@ -93,9 +93,7 @@ public class SourceCodeSectionReader : TextReader public SourceCodeSectionReader(TextReader reader) { - if (reader == null) - throw new ArgumentNullException("reader"); - this.reader = reader; + this.reader = reader ?? 
throw new ArgumentNullException(nameof(reader)); } public override string ReadLine() diff --git a/src/dotnet/tools/lucene-cli/commands/benchmark/benchmark-sample/BenchmarkSampleCommand.cs b/src/dotnet/tools/lucene-cli/commands/benchmark/benchmark-sample/BenchmarkSampleCommand.cs index c8a3490ba0..5d3eac6734 100644 --- a/src/dotnet/tools/lucene-cli/commands/benchmark/benchmark-sample/BenchmarkSampleCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/benchmark/benchmark-sample/BenchmarkSampleCommand.cs @@ -1,4 +1,5 @@ using Lucene.Net.Benchmarks.ByTask.Programmatic; +using System; using System.Collections.Generic; namespace Lucene.Net.Cli @@ -35,18 +36,12 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new BenchmarkSampleCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "Sample.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "Sample.cs" }; } public int Run(ConfigurationBase cmd) { - cmd.Main(new string[0]); + cmd.Main(Array.Empty()); return 0; } } diff --git a/src/dotnet/tools/lucene-cli/commands/demo/DemoConfiguration.cs b/src/dotnet/tools/lucene-cli/commands/demo/DemoConfiguration.cs index b2592c3da8..ef7f623089 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/DemoConfiguration.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/DemoConfiguration.cs @@ -66,10 +66,8 @@ public override void OnExecute(Func invoke) } if (viewSource) { - using (var console = new ConsolePager(this.SourceCodeFiles)) - { - console.Run(); - } + using var console = new ConsolePager(this.SourceCodeFiles); + console.Run(); } return 0; diff --git a/src/dotnet/tools/lucene-cli/commands/demo/demo-associations-facets/DemoAssociationsFacetsCommand.cs b/src/dotnet/tools/lucene-cli/commands/demo/demo-associations-facets/DemoAssociationsFacetsCommand.cs index f160f97c73..2c9e3619c6 100644 --- 
a/src/dotnet/tools/lucene-cli/commands/demo/demo-associations-facets/DemoAssociationsFacetsCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-associations-facets/DemoAssociationsFacetsCommand.cs @@ -1,4 +1,5 @@ using Lucene.Net.Demo.Facet; +using System; using System.Collections.Generic; namespace Lucene.Net.Cli @@ -35,18 +36,12 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoAssociationsFacetsCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "AssociationsFacetsExample.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "AssociationsFacetsExample.cs" }; } public int Run(ConfigurationBase cmd) { - cmd.Main(new string[0]); + cmd.Main(Array.Empty()); return 0; } } diff --git a/src/dotnet/tools/lucene-cli/commands/demo/demo-distance-facets/DemoDistanceFacetsCommand.cs b/src/dotnet/tools/lucene-cli/commands/demo/demo-distance-facets/DemoDistanceFacetsCommand.cs index 5229279301..3622ece725 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/demo-distance-facets/DemoDistanceFacetsCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-distance-facets/DemoDistanceFacetsCommand.cs @@ -1,5 +1,6 @@ using Lucene.Net.Demo.Facet; using System.Collections.Generic; +using System; namespace Lucene.Net.Cli { @@ -35,18 +36,12 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoDistanceFacetsCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "DistanceFacetsExample.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "DistanceFacetsExample.cs" }; } public int Run(ConfigurationBase cmd) { - cmd.Main(new string[0]); + cmd.Main(Array.Empty()); return 0; } } diff --git a/src/dotnet/tools/lucene-cli/commands/demo/demo-expression-aggregation-facets/DemoExpressionAggregationFacetsCommand.cs 
b/src/dotnet/tools/lucene-cli/commands/demo/demo-expression-aggregation-facets/DemoExpressionAggregationFacetsCommand.cs index 2ada74b08c..680ab55f49 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/demo-expression-aggregation-facets/DemoExpressionAggregationFacetsCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-expression-aggregation-facets/DemoExpressionAggregationFacetsCommand.cs @@ -1,4 +1,5 @@ using Lucene.Net.Demo.Facet; +using System; using System.Collections.Generic; namespace Lucene.Net.Cli @@ -35,18 +36,12 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoExpressionAggregationFacetsCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "ExpressionAggregationFacetsExample.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "ExpressionAggregationFacetsExample.cs" }; } public int Run(ConfigurationBase cmd) { - cmd.Main(new string[0]); + cmd.Main(Array.Empty()); return 0; } } diff --git a/src/dotnet/tools/lucene-cli/commands/demo/demo-index-files/DemoIndexFilesCommand.cs b/src/dotnet/tools/lucene-cli/commands/demo/demo-index-files/DemoIndexFilesCommand.cs index 70b7768952..583007f945 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/demo-index-files/DemoIndexFilesCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-index-files/DemoIndexFilesCommand.cs @@ -46,13 +46,7 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoIndexFilesCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "IndexFiles.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "IndexFiles.cs" }; public CommandArgument IndexDirectoryArgument { get; private set; } public CommandArgument SourceDirectoryArgument { get; private set; } diff --git 
a/src/dotnet/tools/lucene-cli/commands/demo/demo-multi-category-lists-facets/DemoMultiCategoryListsFacetsCommand.cs b/src/dotnet/tools/lucene-cli/commands/demo/demo-multi-category-lists-facets/DemoMultiCategoryListsFacetsCommand.cs index 84350a6052..494d695e5e 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/demo-multi-category-lists-facets/DemoMultiCategoryListsFacetsCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-multi-category-lists-facets/DemoMultiCategoryListsFacetsCommand.cs @@ -1,4 +1,5 @@ using Lucene.Net.Demo.Facet; +using System; using System.Collections.Generic; namespace Lucene.Net.Cli @@ -35,18 +36,12 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoMultiCategoryListsFacetsCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "MultiCategoryListsFacetsExample.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "MultiCategoryListsFacetsExample.cs" }; } public int Run(ConfigurationBase cmd) { - cmd.Main(new string[0]); + cmd.Main(Array.Empty()); return 0; } } diff --git a/src/dotnet/tools/lucene-cli/commands/demo/demo-range-facets/DemoRangeFacetsCommand.cs b/src/dotnet/tools/lucene-cli/commands/demo/demo-range-facets/DemoRangeFacetsCommand.cs index 187ccb3b4d..02c94c1af2 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/demo-range-facets/DemoRangeFacetsCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-range-facets/DemoRangeFacetsCommand.cs @@ -1,4 +1,5 @@ using Lucene.Net.Demo.Facet; +using System; using System.Collections.Generic; namespace Lucene.Net.Cli @@ -35,18 +36,12 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoRangeFacetsCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "RangeFacetsExample.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "RangeFacetsExample.cs" }; } public 
int Run(ConfigurationBase cmd) { - cmd.Main(new string[0]); + cmd.Main(Array.Empty()); return 0; } } diff --git a/src/dotnet/tools/lucene-cli/commands/demo/demo-search-files/DemoSearchFilesCommand.cs b/src/dotnet/tools/lucene-cli/commands/demo/demo-search-files/DemoSearchFilesCommand.cs index a7f6bd6434..29e16cfca4 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/demo-search-files/DemoSearchFilesCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-search-files/DemoSearchFilesCommand.cs @@ -64,13 +64,7 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoSearchFilesCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "SearchFiles.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "SearchFiles.cs" }; public CommandArgument IndexDirectoryArgument { get; private set; } public CommandOption FieldOption { get; private set; } diff --git a/src/dotnet/tools/lucene-cli/commands/demo/demo-simple-facets/DemoSimpleFacetsCommand.cs b/src/dotnet/tools/lucene-cli/commands/demo/demo-simple-facets/DemoSimpleFacetsCommand.cs index 9839ddc9b8..407eb7e9d5 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/demo-simple-facets/DemoSimpleFacetsCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-simple-facets/DemoSimpleFacetsCommand.cs @@ -1,4 +1,5 @@ using Lucene.Net.Demo.Facet; +using System; using System.Collections.Generic; namespace Lucene.Net.Cli @@ -35,18 +36,12 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoSimpleFacetsCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "SimpleFacetsExample.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "SimpleFacetsExample.cs" }; } public int Run(ConfigurationBase cmd) { - cmd.Main(new string[0]); + cmd.Main(Array.Empty()); return 0; } } diff --git 
a/src/dotnet/tools/lucene-cli/commands/demo/demo-simple-sorted-set-facets/DemoSimpleSortedSetFacetsCommand.cs b/src/dotnet/tools/lucene-cli/commands/demo/demo-simple-sorted-set-facets/DemoSimpleSortedSetFacetsCommand.cs index a790fc4c66..38171b1d45 100644 --- a/src/dotnet/tools/lucene-cli/commands/demo/demo-simple-sorted-set-facets/DemoSimpleSortedSetFacetsCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/demo/demo-simple-sorted-set-facets/DemoSimpleSortedSetFacetsCommand.cs @@ -1,4 +1,5 @@ using Lucene.Net.Demo.Facet; +using System; using System.Collections.Generic; namespace Lucene.Net.Cli @@ -35,18 +36,12 @@ public Configuration(CommandLineOptions options) this.OnExecute(() => new DemoSimpleSortedSetFacetsCommand().Run(this)); } - public override IEnumerable SourceCodeFiles - { - get - { - return new string[] { "SimpleSortedSetFacetsExample.cs" }; - } - } + public override IEnumerable SourceCodeFiles => new string[] { "SimpleSortedSetFacetsExample.cs" }; } public int Run(ConfigurationBase cmd) { - cmd.Main(new string[0]); + cmd.Main(Array.Empty()); return 0; } } diff --git a/src/dotnet/tools/lucene-cli/commands/index/index-split/IndexSplitCommand.cs b/src/dotnet/tools/lucene-cli/commands/index/index-split/IndexSplitCommand.cs index b8cc60e297..1db576d304 100644 --- a/src/dotnet/tools/lucene-cli/commands/index/index-split/IndexSplitCommand.cs +++ b/src/dotnet/tools/lucene-cli/commands/index/index-split/IndexSplitCommand.cs @@ -53,10 +53,10 @@ public int Run(ConfigurationBase cmd) } // The first argument is the output - we need to use the -out switch - var args = new List(cmd.GetNonNullArguments().Skip(1)); - - args.Add("-out"); - args.Add(cmd.GetNonNullArguments().First()); + var args = new List(cmd.GetNonNullArguments().Skip(1)) { + "-out", + cmd.GetNonNullArguments().First() + }; var input = cmd as Configuration; diff --git a/src/dotnet/tools/lucene-cli/lucene-cli.csproj b/src/dotnet/tools/lucene-cli/lucene-cli.csproj index 3b7ae22755..9a68d103f0 100644 --- 
a/src/dotnet/tools/lucene-cli/lucene-cli.csproj +++ b/src/dotnet/tools/lucene-cli/lucene-cli.csproj @@ -35,9 +35,14 @@ The Lucene.Net command line interface (CLI) is a new cross-platform toolchain with utilities for maintaining Lucene.Net and demos for learning basic Lucene.Net functionality. Run 'lucene' to see usage information. $(PackageTags);maintenance;utility bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml + + $(NoWarn);1591;1573 + $(NoWarn);CA1034 + $(NoWarn);IDE0060 +