SWEEP: Add unchecked to GetHashCode, #1065 (#1068)
* SWEEP: Add unchecked to GetHashCode, #1065

* Remove redundant unchecked operators
paulirwin authored Dec 19, 2024
1 parent 7cfc4c3 commit a739a80
Showing 128 changed files with 1,183 additions and 763 deletions.
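
Every hunk in this sweep applies the same pattern: the arithmetic inside GetHashCode is wrapped in an unchecked block. C# arithmetic is unchecked by default, but a project that compiles with overflow checking enabled (for example via <CheckForOverflowUnderflow>true</CheckForOverflowUnderflow>) would otherwise throw OverflowException as soon as a prime-multiply-and-add chain exceeds int.MaxValue; unchecked guarantees the hash simply wraps around. The sketch below only illustrates the pattern on a hypothetical Token type — it is not code from this commit.

using System;

public sealed class Token
{
    private readonly string text;
    private readonly int weight;

    public Token(string text, int weight)
    {
        this.text = text ?? throw new ArgumentNullException(nameof(text));
        this.weight = weight;
    }

    public override bool Equals(object obj) =>
        obj is Token other && other.text == text && other.weight == weight;

    public override int GetHashCode()
    {
        // Without unchecked, this multiply-and-add chain can throw
        // OverflowException in a project compiled with overflow checking;
        // inside unchecked it wraps around, which is what a hash code needs.
        unchecked
        {
            const int prime = 31;
            int result = 1;
            result = prime * result + text.GetHashCode();
            result = prime * result + weight;
            return result;
        }
    }
}

Nesting a second unchecked inside an enclosing one adds nothing, which is presumably what the second bullet ("Remove redundant unchecked operators") cleans up.
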
@@ -277,12 +277,15 @@ public override int GetHashCode()
return 1303507063;
}

int h = 1;
h = 31 * h + pattern.ToString().GetHashCode();
h = 31 * h + (int)pattern.Options;
h = 31 * h + (toLowerCase ? 1231 : 1237);
h = 31 * h + (stopWords != null ? stopWords.GetHashCode() : 0);
return h;
unchecked
{
int h = 1;
h = 31 * h + pattern.ToString().GetHashCode();
h = 31 * h + (int)pattern.Options;
h = 31 * h + (toLowerCase ? 1231 : 1237);
h = 31 * h + (stopWords != null ? stopWords.GetHashCode() : 0);
return h;
}
}

/// <summary>
63 changes: 43 additions & 20 deletions src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayMap.cs
@@ -1800,18 +1800,23 @@ public override bool Equals(object? obj)
/// <returns></returns>
public override int GetHashCode()
{
const int PRIME = 31; // arbitrary prime
int hash = PRIME;
using (var iter = GetEnumerator())
unchecked
{
while (iter.MoveNext())
const int PRIME = 31; // arbitrary prime
int hash = PRIME;
using (var iter = GetEnumerator())
{
hash = (hash * PRIME) ^ iter.CurrentKeyString.GetHashCode();
TValue? value = iter.CurrentValue;
hash = (hash * PRIME) ^ (value is null ? 0 : JCG.EqualityComparer<TValue>.Default.GetHashCode(value));
while (iter.MoveNext())
{
hash = (hash * PRIME) ^ iter.CurrentKeyString.GetHashCode();
TValue? value = iter.CurrentValue;
hash = (hash * PRIME) ^
(value is null ? 0 : JCG.EqualityComparer<TValue>.Default.GetHashCode(value));
}
}

return hash;
}
return hash;
}

[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -1832,16 +1837,22 @@ private int GetHashCode(char[] text, int startIndex, int length)
{
for (int i = startIndex; i < stop;)
{
int codePointAt = charUtils.CodePointAt(text, i, stop);
code = code * 31 + Character.ToLower(codePointAt, CultureInfo.InvariantCulture); // LUCENENET specific - need to use invariant culture to match Java
i += Character.CharCount(codePointAt);
unchecked
{
int codePointAt = charUtils.CodePointAt(text, i, stop);
code = code * 31 + Character.ToLower(codePointAt, CultureInfo.InvariantCulture); // LUCENENET specific - need to use invariant culture to match Java
i += Character.CharCount(codePointAt);
}
}
}
else
{
for (int i = startIndex; i < stop; i++)
{
code = code * 31 + text[i];
unchecked
{
code = code * 31 + text[i];
}
}
}
return code;
@@ -1859,16 +1870,22 @@ private int GetHashCode(ICharSequence text)
{
for (int i = 0; i < length;)
{
int codePointAt = charUtils.CodePointAt(text, i);
code = code * 31 + Character.ToLower(codePointAt, CultureInfo.InvariantCulture); // LUCENENET specific - need to use invariant culture to match Java
i += Character.CharCount(codePointAt);
unchecked
{
int codePointAt = charUtils.CodePointAt(text, i);
code = code * 31 + Character.ToLower(codePointAt, CultureInfo.InvariantCulture); // LUCENENET specific - need to use invariant culture to match Java
i += Character.CharCount(codePointAt);
}
}
}
else
{
for (int i = 0; i < length; i++)
{
code = code * 31 + text[i];
unchecked
{
code = code * 31 + text[i];
}
}
}
return code;
@@ -1886,16 +1903,22 @@ private int GetHashCode(string text)
{
for (int i = 0; i < length;)
{
int codePointAt = charUtils.CodePointAt(text, i);
code = code * 31 + Character.ToLower(codePointAt, CultureInfo.InvariantCulture); // LUCENENET specific - need to use invariant culture to match Java
i += Character.CharCount(codePointAt);
unchecked
{
int codePointAt = charUtils.CodePointAt(text, i);
code = code * 31 + Character.ToLower(codePointAt, CultureInfo.InvariantCulture); // LUCENENET specific - need to use invariant culture to match Java
i += Character.CharCount(codePointAt);
}
}
}
else
{
for (int i = 0; i < length; i++)
{
code = code * 31 + text[i];
unchecked
{
code = code * 31 + text[i];
}
}
}
return code;
17 changes: 10 additions & 7 deletions src/Lucene.Net.Analysis.SmartCn/Hhmm/PathNode.cs
@@ -48,13 +48,16 @@ public virtual int CompareTo(PathNode pn)
/// </summary>
public override int GetHashCode()
{
int prime = 31;
int result = 1;
result = prime * result + PreNode;
long temp;
temp = J2N.BitConversion.DoubleToInt64Bits(Weight);
result = prime * result + (int)(temp ^ (temp >>> 32));
return result;
unchecked
{
const int prime = 31;
int result = 1;
result = prime * result + PreNode;
long temp;
temp = J2N.BitConversion.DoubleToInt64Bits(Weight);
result = prime * result + (int)(temp ^ (temp >>> 32));
return result;
}
}

/// <summary>
23 changes: 13 additions & 10 deletions src/Lucene.Net.Analysis.SmartCn/Hhmm/SegToken.cs
@@ -83,18 +83,21 @@ public SegToken(char[] idArray, int start, int end, WordType wordType, int weigh
/// </summary>
public override int GetHashCode()
{
int prime = 31;
int result = 1;
for (int i = 0; i < CharArray.Length; i++)
unchecked
{
result = prime * result + CharArray[i];
const int prime = 31;
int result = 1;
for (int i = 0; i < CharArray.Length; i++)
{
result = prime * result + CharArray[i];
}
result = prime * result + EndOffset;
result = prime * result + Index;
result = prime * result + StartOffset;
result = prime * result + Weight;
result = prime * result + (int)WordType;
return result;
}
result = prime * result + EndOffset;
result = prime * result + Index;
result = prime * result + StartOffset;
result = prime * result + Weight;
result = prime * result + (int)WordType;
return result;
}

/// <summary>
23 changes: 13 additions & 10 deletions src/Lucene.Net.Analysis.SmartCn/Hhmm/SegTokenPair.cs
@@ -55,18 +55,21 @@ public SegTokenPair(char[] idArray, int from, int to, double weight)
/// </summary>
public override int GetHashCode()
{
int prime = 31;
int result = 1;
for (int i = 0; i < CharArray.Length; i++)
unchecked
{
result = prime * result + CharArray[i];
const int prime = 31;
int result = 1;
for (int i = 0; i < CharArray.Length; i++)
{
result = prime * result + CharArray[i];
}
result = prime * result + From;
result = prime * result + To;
long temp;
temp = J2N.BitConversion.DoubleToInt64Bits(Weight);
result = prime * result + (int)(temp ^ (temp >>> 32));
return result;
}
result = prime * result + From;
result = prime * result + To;
long temp;
temp = J2N.BitConversion.DoubleToInt64Bits(Weight);
result = prime * result + (int)(temp ^ (temp >>> 32));
return result;
}

/// <summary>
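
PathNode and SegTokenPair both fold a double Weight into the hash by reinterpreting its bits as a long and XOR-ing the upper 32 bits into the lower 32 — the same recipe as Java's Double.hashCode. Below is a minimal standalone sketch of that pattern using the BCL's BitConverter instead of J2N; the WeightedEdge type is hypothetical, not from this commit.

using System;

public sealed class WeightedEdge
{
    public int PreNode { get; }
    public double Weight { get; }

    public WeightedEdge(int preNode, double weight)
    {
        PreNode = preNode;
        Weight = weight;
    }

    public override bool Equals(object obj) =>
        obj is WeightedEdge other && other.PreNode == PreNode && other.Weight.Equals(Weight);

    public override int GetHashCode()
    {
        unchecked
        {
            const int prime = 31;
            int result = 1;
            result = prime * result + PreNode;

            // Reinterpret the double's bits as a long, then fold the high
            // 32 bits into the low 32 with XOR (Java's Double.hashCode recipe).
            long bits = BitConverter.DoubleToInt64Bits(Weight);
            result = prime * result + (int)(bits ^ (long)((ulong)bits >> 32));
            return result;
        }
    }
}
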
15 changes: 9 additions & 6 deletions src/Lucene.Net.Codecs/BlockTerms/BlockTermsReader.cs
@@ -28,15 +28,15 @@ namespace Lucene.Net.Codecs.BlockTerms

/// <summary>
/// Handles a terms dict, but decouples all details of
/// doc/freqs/positions reading to an instance of
/// doc/freqs/positions reading to an instance of
/// <see cref="PostingsReaderBase"/>. This class is reusable for
/// codecs that use a different format for
/// docs/freqs/positions (though codecs are also free to
/// make their own terms dict impl).
/// <para/>
/// This class also interacts with an instance of
/// <see cref="TermsIndexReaderBase"/>, to abstract away the specific
/// implementation of the terms dict index.
/// implementation of the terms dict index.
/// <para/>
/// @lucene.experimental
/// </summary>
@@ -90,7 +90,10 @@ public override object Clone()

public override int GetHashCode()
{
return Field.GetHashCode() * 31 + Term.GetHashCode();
unchecked
{
return Field.GetHashCode() * 31 + Term.GetHashCode();
}
}
}

@@ -372,7 +375,7 @@ public SegmentTermsEnum(FieldReader outerInstance)
/// the terms data from that"; eg FuzzyTermsEnum will
/// (usually) just immediately call seek again if we
/// return NOT_FOUND so it's a waste for us to fill in
/// the term that was actually NOT_FOUND
/// the term that was actually NOT_FOUND
/// </remarks>
public override SeekStatus SeekCeil(BytesRef target)
{
@@ -876,7 +879,7 @@ public override long Ord
// Does initial decode of next block of terms; this
// doesn't actually decode the docFreq, totalTermFreq,
// postings details (frq/prx offset, etc.) metadata;
// it just loads them as byte[] blobs which are then
// it just loads them as byte[] blobs which are then
// decoded on-demand if the metadata is ever requested
// for any term in this block. This enables terms-only
// intensive consumes (eg certain MTQs, respelling) to
@@ -1014,4 +1017,4 @@ public override void CheckIntegrity()
postingsReader.CheckIntegrity();
}
}
}
}
31 changes: 17 additions & 14 deletions src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs
@@ -73,26 +73,29 @@ internal TermData(long[] longs, byte[] bytes, int docFreq, long totalTermFreq)
// aren't NO_OUTPUTs.
public override int GetHashCode()
{
var hash = 0;
if (longs != null)
unchecked
{
var end = longs.Length;
for (var i = 0; i < end; i++)
var hash = 0;
if (longs != null)
{
hash -= (int) longs[i];
var end = longs.Length;
for (var i = 0; i < end; i++)
{
hash -= (int) longs[i];
}
}
}
if (bytes != null)
{
hash = -hash;
var end = bytes.Length;
for (var i = 0; i < end; i++)
if (bytes != null)
{
hash += bytes[i];
hash = -hash;
var end = bytes.Length;
for (var i = 0; i < end; i++)
{
hash += bytes[i];
}
}
hash += (int) (docFreq + totalTermFreq);
return hash;
}
hash += (int) (docFreq + totalTermFreq);
return hash;
}

public override bool Equals(object other)
13 changes: 8 additions & 5 deletions src/Lucene.Net.Expressions/ExpressionSortField.cs
@@ -27,7 +27,7 @@ internal class ExpressionSortField : SortField
{
private readonly ExpressionValueSource source;

internal ExpressionSortField(string name, ExpressionValueSource source, bool reverse)
internal ExpressionSortField(string name, ExpressionValueSource source, bool reverse)
: base(name, SortFieldType.CUSTOM, reverse)
{
this.source = source;
@@ -40,10 +40,13 @@ public override FieldComparer GetComparer(int numHits, int sortPos)

public override int GetHashCode()
{
int prime = 31;
int result = base.GetHashCode();
result = prime * result + ((source is null) ? 0 : source.GetHashCode());
return result;
unchecked
{
const int prime = 31;
int result = base.GetHashCode();
result = prime * result + (source is null ? 0 : source.GetHashCode());
return result;
}
}

public override bool Equals(object obj)
15 changes: 9 additions & 6 deletions src/Lucene.Net.Expressions/ExpressionValueSource.cs
@@ -120,12 +120,15 @@ public override string GetDescription()

public override int GetHashCode()
{
int prime = 31;
int result = 1;
result = prime * result + ((expression is null) ? 0 : expression.GetHashCode());
result = prime * result + (needsScores ? 1231 : 1237);
result = prime * result + Arrays.GetHashCode(variables);
return result;
unchecked
{
const int prime = 31;
int result = 1;
result = prime * result + (expression is null ? 0 : expression.GetHashCode());
result = prime * result + (needsScores ? 1231 : 1237);
result = prime * result + Arrays.GetHashCode(variables);
return result;
}
}

public override bool Equals(object obj)
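
One more recurring detail: the 1231/1237 constants in the first hunk and in ExpressionValueSource are the values Java's Boolean.hashCode returns for true and false, presumably kept so the ported hashes mirror the upstream Java code. A short sketch of the same convention follows; the FeatureFlag type is hypothetical, not from this commit.

using System;

public sealed class FeatureFlag
{
    private readonly string name;
    private readonly bool enabled;

    public FeatureFlag(string name, bool enabled)
    {
        this.name = name ?? throw new ArgumentNullException(nameof(name));
        this.enabled = enabled;
    }

    public override bool Equals(object obj) =>
        obj is FeatureFlag other && other.name == name && other.enabled == enabled;

    public override int GetHashCode()
    {
        unchecked
        {
            const int prime = 31;
            int result = 1;
            result = prime * result + name.GetHashCode();
            result = prime * result + (enabled ? 1231 : 1237); // Java's Boolean.hashCode values
            return result;
        }
    }
}
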