// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

using Internal.Runtime.CompilerServices;

#if BIT64
using nuint = System.UInt64;
#else
using nuint = System.UInt32;
#endif
  13. namespace System
  14. {
  15. internal static partial class Marvin
  16. {
  17. /// <summary>
  18. /// Compute a Marvin hash and collapse it into a 32-bit hash.
  19. /// </summary>
  20. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  21. public static int ComputeHash32(ReadOnlySpan<byte> data, ulong seed) => ComputeHash32(ref MemoryMarshal.GetReference(data), data.Length, (uint)seed, (uint)(seed >> 32));
  22. /// <summary>
  23. /// Compute a Marvin hash and collapse it into a 32-bit hash.
  24. /// </summary>
  25. public static int ComputeHash32(ref byte data, int count, uint p0, uint p1)
  26. {
  27. nuint ucount = (nuint)count;
  28. nuint byteOffset = 0;
  29. while (ucount >= 8)
  30. {
  31. p0 += Unsafe.ReadUnaligned<uint>(ref Unsafe.AddByteOffset(ref data, byteOffset));
  32. Block(ref p0, ref p1);
  33. p0 += Unsafe.ReadUnaligned<uint>(ref Unsafe.AddByteOffset(ref data, byteOffset + 4));
  34. Block(ref p0, ref p1);
  35. byteOffset += 8;
  36. ucount -= 8;
  37. }
  38. switch (ucount)
  39. {
  40. case 4:
  41. p0 += Unsafe.ReadUnaligned<uint>(ref Unsafe.AddByteOffset(ref data, byteOffset));
  42. Block(ref p0, ref p1);
  43. goto case 0;
  44. case 0:
  45. p0 += 0x80u;
  46. break;
  47. case 5:
  48. p0 += Unsafe.ReadUnaligned<uint>(ref Unsafe.AddByteOffset(ref data, byteOffset));
  49. byteOffset += 4;
  50. Block(ref p0, ref p1);
  51. goto case 1;
  52. case 1:
  53. p0 += 0x8000u | Unsafe.AddByteOffset(ref data, byteOffset);
  54. break;
  55. case 6:
  56. p0 += Unsafe.ReadUnaligned<uint>(ref Unsafe.AddByteOffset(ref data, byteOffset));
  57. byteOffset += 4;
  58. Block(ref p0, ref p1);
  59. goto case 2;
  60. case 2:
  61. p0 += 0x800000u | Unsafe.ReadUnaligned<ushort>(ref Unsafe.AddByteOffset(ref data, byteOffset));
  62. break;
  63. case 7:
  64. p0 += Unsafe.ReadUnaligned<uint>(ref Unsafe.AddByteOffset(ref data, byteOffset));
  65. byteOffset += 4;
  66. Block(ref p0, ref p1);
  67. goto case 3;
  68. case 3:
  69. p0 += 0x80000000u | (((uint)(Unsafe.AddByteOffset(ref data, byteOffset + 2))) << 16) | (uint)(Unsafe.ReadUnaligned<ushort>(ref Unsafe.AddByteOffset(ref data, byteOffset)));
  70. break;
  71. default:
  72. Debug.Fail("Should not get here.");
  73. break;
  74. }
  75. Block(ref p0, ref p1);
  76. Block(ref p0, ref p1);
  77. return (int)(p1 ^ p0);
  78. }
  79. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  80. private static void Block(ref uint rp0, ref uint rp1)
  81. {
  82. uint p0 = rp0;
  83. uint p1 = rp1;
  84. p1 ^= p0;
  85. p0 = _rotl(p0, 20);
  86. p0 += p1;
  87. p1 = _rotl(p1, 9);
  88. p1 ^= p0;
  89. p0 = _rotl(p0, 27);
  90. p0 += p1;
  91. p1 = _rotl(p1, 19);
  92. rp0 = p0;
  93. rp1 = p1;
  94. }
  95. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  96. private static uint _rotl(uint value, int shift)
  97. {
  98. // This is expected to be optimized into a single rol (or ror with negated shift value) instruction
  99. return (value << shift) | (value >> (32 - shift));
  100. }
  101. public static ulong DefaultSeed { get; } = GenerateSeed();
  102. private static unsafe ulong GenerateSeed()
  103. {
  104. ulong seed;
  105. Interop.GetRandomBytes((byte*)&seed, sizeof(ulong));
  106. return seed;
  107. }
  108. }
  109. }