SpanHelpers.Byte.cs 72 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668
  1. // Licensed to the .NET Foundation under one or more agreements.
  2. // The .NET Foundation licenses this file to you under the MIT license.
  3. // See the LICENSE file in the project root for more information.
  4. using System.Diagnostics;
  5. using System.Numerics;
  6. using System.Runtime.CompilerServices;
  7. using System.Runtime.Intrinsics;
  8. using System.Runtime.Intrinsics.X86;
  9. using Internal.Runtime.CompilerServices;
  10. #if BIT64
  11. using nuint = System.UInt64;
  12. #else
  13. using nuint = System.UInt32;
  14. #endif // BIT64
  15. namespace System
  16. {
  17. internal static partial class SpanHelpers // .Byte
  18. {
  19. public static int IndexOf(ref byte searchSpace, int searchSpaceLength, ref byte value, int valueLength)
  20. {
  21. Debug.Assert(searchSpaceLength >= 0);
  22. Debug.Assert(valueLength >= 0);
  23. if (valueLength == 0)
  24. return 0; // A zero-length sequence is always treated as "found" at the start of the search space.
  25. byte valueHead = value;
  26. ref byte valueTail = ref Unsafe.Add(ref value, 1);
  27. int valueTailLength = valueLength - 1;
  28. int remainingSearchSpaceLength = searchSpaceLength - valueTailLength;
  29. int offset = 0;
  30. while (remainingSearchSpaceLength > 0)
  31. {
  32. // Do a quick search for the first element of "value".
  33. int relativeIndex = IndexOf(ref Unsafe.Add(ref searchSpace, offset), valueHead, remainingSearchSpaceLength);
  34. if (relativeIndex == -1)
  35. break;
  36. remainingSearchSpaceLength -= relativeIndex;
  37. offset += relativeIndex;
  38. if (remainingSearchSpaceLength <= 0)
  39. break; // The unsearched portion is now shorter than the sequence we're looking for. So it can't be there.
  40. // Found the first element of "value". See if the tail matches.
  41. if (SequenceEqual(ref Unsafe.Add(ref searchSpace, offset + 1), ref valueTail, valueTailLength))
  42. return offset; // The tail matched. Return a successful find.
  43. remainingSearchSpaceLength--;
  44. offset++;
  45. }
  46. return -1;
  47. }
  48. public static int IndexOfAny(ref byte searchSpace, int searchSpaceLength, ref byte value, int valueLength)
  49. {
  50. Debug.Assert(searchSpaceLength >= 0);
  51. Debug.Assert(valueLength >= 0);
  52. if (valueLength == 0)
  53. return -1; // A zero-length set of values is always treated as "not found".
  54. int offset = -1;
  55. for (int i = 0; i < valueLength; i++)
  56. {
  57. var tempIndex = IndexOf(ref searchSpace, Unsafe.Add(ref value, i), searchSpaceLength);
  58. if ((uint)tempIndex < (uint)offset)
  59. {
  60. offset = tempIndex;
  61. // Reduce space for search, cause we don't care if we find the search value after the index of a previously found value
  62. searchSpaceLength = tempIndex;
  63. if (offset == 0)
  64. break;
  65. }
  66. }
  67. return offset;
  68. }
  69. public static int LastIndexOfAny(ref byte searchSpace, int searchSpaceLength, ref byte value, int valueLength)
  70. {
  71. Debug.Assert(searchSpaceLength >= 0);
  72. Debug.Assert(valueLength >= 0);
  73. if (valueLength == 0)
  74. return -1; // A zero-length set of values is always treated as "not found".
  75. int offset = -1;
  76. for (int i = 0; i < valueLength; i++)
  77. {
  78. var tempIndex = LastIndexOf(ref searchSpace, Unsafe.Add(ref value, i), searchSpaceLength);
  79. if (tempIndex > offset)
  80. offset = tempIndex;
  81. }
  82. return offset;
  83. }
// Adapted from IndexOf(...)
// Returns true if "value" occurs anywhere in the first "length" bytes of "searchSpace".
// Same scanning strategy as IndexOf, but only a boolean result is needed, so matches
// can exit via a single "Found" label without computing the exact position.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe bool Contains(ref byte searchSpace, byte value, int length)
{
    Debug.Assert(length >= 0);

    uint uValue = value; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Vector.IsHardwareAccelerated && length >= Vector<byte>.Count * 2)
    {
        // Only scan sequentially up to the first Vector-aligned boundary; the rest is
        // handled by the vectorized loop below.
        lengthToExamine = UnalignedCountVector(ref searchSpace);
    }
SequentialScan:
    // Unrolled byte-at-a-time scan: 8 at a time, then 4, then the remainder singly.
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 0) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 4) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 5) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 6) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 7))
        {
            goto Found;
        }

        offset += 8;
    }

    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 0) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
        {
            goto Found;
        }

        offset += 4;
    }

    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;

        offset += 1;
    }

    if (Vector.IsHardwareAccelerated && ((int)(byte*)offset < length))
    {
        // Round the remaining bytes down to a whole number of Vector<byte> chunks.
        lengthToExamine = (IntPtr)((length - (int)(byte*)offset) & ~(Vector<byte>.Count - 1));

        Vector<byte> values = new Vector<byte>(value);

        while ((byte*)lengthToExamine > (byte*)offset)
        {
            var matches = Vector.Equals(values, LoadVector(ref searchSpace, offset));
            if (Vector<byte>.Zero.Equals(matches))
            {
                // No element of this chunk matched; advance a full vector.
                offset += Vector<byte>.Count;
                continue;
            }

            goto Found;
        }

        if ((int)(byte*)offset < length)
        {
            // Finish the sub-vector tail with the sequential scan above.
            lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
            goto SequentialScan;
        }
    }

    return false;
Found:
    return true;
}
// Returns the offset of the first occurrence of "value" in the first "length" bytes of
// "searchSpace", or -1 if not found. Uses AVX2 or SSE2 intrinsics when available, falling
// back to Vector<T> and then to an unrolled scalar scan. The FoundN labels exist so the
// unrolled loops can return (offset + N) without extra arithmetic on the hot path.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int IndexOf(ref byte searchSpace, byte value, int length)
{
    Debug.Assert(length >= 0);

    uint uValue = value; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Avx2.IsSupported || Sse2.IsSupported)
    {
        // Avx2 branch also operates on Sse2 sizes, so check is combined.
        if (length >= Vector128<byte>.Count * 2)
        {
            // Scan sequentially only up to the first 128-bit-aligned boundary.
            lengthToExamine = UnalignedCountVector128(ref searchSpace);
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if (length >= Vector<byte>.Count * 2)
        {
            lengthToExamine = UnalignedCountVector(ref searchSpace);
        }
    }
SequentialScan:
    // Unrolled scalar scan: 8 bytes at a time, then 4, then singly.
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1))
            goto Found1;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2))
            goto Found2;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
            goto Found3;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 4))
            goto Found4;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 5))
            goto Found5;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 6))
            goto Found6;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 7))
            goto Found7;

        offset += 8;
    }

    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1))
            goto Found1;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2))
            goto Found2;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
            goto Found3;

        offset += 4;
    }

    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;

        offset += 1;
    }

    // We get past SequentialScan only if IsHardwareAccelerated or intrinsic .IsSupported is true; and remain length is greater than Vector length.
    // However, we still have the redundant check to allow the JIT to see that the code is unreachable and eliminate it when the platform does not
    // have hardware accelerated. After processing Vector lengths we return to SequentialScan to finish any remaining.
    if (Avx2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            if ((((nuint)Unsafe.AsPointer(ref searchSpace) + (nuint)offset) & (nuint)(Vector256<byte>.Count - 1)) != 0)
            {
                // Not currently aligned to Vector256 (is aligned to Vector128); this can cause a problem for searches
                // with no upper bound e.g. String.strlen.
                // Start with a check on Vector128 to align to Vector256, before moving to processing Vector256.
                // This ensures we do not fault across memory pages while searching for an end of string.
                Vector128<byte> values = Vector128.Create(value);
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as below
                int matches = Sse2.MoveMask(Sse2.CompareEqual(values, search));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                }
            }

            lengthToExamine = GetByteVector256SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector256<byte> values = Vector256.Create(value);
                do
                {
                    Vector256<byte> search = LoadVector256(ref searchSpace, offset);
                    int matches = Avx2.MoveMask(Avx2.CompareEqual(values, search));
                    // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                    // So the bit position in 'matches' corresponds to the element offset.
                    if (matches == 0)
                    {
                        // Zero flags set so no matches
                        offset += Vector256<byte>.Count;
                        continue;
                    }

                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                } while ((byte*)lengthToExamine > (byte*)offset);
            }

            // Handle a remaining 128-bit-sized chunk before falling back to scalar.
            lengthToExamine = GetByteVector128SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> values = Vector128.Create(value);
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as above
                int matches = Sse2.MoveMask(Sse2.CompareEqual(values, search));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                }
            }

            if ((int)(byte*)offset < length)
            {
                // Finish the sub-vector tail with the sequential scan above.
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Sse2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVector128SpanLength(offset, length);

            Vector128<byte> values = Vector128.Create(value);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as above
                int matches = Sse2.MoveMask(Sse2.CompareEqual(values, search));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                    continue;
                }

                // Find bitflag offset of first match and add to current offset
                return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
            }

            if ((int)(byte*)offset < length)
            {
                // Finish the sub-vector tail with the sequential scan above.
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVectorSpanLength(offset, length);

            Vector<byte> values = new Vector<byte>(value);

            while ((byte*)lengthToExamine > (byte*)offset)
            {
                var matches = Vector.Equals(values, LoadVector(ref searchSpace, offset));
                if (Vector<byte>.Zero.Equals(matches))
                {
                    offset += Vector<byte>.Count;
                    continue;
                }

                // Find offset of first match and add to current offset
                return (int)(byte*)offset + LocateFirstFoundByte(matches);
            }

            if ((int)(byte*)offset < length)
            {
                // Finish the sub-vector tail with the sequential scan above.
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }

    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
// Searches backwards for the last occurrence of the byte sequence "value" within
// "searchSpace". Returns the offset of the match, or -1 when the sequence is not present.
public static int LastIndexOf(ref byte searchSpace, int searchSpaceLength, ref byte value, int valueLength)
{
    Debug.Assert(searchSpaceLength >= 0);
    Debug.Assert(valueLength >= 0);

    if (valueLength == 0)
        return 0; // A zero-length sequence is always treated as "found" at the start of the search space.

    byte valueHead = value;
    ref byte valueTail = ref Unsafe.Add(ref value, 1);
    int valueTailLength = valueLength - 1;

    // "offset" counts how much of the tail of the search space has already been
    // excluded by earlier (right-most) failed candidates.
    int offset = 0;
    for (; ; )
    {
        Debug.Assert(0 <= offset && offset <= searchSpaceLength); // Ensures no deceptive underflows in the computation of "remainingSearchSpaceLength".
        int remainingSearchSpaceLength = searchSpaceLength - offset - valueTailLength;
        if (remainingSearchSpaceLength <= 0)
            break; // The unsearched portion is now shorter than the sequence we're looking for. So it can't be there.

        // Do a quick search for the first element of "value".
        int relativeIndex = LastIndexOf(ref searchSpace, valueHead, remainingSearchSpaceLength);
        if (relativeIndex == -1)
            break;

        // Found the first element of "value". See if the tail matches.
        if (SequenceEqual(ref Unsafe.Add(ref searchSpace, relativeIndex + 1), ref valueTail, valueTailLength))
            return relativeIndex; // The tail matched. Return a successful find.

        // Candidate failed: exclude everything from this candidate (inclusive) rightwards
        // and search the remaining prefix on the next iteration.
        offset += remainingSearchSpaceLength - relativeIndex;
    }
    return -1;
}
// Returns the offset of the last occurrence of "value" in the first "length" bytes of
// "searchSpace", or -1 if not found. Scans backwards from the end: an unrolled scalar
// pass over the unaligned tail, then Vector<T>-sized chunks walking toward the start.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int LastIndexOf(ref byte searchSpace, byte value, int length)
{
    Debug.Assert(length >= 0);

    uint uValue = value; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    IntPtr offset = (IntPtr)length; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Vector.IsHardwareAccelerated && length >= Vector<byte>.Count * 2)
    {
        // Scan only the unaligned tail sequentially; the rest is vectorized below.
        lengthToExamine = UnalignedCountVectorFromEnd(ref searchSpace, length);
    }
SequentialScan:
    // Unrolled backwards scan: 8 bytes at a time (highest offset checked first),
    // then 4, then singly. "offset" always points at the start of the current window.
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;
        offset -= 8;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 7))
            goto Found7;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 6))
            goto Found6;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 5))
            goto Found5;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 4))
            goto Found4;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
            goto Found3;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2))
            goto Found2;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1))
            goto Found1;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
    }

    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;
        offset -= 4;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
            goto Found3;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2))
            goto Found2;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1))
            goto Found1;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
    }

    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;
        offset -= 1;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
    }

    if (Vector.IsHardwareAccelerated && ((byte*)offset > (byte*)0))
    {
        // Round the bytes below "offset" down to a whole number of Vector<byte> chunks.
        lengthToExamine = (IntPtr)((int)(byte*)offset & ~(Vector<byte>.Count - 1));

        Vector<byte> values = new Vector<byte>(value);

        while ((byte*)lengthToExamine > (byte*)(Vector<byte>.Count - 1))
        {
            // Load the vector that ends at "offset" and test it for matches.
            var matches = Vector.Equals(values, LoadVector(ref searchSpace, offset - Vector<byte>.Count));
            if (Vector<byte>.Zero.Equals(matches))
            {
                offset -= Vector<byte>.Count;
                lengthToExamine -= Vector<byte>.Count;
                continue;
            }

            // Find offset of first match and add to current offset
            return (int)(offset) - Vector<byte>.Count + LocateLastFoundByte(matches);
        }
        if ((byte*)offset > (byte*)0)
        {
            // Finish the sub-vector head with the sequential scan above.
            lengthToExamine = offset;
            goto SequentialScan;
        }
    }
    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
// Returns the offset of the first occurrence of either "value0" or "value1" in the first
// "length" bytes of "searchSpace", or -1 if neither is found. Same structure as the
// single-value IndexOf: unrolled scalar scan plus AVX2 / SSE2 / Vector<T> paths, with the
// two per-value equality masks combined via a bitwise OR before testing for matches.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int IndexOfAny(ref byte searchSpace, byte value0, byte value1, int length)
{
    Debug.Assert(length >= 0);

    uint uValue0 = value0; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    uint uValue1 = value1; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Avx2.IsSupported || Sse2.IsSupported)
    {
        // Avx2 branch also operates on Sse2 sizes, so check is combined.
        if (length >= Vector128<byte>.Count * 2)
        {
            // Scan sequentially only up to the first 128-bit-aligned boundary.
            lengthToExamine = UnalignedCountVector128(ref searchSpace);
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if (length >= Vector<byte>.Count * 2)
        {
            lengthToExamine = UnalignedCountVector(ref searchSpace);
        }
    }
SequentialScan:
    // Each byte is loaded once into "lookUp" and compared against both values.
    uint lookUp;
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 4);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 5);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found5;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 6);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found6;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 7);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found7;

        offset += 8;
    }

    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found3;

        offset += 4;
    }

    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;

        offset += 1;
    }

    // We get past SequentialScan only if IsHardwareAccelerated or intrinsic .IsSupported is true. However, we still have the redundant check to allow
    // the JIT to see that the code is unreachable and eliminate it when the platform does not have hardware accelerated.
    if (Avx2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVector256SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector256<byte> values0 = Vector256.Create(value0);
                Vector256<byte> values1 = Vector256.Create(value1);
                do
                {
                    Vector256<byte> search = LoadVector256(ref searchSpace, offset);
                    // Bitwise Or to combine the matches and MoveMask to convert them to bitflags
                    int matches = Avx2.MoveMask(
                                    Avx2.Or(
                                        Avx2.CompareEqual(values0, search),
                                        Avx2.CompareEqual(values1, search)));
                    // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                    // So the bit position in 'matches' corresponds to the element offset.
                    if (matches == 0)
                    {
                        // Zero flags set so no matches
                        offset += Vector256<byte>.Count;
                        continue;
                    }

                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                } while ((byte*)lengthToExamine > (byte*)offset);
            }

            // Handle a remaining 128-bit-sized chunk before falling back to scalar.
            lengthToExamine = GetByteVector128SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> values0 = Vector128.Create(value0);
                Vector128<byte> values1 = Vector128.Create(value1);
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as above
                int matches = Sse2.MoveMask(
                                Sse2.Or(
                                    Sse2.CompareEqual(values0, search),
                                    Sse2.CompareEqual(values1, search)));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                }
            }

            if ((int)(byte*)offset < length)
            {
                // Finish the sub-vector tail with the sequential scan above.
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Sse2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVector128SpanLength(offset, length);

            Vector128<byte> values0 = Vector128.Create(value0);
            Vector128<byte> values1 = Vector128.Create(value1);

            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as above
                int matches = Sse2.MoveMask(
                                Sse2.Or(
                                    Sse2.CompareEqual(values0, search),
                                    Sse2.CompareEqual(values1, search)));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                    continue;
                }

                // Find bitflag offset of first match and add to current offset
                return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
            }

            if ((int)(byte*)offset < length)
            {
                // Finish the sub-vector tail with the sequential scan above.
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVectorSpanLength(offset, length);

            Vector<byte> values0 = new Vector<byte>(value0);
            Vector<byte> values1 = new Vector<byte>(value1);

            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector<byte> search = LoadVector(ref searchSpace, offset);
                var matches = Vector.BitwiseOr(
                                Vector.Equals(search, values0),
                                Vector.Equals(search, values1));
                if (Vector<byte>.Zero.Equals(matches))
                {
                    offset += Vector<byte>.Count;
                    continue;
                }

                // Find offset of first match and add to current offset
                return (int)(byte*)offset + LocateFirstFoundByte(matches);
            }

            if ((int)(byte*)offset < length)
            {
                // Finish the sub-vector tail with the sequential scan above.
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }

    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
// Returns the index of the first occurrence of value0, value1 or value2 in the first
// 'length' bytes starting at 'searchSpace', or -1 if none of them occurs.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int IndexOfAny(ref byte searchSpace, byte value0, byte value1, byte value2, int length)
{
    Debug.Assert(length >= 0);

    uint uValue0 = value0; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    uint uValue1 = value1;
    uint uValue2 = value2;
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Avx2.IsSupported || Sse2.IsSupported)
    {
        // Avx2 branch also operates on Sse2 sizes, so check is combined.
        if (length >= Vector128<byte>.Count * 2)
        {
            // Restrict the initial scalar scan to the unaligned prefix so the vector
            // loops below start from a 16-byte-aligned address.
            lengthToExamine = UnalignedCountVector128(ref searchSpace);
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if (length >= Vector<byte>.Count * 2)
        {
            lengthToExamine = UnalignedCountVector(ref searchSpace);
        }
    }
SequentialScan:
    // Scalar scan, unrolled by 8 then by 4; handles short inputs, the unaligned
    // prefix before the vector loops, and any tail when they jump back here.
    uint lookUp;
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 4);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 5);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found5;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 6);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found6;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 7);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found7;

        offset += 8;
    }

    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found3;

        offset += 4;
    }

    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;

        offset += 1;
    }

    if (Avx2.IsSupported)
    {
        // Reached only when vector-sized data remains; offset is 16-byte aligned here.
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVector256SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector256<byte> values0 = Vector256.Create(value0);
                Vector256<byte> values1 = Vector256.Create(value1);
                Vector256<byte> values2 = Vector256.Create(value2);
                do
                {
                    Vector256<byte> search = LoadVector256(ref searchSpace, offset);
                    Vector256<byte> matches0 = Avx2.CompareEqual(values0, search);
                    Vector256<byte> matches1 = Avx2.CompareEqual(values1, search);
                    Vector256<byte> matches2 = Avx2.CompareEqual(values2, search);
                    // Bitwise Or to combine the matches and MoveMask to convert them to bitflags
                    int matches = Avx2.MoveMask(Avx2.Or(Avx2.Or(matches0, matches1), matches2));
                    // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                    // So the bit position in 'matches' corresponds to the element offset.
                    if (matches == 0)
                    {
                        // Zero flags set so no matches
                        offset += Vector256<byte>.Count;
                        continue;
                    }

                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                } while ((byte*)lengthToExamine > (byte*)offset);
            }

            // Handle a single remaining 16-byte chunk with Sse2, if one fits.
            lengthToExamine = GetByteVector128SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> values0 = Vector128.Create(value0);
                Vector128<byte> values1 = Vector128.Create(value1);
                Vector128<byte> values2 = Vector128.Create(value2);
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);
                Vector128<byte> matches0 = Sse2.CompareEqual(values0, search);
                Vector128<byte> matches1 = Sse2.CompareEqual(values1, search);
                Vector128<byte> matches2 = Sse2.CompareEqual(values2, search);
                // Same method as above
                int matches = Sse2.MoveMask(Sse2.Or(Sse2.Or(matches0, matches1), matches2));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                }
            }

            if ((int)(byte*)offset < length)
            {
                // Fewer than 16 bytes remain; finish in the scalar loop.
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Sse2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVector128SpanLength(offset, length);

            Vector128<byte> values0 = Vector128.Create(value0);
            Vector128<byte> values1 = Vector128.Create(value1);
            Vector128<byte> values2 = Vector128.Create(value2);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);
                Vector128<byte> matches0 = Sse2.CompareEqual(values0, search);
                Vector128<byte> matches1 = Sse2.CompareEqual(values1, search);
                Vector128<byte> matches2 = Sse2.CompareEqual(values2, search);
                // Same method as above
                int matches = Sse2.MoveMask(Sse2.Or(Sse2.Or(matches0, matches1), matches2));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                    continue;
                }

                // Find bitflag offset of first match and add to current offset
                return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
            }

            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVectorSpanLength(offset, length);

            Vector<byte> values0 = new Vector<byte>(value0);
            Vector<byte> values1 = new Vector<byte>(value1);
            Vector<byte> values2 = new Vector<byte>(value2);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector<byte> search = LoadVector(ref searchSpace, offset);
                var matches = Vector.BitwiseOr(
                                Vector.BitwiseOr(
                                    Vector.Equals(search, values0),
                                    Vector.Equals(search, values1)),
                                Vector.Equals(search, values2));
                if (Vector<byte>.Zero.Equals(matches))
                {
                    offset += Vector<byte>.Count;
                    continue;
                }

                // Find offset of first match and add to current offset
                return (int)(byte*)offset + LocateFirstFoundByte(matches);
            }

            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
// Returns the index of the last occurrence of value0 or value1 in the first
// 'length' bytes starting at 'searchSpace', or -1 if neither occurs.
// Scans backwards from the end of the buffer.
public static unsafe int LastIndexOfAny(ref byte searchSpace, byte value0, byte value1, int length)
{
    Debug.Assert(length >= 0);

    uint uValue0 = value0; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    uint uValue1 = value1;
    IntPtr offset = (IntPtr)length; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Vector.IsHardwareAccelerated && length >= Vector<byte>.Count * 2)
    {
        // Scalar-scan only the unaligned suffix so the vector loop below
        // works on aligned, whole-vector chunks.
        lengthToExamine = UnalignedCountVectorFromEnd(ref searchSpace, length);
    }
SequentialScan:
    // Backwards scalar scan, unrolled by 8 then by 4; checks the highest
    // offsets first so the last occurrence wins.
    uint lookUp;
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;
        offset -= 8;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 7);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found7;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 6);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found6;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 5);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found5;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 4);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
    }

    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;
        offset -= 4;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
    }

    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;
        offset -= 1;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
    }

    if (Vector.IsHardwareAccelerated && ((byte*)offset > (byte*)0))
    {
        // 'offset' bytes remain in front of the current position; process the
        // whole-vector portion of them, walking backwards a vector at a time.
        lengthToExamine = (IntPtr)((int)(byte*)offset & ~(Vector<byte>.Count - 1));

        Vector<byte> values0 = new Vector<byte>(value0);
        Vector<byte> values1 = new Vector<byte>(value1);
        while ((byte*)lengthToExamine > (byte*)(Vector<byte>.Count - 1))
        {
            Vector<byte> search = LoadVector(ref searchSpace, offset - Vector<byte>.Count);
            var matches = Vector.BitwiseOr(
                            Vector.Equals(search, values0),
                            Vector.Equals(search, values1));
            if (Vector<byte>.Zero.Equals(matches))
            {
                offset -= Vector<byte>.Count;
                lengthToExamine -= Vector<byte>.Count;
                continue;
            }

            // Find offset of the last match within the vector and add to current offset
            return (int)(offset) - Vector<byte>.Count + LocateLastFoundByte(matches);
        }

        if ((byte*)offset > (byte*)0)
        {
            // Sub-vector remainder at the start of the buffer; finish scalar.
            lengthToExamine = offset;
            goto SequentialScan;
        }
    }
    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
// Returns the index of the last occurrence of value0, value1 or value2 in the first
// 'length' bytes starting at 'searchSpace', or -1 if none occurs.
// Scans backwards from the end of the buffer.
public static unsafe int LastIndexOfAny(ref byte searchSpace, byte value0, byte value1, byte value2, int length)
{
    Debug.Assert(length >= 0);

    uint uValue0 = value0; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    uint uValue1 = value1;
    uint uValue2 = value2;
    IntPtr offset = (IntPtr)length; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Vector.IsHardwareAccelerated && length >= Vector<byte>.Count * 2)
    {
        // Scalar-scan only the unaligned suffix so the vector loop below
        // works on aligned, whole-vector chunks.
        lengthToExamine = UnalignedCountVectorFromEnd(ref searchSpace, length);
    }
SequentialScan:
    // Backwards scalar scan, unrolled by 8 then by 4; checks the highest
    // offsets first so the last occurrence wins.
    uint lookUp;
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;
        offset -= 8;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 7);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found7;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 6);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found6;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 5);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found5;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 4);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
    }

    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;
        offset -= 4;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
    }

    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;
        offset -= 1;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
    }

    if (Vector.IsHardwareAccelerated && ((byte*)offset > (byte*)0))
    {
        // 'offset' bytes remain in front of the current position; process the
        // whole-vector portion of them, walking backwards a vector at a time.
        lengthToExamine = (IntPtr)((int)(byte*)offset & ~(Vector<byte>.Count - 1));

        Vector<byte> values0 = new Vector<byte>(value0);
        Vector<byte> values1 = new Vector<byte>(value1);
        Vector<byte> values2 = new Vector<byte>(value2);
        while ((byte*)lengthToExamine > (byte*)(Vector<byte>.Count - 1))
        {
            Vector<byte> search = LoadVector(ref searchSpace, offset - Vector<byte>.Count);

            var matches = Vector.BitwiseOr(
                            Vector.BitwiseOr(
                                Vector.Equals(search, values0),
                                Vector.Equals(search, values1)),
                            Vector.Equals(search, values2));

            if (Vector<byte>.Zero.Equals(matches))
            {
                offset -= Vector<byte>.Count;
                lengthToExamine -= Vector<byte>.Count;
                continue;
            }

            // Find offset of the last match within the vector and add to current offset
            return (int)(offset) - Vector<byte>.Count + LocateLastFoundByte(matches);
        }

        if ((byte*)offset > (byte*)0)
        {
            // Sub-vector remainder at the start of the buffer; finish scalar.
            lengthToExamine = offset;
            goto SequentialScan;
        }
    }
    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
// Optimized byte-based SequenceEquals. The "length" parameter for this one is declared a nuint rather than int as we also use it for types other than byte
// where the length can exceed 2Gb once scaled by sizeof(T).
// Returns true when the 'length' bytes at 'first' and 'second' are identical
// (including the trivial case where both refs are the same location).
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe bool SequenceEqual(ref byte first, ref byte second, nuint length)
{
    if (Unsafe.AreSame(ref first, ref second))
        goto Equal;

    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)(void*)length;

    if (Vector.IsHardwareAccelerated && (byte*)lengthToExamine >= (byte*)Vector<byte>.Count)
    {
        // Compare a vector at a time; the final compare below deliberately
        // re-reads a (possibly overlapping) vector ending exactly at 'length'
        // so no scalar tail loop is needed.
        lengthToExamine -= Vector<byte>.Count;
        while ((byte*)lengthToExamine > (byte*)offset)
        {
            if (LoadVector(ref first, offset) != LoadVector(ref second, offset))
            {
                goto NotEqual;
            }
            offset += Vector<byte>.Count;
        }
        return LoadVector(ref first, lengthToExamine) == LoadVector(ref second, lengthToExamine);
    }

    if ((byte*)lengthToExamine >= (byte*)sizeof(UIntPtr))
    {
        // Same overlapping-final-read trick at native-word granularity.
        lengthToExamine -= sizeof(UIntPtr);
        while ((byte*)lengthToExamine > (byte*)offset)
        {
            if (LoadUIntPtr(ref first, offset) != LoadUIntPtr(ref second, offset))
            {
                goto NotEqual;
            }
            offset += sizeof(UIntPtr);
        }
        return LoadUIntPtr(ref first, lengthToExamine) == LoadUIntPtr(ref second, lengthToExamine);
    }

    // Fewer than sizeof(UIntPtr) bytes: compare one byte at a time.
    while ((byte*)lengthToExamine > (byte*)offset)
    {
        if (Unsafe.AddByteOffset(ref first, offset) != Unsafe.AddByteOffset(ref second, offset))
            goto NotEqual;
        offset += 1;
    }

Equal:
    return true;
NotEqual: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return false;
}
// Vector sub-search adapted from https://github.com/aspnet/KestrelHttpServer/pull/1138
// Given a comparison-result vector (0xFF in matching byte lanes, 0x00 elsewhere),
// returns the byte index of the first (lowest-offset) matching lane.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int LocateFirstFoundByte(Vector<byte> match)
{
    var vector64 = Vector.AsVectorUInt64(match);
    ulong candidate = 0;
    int i = 0;
    // Scan 8-byte chunks for the first non-zero one.
    // Pattern unrolled by jit https://github.com/dotnet/coreclr/pull/8001
    for (; i < Vector<ulong>.Count; i++)
    {
        candidate = vector64[i];
        if (candidate != 0)
        {
            break;
        }
    }

    // Single LEA instruction with jitted const (using function result)
    return i * 8 + LocateFirstFoundByte(candidate);
}
// Lexicographic byte-wise comparison of two buffers, mirroring
// ReadOnlySpan<byte>.SequenceCompareTo semantics: negative when first < second,
// positive when first > second, and (firstLength - secondLength) when the
// common prefix is equal.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int SequenceCompareTo(ref byte first, int firstLength, ref byte second, int secondLength)
{
    Debug.Assert(firstLength >= 0);
    Debug.Assert(secondLength >= 0);

    if (Unsafe.AreSame(ref first, ref second))
        goto Equal;

    // Only the common prefix is compared byte-for-byte; any length difference
    // is resolved at the Equal label.
    IntPtr minLength = (IntPtr)((firstLength < secondLength) ? firstLength : secondLength);

    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)(void*)minLength;

    if (Avx2.IsSupported)
    {
        if ((byte*)lengthToExamine >= (byte*)Vector256<byte>.Count)
        {
            // Compare 32 bytes at a time; the final compare re-reads a
            // (possibly overlapping) vector ending exactly at minLength.
            lengthToExamine -= Vector256<byte>.Count;
            uint matches;
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                matches = (uint)Avx2.MoveMask(Avx2.CompareEqual(LoadVector256(ref first, offset), LoadVector256(ref second, offset)));
                // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                // So the bit position in 'matches' corresponds to the element offset.

                // 32 elements in Vector256<byte> so we compare to uint.MaxValue to check if everything matched
                if (matches == uint.MaxValue)
                {
                    // All matched
                    offset += Vector256<byte>.Count;
                    continue;
                }

                goto Difference;
            }
            // Move to Vector length from end for final compare
            offset = lengthToExamine;
            // Same as method as above
            matches = (uint)Avx2.MoveMask(Avx2.CompareEqual(LoadVector256(ref first, offset), LoadVector256(ref second, offset)));
            if (matches == uint.MaxValue)
            {
                // All matched
                goto Equal;
            }
        Difference:
            // Invert matches to find differences
            uint differences = ~matches;
            // Find bitflag offset of first difference and add to current offset
            offset = (IntPtr)((int)(byte*)offset + BitOperations.TrailingZeroCount((int)differences));

            int result = Unsafe.AddByteOffset(ref first, offset).CompareTo(Unsafe.AddByteOffset(ref second, offset));
            Debug.Assert(result != 0);

            return result;
        }

        if ((byte*)lengthToExamine >= (byte*)Vector128<byte>.Count)
        {
            // 16-31 bytes: at most one full 128-bit compare plus an overlapping
            // final compare ending at minLength.
            lengthToExamine -= Vector128<byte>.Count;
            uint matches;
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                matches = (uint)Sse2.MoveMask(Sse2.CompareEqual(LoadVector128(ref first, offset), LoadVector128(ref second, offset)));
                // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                // So the bit position in 'matches' corresponds to the element offset.

                // 16 elements in Vector128<byte> so we compare to ushort.MaxValue to check if everything matched
                if (matches == ushort.MaxValue)
                {
                    // All matched
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    goto Difference;
                }
            }
            // Move to Vector length from end for final compare
            offset = lengthToExamine;
            // Same as method as above
            matches = (uint)Sse2.MoveMask(Sse2.CompareEqual(LoadVector128(ref first, offset), LoadVector128(ref second, offset)));
            if (matches == ushort.MaxValue)
            {
                // All matched
                goto Equal;
            }
        Difference:
            // Invert matches to find differences
            uint differences = ~matches;
            // Find bitflag offset of first difference and add to current offset
            offset = (IntPtr)((int)(byte*)offset + BitOperations.TrailingZeroCount((int)differences));

            int result = Unsafe.AddByteOffset(ref first, offset).CompareTo(Unsafe.AddByteOffset(ref second, offset));
            Debug.Assert(result != 0);

            return result;
        }
    }
    else if (Sse2.IsSupported)
    {
        if ((byte*)lengthToExamine >= (byte*)Vector128<byte>.Count)
        {
            lengthToExamine -= Vector128<byte>.Count;
            uint matches;
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                matches = (uint)Sse2.MoveMask(Sse2.CompareEqual(LoadVector128(ref first, offset), LoadVector128(ref second, offset)));
                // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                // So the bit position in 'matches' corresponds to the element offset.

                // 16 elements in Vector128<byte> so we compare to ushort.MaxValue to check if everything matched
                if (matches == ushort.MaxValue)
                {
                    // All matched
                    offset += Vector128<byte>.Count;
                    continue;
                }

                goto Difference;
            }
            // Move to Vector length from end for final compare
            offset = lengthToExamine;
            // Same as method as above
            matches = (uint)Sse2.MoveMask(Sse2.CompareEqual(LoadVector128(ref first, offset), LoadVector128(ref second, offset)));
            if (matches == ushort.MaxValue)
            {
                // All matched
                goto Equal;
            }
        Difference:
            // Invert matches to find differences
            uint differences = ~matches;
            // Find bitflag offset of first difference and add to current offset
            offset = (IntPtr)((int)(byte*)offset + BitOperations.TrailingZeroCount((int)differences));

            int result = Unsafe.AddByteOffset(ref first, offset).CompareTo(Unsafe.AddByteOffset(ref second, offset));
            Debug.Assert(result != 0);

            return result;
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if ((byte*)lengthToExamine > (byte*)Vector<byte>.Count)
        {
            // Vector path only narrows down the differing region; the exact
            // differing byte is located by the BytewiseCheck loop below.
            lengthToExamine -= Vector<byte>.Count;
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                if (LoadVector(ref first, offset) != LoadVector(ref second, offset))
                {
                    goto BytewiseCheck;
                }
                offset += Vector<byte>.Count;
            }
            goto BytewiseCheck;
        }
    }

    if ((byte*)lengthToExamine > (byte*)sizeof(UIntPtr))
    {
        // Native-word-sized compares to skip over equal prefixes quickly.
        lengthToExamine -= sizeof(UIntPtr);
        while ((byte*)lengthToExamine > (byte*)offset)
        {
            if (LoadUIntPtr(ref first, offset) != LoadUIntPtr(ref second, offset))
            {
                goto BytewiseCheck;
            }
            offset += sizeof(UIntPtr);
        }
    }

BytewiseCheck: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    while ((byte*)minLength > (byte*)offset)
    {
        int result = Unsafe.AddByteOffset(ref first, offset).CompareTo(Unsafe.AddByteOffset(ref second, offset));
        if (result != 0)
            return result;
        offset += 1;
    }

Equal:
    // Common prefix equal: the shorter buffer sorts first.
    return firstLength - secondLength;
}
// Vector sub-search adapted from https://github.com/aspnet/KestrelHttpServer/pull/1138
// Given a comparison-result vector (0xFF in matching byte lanes, 0x00 elsewhere),
// returns the byte index of the last (highest-offset) matching lane.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int LocateLastFoundByte(Vector<byte> match)
{
    var vector64 = Vector.AsVectorUInt64(match);
    ulong candidate = 0;
    int i = Vector<ulong>.Count - 1;
    // Scan 8-byte chunks backwards for the last non-zero one.
    // Pattern unrolled by jit https://github.com/dotnet/coreclr/pull/8001
    for (; i >= 0; i--)
    {
        candidate = vector64[i];
        if (candidate != 0)
        {
            break;
        }
    }

    // Single LEA instruction with jitted const (using function result)
    return i * 8 + LocateLastFoundByte(candidate);
}
  1394. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1395. private static int LocateFirstFoundByte(ulong match)
  1396. {
  1397. if (Bmi1.X64.IsSupported)
  1398. {
  1399. return (int)(Bmi1.X64.TrailingZeroCount(match) >> 3);
  1400. }
  1401. else
  1402. {
  1403. // Flag least significant power of two bit
  1404. var powerOfTwoFlag = match ^ (match - 1);
  1405. // Shift all powers of two into the high byte and extract
  1406. return (int)((powerOfTwoFlag * XorPowerOfTwoToHighByte) >> 57);
  1407. }
  1408. }
  1409. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1410. private static int LocateLastFoundByte(ulong match)
  1411. {
  1412. return 7 - (BitOperations.LeadingZeroCount(match) >> 3);
  1413. }
// Multiplier used by the non-BMI1 path of LocateFirstFoundByte(ulong): multiplying
// the (match ^ (match - 1)) mask by this constant shuffles the index of the lowest
// set byte into the top bits, from where a single '>> 57' extracts it.
private const ulong XorPowerOfTwoToHighByte = (0x07ul |
                                               0x06ul << 8 |
                                               0x05ul << 16 |
                                               0x04ul << 24 |
                                               0x03ul << 32 |
                                               0x02ul << 40 |
                                               0x01ul << 48) + 1;
  1421. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1422. private static unsafe UIntPtr LoadUIntPtr(ref byte start, IntPtr offset)
  1423. => Unsafe.ReadUnaligned<UIntPtr>(ref Unsafe.AddByteOffset(ref start, offset));
  1424. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1425. private static unsafe Vector<byte> LoadVector(ref byte start, IntPtr offset)
  1426. => Unsafe.ReadUnaligned<Vector<byte>>(ref Unsafe.AddByteOffset(ref start, offset));
  1427. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1428. private static unsafe Vector128<byte> LoadVector128(ref byte start, IntPtr offset)
  1429. => Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AddByteOffset(ref start, offset));
  1430. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1431. private static unsafe Vector256<byte> LoadVector256(ref byte start, IntPtr offset)
  1432. => Unsafe.ReadUnaligned<Vector256<byte>>(ref Unsafe.AddByteOffset(ref start, offset));
  1433. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1434. private static unsafe IntPtr GetByteVectorSpanLength(IntPtr offset, int length)
  1435. => (IntPtr)((length - (int)(byte*)offset) & ~(Vector<byte>.Count - 1));
  1436. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1437. private static unsafe IntPtr GetByteVector128SpanLength(IntPtr offset, int length)
  1438. => (IntPtr)((length - (int)(byte*)offset) & ~(Vector128<byte>.Count - 1));
  1439. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1440. private static unsafe IntPtr GetByteVector256SpanLength(IntPtr offset, int length)
  1441. => (IntPtr)((length - (int)(byte*)offset) & ~(Vector256<byte>.Count - 1));
  1442. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1443. private static unsafe IntPtr UnalignedCountVector(ref byte searchSpace)
  1444. {
  1445. int unaligned = (int)Unsafe.AsPointer(ref searchSpace) & (Vector<byte>.Count - 1);
  1446. return (IntPtr)((Vector<byte>.Count - unaligned) & (Vector<byte>.Count - 1));
  1447. }
  1448. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1449. private static unsafe IntPtr UnalignedCountVector128(ref byte searchSpace)
  1450. {
  1451. int unaligned = (int)Unsafe.AsPointer(ref searchSpace) & (Vector128<byte>.Count - 1);
  1452. return (IntPtr)((Vector128<byte>.Count - unaligned) & (Vector128<byte>.Count - 1));
  1453. }
  1454. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1455. private static unsafe IntPtr UnalignedCountVectorFromEnd(ref byte searchSpace, int length)
  1456. {
  1457. int unaligned = (int)Unsafe.AsPointer(ref searchSpace) & (Vector<byte>.Count - 1);
  1458. return (IntPtr)(((length & (Vector<byte>.Count - 1)) + unaligned) & (Vector<byte>.Count - 1));
  1459. }
  1460. }
  1461. }