// SpanHelpers.Byte.cs
  1. // Licensed to the .NET Foundation under one or more agreements.
  2. // The .NET Foundation licenses this file to you under the MIT license.
  3. // See the LICENSE file in the project root for more information.
  4. using System.Diagnostics;
  5. using System.Numerics;
  6. using System.Runtime.CompilerServices;
  7. using System.Runtime.Intrinsics;
  8. using System.Runtime.Intrinsics.X86;
  9. using Internal.Runtime.CompilerServices;
  10. #pragma warning disable SA1121 // explicitly using type aliases instead of built-in types
  11. #if BIT64
  12. using nuint = System.UInt64;
  13. #else
  14. using nuint = System.UInt32;
  15. #endif // BIT64
  16. namespace System
  17. {
  18. internal static partial class SpanHelpers // .Byte
  19. {
  20. public static int IndexOf(ref byte searchSpace, int searchSpaceLength, ref byte value, int valueLength)
  21. {
  22. Debug.Assert(searchSpaceLength >= 0);
  23. Debug.Assert(valueLength >= 0);
  24. if (valueLength == 0)
  25. return 0; // A zero-length sequence is always treated as "found" at the start of the search space.
  26. byte valueHead = value;
  27. ref byte valueTail = ref Unsafe.Add(ref value, 1);
  28. int valueTailLength = valueLength - 1;
  29. int remainingSearchSpaceLength = searchSpaceLength - valueTailLength;
  30. int offset = 0;
  31. while (remainingSearchSpaceLength > 0)
  32. {
  33. // Do a quick search for the first element of "value".
  34. int relativeIndex = IndexOf(ref Unsafe.Add(ref searchSpace, offset), valueHead, remainingSearchSpaceLength);
  35. if (relativeIndex == -1)
  36. break;
  37. remainingSearchSpaceLength -= relativeIndex;
  38. offset += relativeIndex;
  39. if (remainingSearchSpaceLength <= 0)
  40. break; // The unsearched portion is now shorter than the sequence we're looking for. So it can't be there.
  41. // Found the first element of "value". See if the tail matches.
  42. if (SequenceEqual(ref Unsafe.Add(ref searchSpace, offset + 1), ref valueTail, valueTailLength))
  43. return offset; // The tail matched. Return a successful find.
  44. remainingSearchSpaceLength--;
  45. offset++;
  46. }
  47. return -1;
  48. }
  49. public static int IndexOfAny(ref byte searchSpace, int searchSpaceLength, ref byte value, int valueLength)
  50. {
  51. Debug.Assert(searchSpaceLength >= 0);
  52. Debug.Assert(valueLength >= 0);
  53. if (valueLength == 0)
  54. return -1; // A zero-length set of values is always treated as "not found".
  55. int offset = -1;
  56. for (int i = 0; i < valueLength; i++)
  57. {
  58. int tempIndex = IndexOf(ref searchSpace, Unsafe.Add(ref value, i), searchSpaceLength);
  59. if ((uint)tempIndex < (uint)offset)
  60. {
  61. offset = tempIndex;
  62. // Reduce space for search, cause we don't care if we find the search value after the index of a previously found value
  63. searchSpaceLength = tempIndex;
  64. if (offset == 0)
  65. break;
  66. }
  67. }
  68. return offset;
  69. }
  70. public static int LastIndexOfAny(ref byte searchSpace, int searchSpaceLength, ref byte value, int valueLength)
  71. {
  72. Debug.Assert(searchSpaceLength >= 0);
  73. Debug.Assert(valueLength >= 0);
  74. if (valueLength == 0)
  75. return -1; // A zero-length set of values is always treated as "not found".
  76. int offset = -1;
  77. for (int i = 0; i < valueLength; i++)
  78. {
  79. int tempIndex = LastIndexOf(ref searchSpace, Unsafe.Add(ref value, i), searchSpaceLength);
  80. if (tempIndex > offset)
  81. offset = tempIndex;
  82. }
  83. return offset;
  84. }
// Adapted from IndexOf(...)
// Returns true when 'value' occurs anywhere in the first 'length' bytes at 'searchSpace'.
// Strategy: an unrolled scalar scan (8-wide, then 4-wide, then byte-by-byte) over an initial
// unaligned lead, then a Vector<byte> pass when hardware acceleration is available, returning
// to the scalar scan for any tail shorter than one vector.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe bool Contains(ref byte searchSpace, byte value, int length)
{
    Debug.Assert(length >= 0);

    uint uValue = value; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Vector.IsHardwareAccelerated && length >= Vector<byte>.Count * 2)
    {
        // Limit the first scalar stage to the bytes before the first vector-aligned boundary.
        lengthToExamine = UnalignedCountVector(ref searchSpace);
    }
SequentialScan:
    // Examine 8 bytes per iteration while at least 8 remain in this scalar stage.
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 0) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 4) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 5) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 6) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 7))
        {
            goto Found;
        }

        offset += 8;
    }

    // Then a single 4-wide step if at least 4 bytes remain.
    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 0) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2) ||
            uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
        {
            goto Found;
        }

        offset += 4;
    }

    // Finally, any leftover bytes one at a time.
    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;

        offset += 1;
    }

    // Vector stage: only reachable when accelerated and bytes remain past the scalar lead.
    if (Vector.IsHardwareAccelerated && ((int)(byte*)offset < length))
    {
        // Round the remaining byte count down to a whole number of vectors.
        lengthToExamine = (IntPtr)((length - (int)(byte*)offset) & ~(Vector<byte>.Count - 1));

        Vector<byte> values = new Vector<byte>(value);

        while ((byte*)lengthToExamine > (byte*)offset)
        {
            var matches = Vector.Equals(values, LoadVector(ref searchSpace, offset));
            if (Vector<byte>.Zero.Equals(matches))
            {
                // No lane matched in this vector; advance to the next one.
                offset += Vector<byte>.Count;
                continue;
            }

            goto Found;
        }

        // Handle any sub-vector tail with the scalar scan above.
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
            goto SequentialScan;
        }
    }

    return false;

Found:
    return true;
}
// Returns the index of the first occurrence of 'value' in the first 'length' bytes at
// 'searchSpace', or -1 when not present.
// Strategy: an unrolled scalar scan over an initial unaligned lead, then the widest available
// SIMD path (AVX2 -> SSE2 -> Vector<T>), returning to the scalar scan for short tails.
// The FoundN labels let each unrolled comparison return its exact index without extra math
// in the hot loop.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int IndexOf(ref byte searchSpace, byte value, int length)
{
    Debug.Assert(length >= 0);

    uint uValue = value; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Avx2.IsSupported || Sse2.IsSupported)
    {
        // Avx2 branch also operates on Sse2 sizes, so check is combined.
        if (length >= Vector128<byte>.Count * 2)
        {
            // Limit the first scalar stage to the bytes before the first Vector128-aligned boundary.
            lengthToExamine = UnalignedCountVector128(ref searchSpace);
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if (length >= Vector<byte>.Count * 2)
        {
            lengthToExamine = UnalignedCountVector(ref searchSpace);
        }
    }
SequentialScan:
    // Scalar scan, 8 bytes per iteration.
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1))
            goto Found1;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2))
            goto Found2;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
            goto Found3;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 4))
            goto Found4;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 5))
            goto Found5;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 6))
            goto Found6;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 7))
            goto Found7;

        offset += 8;
    }

    // Then a single 4-wide step if at least 4 bytes remain.
    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1))
            goto Found1;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2))
            goto Found2;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
            goto Found3;

        offset += 4;
    }

    // Finally, any leftover bytes one at a time.
    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;

        offset += 1;
    }

    // We get past SequentialScan only if IsHardwareAccelerated or intrinsic .IsSupported is true; and remain length is greater than Vector length.
    // However, we still have the redundant check to allow the JIT to see that the code is unreachable and eliminate it when the platform does not
    // have hardware accelerated. After processing Vector lengths we return to SequentialScan to finish any remaining.
    if (Avx2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            if ((((nuint)Unsafe.AsPointer(ref searchSpace) + (nuint)offset) & (nuint)(Vector256<byte>.Count - 1)) != 0)
            {
                // Not currently aligned to Vector256 (is aligned to Vector128); this can cause a problem for searches
                // with no upper bound e.g. String.strlen.
                // Start with a check on Vector128 to align to Vector256, before moving to processing Vector256.
                // This ensures we do not fault across memory pages while searching for an end of string.
                Vector128<byte> values = Vector128.Create(value);
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as below
                int matches = Sse2.MoveMask(Sse2.CompareEqual(values, search));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                }
            }

            // Main AVX2 loop: 32 bytes per iteration.
            lengthToExamine = GetByteVector256SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector256<byte> values = Vector256.Create(value);
                do
                {
                    Vector256<byte> search = LoadVector256(ref searchSpace, offset);
                    int matches = Avx2.MoveMask(Avx2.CompareEqual(values, search));
                    // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                    // So the bit position in 'matches' corresponds to the element offset.
                    if (matches == 0)
                    {
                        // Zero flags set so no matches
                        offset += Vector256<byte>.Count;
                        continue;
                    }

                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                } while ((byte*)lengthToExamine > (byte*)offset);
            }

            // One more Vector128-sized chunk may remain after the 32-byte loop.
            lengthToExamine = GetByteVector128SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> values = Vector128.Create(value);
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as above
                int matches = Sse2.MoveMask(Sse2.CompareEqual(values, search));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                }
            }

            // Any sub-vector tail goes back through the scalar scan.
            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Sse2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            // SSE2 loop: 16 bytes per iteration.
            lengthToExamine = GetByteVector128SpanLength(offset, length);

            Vector128<byte> values = Vector128.Create(value);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as above
                int matches = Sse2.MoveMask(Sse2.CompareEqual(values, search));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                    continue;
                }

                // Find bitflag offset of first match and add to current offset
                return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
            }

            // Any sub-vector tail goes back through the scalar scan.
            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if ((int)(byte*)offset < length)
        {
            // Portable Vector<T> loop for platforms without the x86 intrinsics.
            lengthToExamine = GetByteVectorSpanLength(offset, length);

            Vector<byte> values = new Vector<byte>(value);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                var matches = Vector.Equals(values, LoadVector(ref searchSpace, offset));
                if (Vector<byte>.Zero.Equals(matches))
                {
                    offset += Vector<byte>.Count;
                    continue;
                }

                // Find offset of first match and add to current offset
                return (int)(byte*)offset + LocateFirstFoundByte(matches);
            }

            // Any sub-vector tail goes back through the scalar scan.
            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }

    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
  363. public static int LastIndexOf(ref byte searchSpace, int searchSpaceLength, ref byte value, int valueLength)
  364. {
  365. Debug.Assert(searchSpaceLength >= 0);
  366. Debug.Assert(valueLength >= 0);
  367. if (valueLength == 0)
  368. return 0; // A zero-length sequence is always treated as "found" at the start of the search space.
  369. byte valueHead = value;
  370. ref byte valueTail = ref Unsafe.Add(ref value, 1);
  371. int valueTailLength = valueLength - 1;
  372. int offset = 0;
  373. while (true)
  374. {
  375. Debug.Assert(0 <= offset && offset <= searchSpaceLength); // Ensures no deceptive underflows in the computation of "remainingSearchSpaceLength".
  376. int remainingSearchSpaceLength = searchSpaceLength - offset - valueTailLength;
  377. if (remainingSearchSpaceLength <= 0)
  378. break; // The unsearched portion is now shorter than the sequence we're looking for. So it can't be there.
  379. // Do a quick search for the first element of "value".
  380. int relativeIndex = LastIndexOf(ref searchSpace, valueHead, remainingSearchSpaceLength);
  381. if (relativeIndex == -1)
  382. break;
  383. // Found the first element of "value". See if the tail matches.
  384. if (SequenceEqual(ref Unsafe.Add(ref searchSpace, relativeIndex + 1), ref valueTail, valueTailLength))
  385. return relativeIndex; // The tail matched. Return a successful find.
  386. offset += remainingSearchSpaceLength - relativeIndex;
  387. }
  388. return -1;
  389. }
// Returns the index of the last occurrence of 'value' in the first 'length' bytes at
// 'searchSpace', or -1 when not present.
// Mirror image of IndexOf: 'offset' starts at 'length' and walks backwards; an unrolled
// scalar scan handles the unaligned tail, then a Vector<byte> pass sweeps backwards when
// hardware acceleration is available, returning to the scalar scan for the remainder.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int LastIndexOf(ref byte searchSpace, byte value, int length)
{
    Debug.Assert(length >= 0);

    uint uValue = value; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    IntPtr offset = (IntPtr)length; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Vector.IsHardwareAccelerated && length >= Vector<byte>.Count * 2)
    {
        // Limit the first scalar stage to the bytes after the last vector-aligned boundary.
        lengthToExamine = UnalignedCountVectorFromEnd(ref searchSpace, length);
    }
SequentialScan:
    // Scalar scan, 8 bytes per iteration, walking backwards; highest offset checked first.
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;
        offset -= 8;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 7))
            goto Found7;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 6))
            goto Found6;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 5))
            goto Found5;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 4))
            goto Found4;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
            goto Found3;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2))
            goto Found2;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1))
            goto Found1;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
    }

    // Then a single 4-wide step if at least 4 bytes remain.
    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;
        offset -= 4;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 3))
            goto Found3;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 2))
            goto Found2;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset + 1))
            goto Found1;
        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
    }

    // Finally, any leftover bytes one at a time.
    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;
        offset -= 1;

        if (uValue == Unsafe.AddByteOffset(ref searchSpace, offset))
            goto Found;
    }

    // Vector stage: only reachable when accelerated and bytes remain before 'offset'.
    if (Vector.IsHardwareAccelerated && ((byte*)offset > (byte*)0))
    {
        // Round the remaining byte count down to a whole number of vectors.
        lengthToExamine = (IntPtr)((int)(byte*)offset & ~(Vector<byte>.Count - 1));

        Vector<byte> values = new Vector<byte>(value);

        while ((byte*)lengthToExamine > (byte*)(Vector<byte>.Count - 1))
        {
            // Load the vector that ends at the current offset and test it.
            var matches = Vector.Equals(values, LoadVector(ref searchSpace, offset - Vector<byte>.Count));
            if (Vector<byte>.Zero.Equals(matches))
            {
                offset -= Vector<byte>.Count;
                lengthToExamine -= Vector<byte>.Count;
                continue;
            }

            // Find offset of first match and add to current offset
            return (int)(offset) - Vector<byte>.Count + LocateLastFoundByte(matches);
        }

        // Any sub-vector head goes back through the scalar scan.
        if ((byte*)offset > (byte*)0)
        {
            lengthToExamine = offset;
            goto SequentialScan;
        }
    }

    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
// Returns the index of the first occurrence of either 'value0' or 'value1' in the first
// 'length' bytes at 'searchSpace', or -1 when neither is present.
// Same staged strategy as the single-value IndexOf: unrolled scalar scan over the unaligned
// lead, then AVX2 -> SSE2 -> Vector<T>, combining the two per-value equality masks with OR.
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int IndexOfAny(ref byte searchSpace, byte value0, byte value1, int length)
{
    Debug.Assert(length >= 0);

    uint uValue0 = value0; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    uint uValue1 = value1; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;

    if (Avx2.IsSupported || Sse2.IsSupported)
    {
        // Avx2 branch also operates on Sse2 sizes, so check is combined.
        if (length >= Vector128<byte>.Count * 2)
        {
            // Limit the first scalar stage to the bytes before the first Vector128-aligned boundary.
            lengthToExamine = UnalignedCountVector128(ref searchSpace);
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if (length >= Vector<byte>.Count * 2)
        {
            lengthToExamine = UnalignedCountVector(ref searchSpace);
        }
    }
SequentialScan:
    // Scalar scan, 8 bytes per iteration; each byte is read once and compared to both values.
    uint lookUp;
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 4);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 5);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found5;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 6);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found6;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 7);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found7;

        offset += 8;
    }

    // Then a single 4-wide step if at least 4 bytes remain.
    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found3;

        offset += 4;
    }

    // Finally, any leftover bytes one at a time.
    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;

        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;

        offset += 1;
    }

    // We get past SequentialScan only if IsHardwareAccelerated or intrinsic .IsSupported is true. However, we still have the redundant check to allow
    // the JIT to see that the code is unreachable and eliminate it when the platform does not have hardware accelerated.
    if (Avx2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            // Main AVX2 loop: 32 bytes per iteration.
            lengthToExamine = GetByteVector256SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector256<byte> values0 = Vector256.Create(value0);
                Vector256<byte> values1 = Vector256.Create(value1);
                do
                {
                    Vector256<byte> search = LoadVector256(ref searchSpace, offset);
                    // Bitwise Or to combine the matches and MoveMask to convert them to bitflags
                    int matches = Avx2.MoveMask(
                                    Avx2.Or(
                                        Avx2.CompareEqual(values0, search),
                                        Avx2.CompareEqual(values1, search)));
                    // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                    // So the bit position in 'matches' corresponds to the element offset.
                    if (matches == 0)
                    {
                        // Zero flags set so no matches
                        offset += Vector256<byte>.Count;
                        continue;
                    }

                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                } while ((byte*)lengthToExamine > (byte*)offset);
            }

            // One more Vector128-sized chunk may remain after the 32-byte loop.
            lengthToExamine = GetByteVector128SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> values0 = Vector128.Create(value0);
                Vector128<byte> values1 = Vector128.Create(value1);
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as above
                int matches = Sse2.MoveMask(
                                Sse2.Or(
                                    Sse2.CompareEqual(values0, search),
                                    Sse2.CompareEqual(values1, search)));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                }
            }

            // Any sub-vector tail goes back through the scalar scan.
            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Sse2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            // SSE2 loop: 16 bytes per iteration.
            lengthToExamine = GetByteVector128SpanLength(offset, length);

            Vector128<byte> values0 = Vector128.Create(value0);
            Vector128<byte> values1 = Vector128.Create(value1);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);

                // Same method as above
                int matches = Sse2.MoveMask(
                                Sse2.Or(
                                    Sse2.CompareEqual(values0, search),
                                    Sse2.CompareEqual(values1, search)));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                    continue;
                }

                // Find bitflag offset of first match and add to current offset
                return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
            }

            // Any sub-vector tail goes back through the scalar scan.
            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if ((int)(byte*)offset < length)
        {
            // Portable Vector<T> loop for platforms without the x86 intrinsics.
            lengthToExamine = GetByteVectorSpanLength(offset, length);

            Vector<byte> values0 = new Vector<byte>(value0);
            Vector<byte> values1 = new Vector<byte>(value1);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector<byte> search = LoadVector(ref searchSpace, offset);
                var matches = Vector.BitwiseOr(
                                Vector.Equals(search, values0),
                                Vector.Equals(search, values1));
                if (Vector<byte>.Zero.Equals(matches))
                {
                    offset += Vector<byte>.Count;
                    continue;
                }

                // Find offset of first match and add to current offset
                return (int)(byte*)offset + LocateFirstFoundByte(matches);
            }

            // Any sub-vector tail goes back through the scalar scan.
            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }

    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
/// <summary>
/// Returns the index of the first occurrence of <paramref name="value0"/>, <paramref name="value1"/>
/// or <paramref name="value2"/> within the first <paramref name="length"/> bytes starting at
/// <paramref name="searchSpace"/>, or -1 if none is present.
/// Leading unaligned bytes (and any post-vector remainder, which jumps back to
/// <c>SequentialScan</c>) are handled scalar; the bulk is handled by the widest available
/// SIMD path: AVX2, then SSE2, then Vector&lt;T&gt;.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int IndexOfAny(ref byte searchSpace, byte value0, byte value1, byte value2, int length)
{
    Debug.Assert(length >= 0);
    uint uValue0 = value0; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    uint uValue1 = value1;
    uint uValue2 = value2;
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;
    if (Avx2.IsSupported || Sse2.IsSupported)
    {
        // Avx2 branch also operates on Sse2 sizes, so check is combined.
        if (length >= Vector128<byte>.Count * 2)
        {
            // Scalar-scan only the bytes before the first Vector128-aligned address.
            lengthToExamine = UnalignedCountVector128(ref searchSpace);
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if (length >= Vector<byte>.Count * 2)
        {
            lengthToExamine = UnalignedCountVector(ref searchSpace);
        }
    }
SequentialScan:
    // Scalar scan bounded by lengthToExamine, unrolled 8x, then 4x, then 1 byte at a time.
    uint lookUp;
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 4);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 5);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found5;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 6);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found6;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 7);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found7;
        offset += 8;
    }
    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found3;
        offset += 4;
    }
    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
        offset += 1;
    }
    if (Avx2.IsSupported)
    {
        // Process remaining data in 32-byte strides, then a single 16-byte stride,
        // then hand any tail back to the scalar loop above.
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVector256SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector256<byte> values0 = Vector256.Create(value0);
                Vector256<byte> values1 = Vector256.Create(value1);
                Vector256<byte> values2 = Vector256.Create(value2);
                do
                {
                    Vector256<byte> search = LoadVector256(ref searchSpace, offset);
                    Vector256<byte> matches0 = Avx2.CompareEqual(values0, search);
                    Vector256<byte> matches1 = Avx2.CompareEqual(values1, search);
                    Vector256<byte> matches2 = Avx2.CompareEqual(values2, search);
                    // Bitwise Or to combine the matches and MoveMask to convert them to bitflags
                    int matches = Avx2.MoveMask(Avx2.Or(Avx2.Or(matches0, matches1), matches2));
                    // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                    // So the bit position in 'matches' corresponds to the element offset.
                    if (matches == 0)
                    {
                        // Zero flags set so no matches
                        offset += Vector256<byte>.Count;
                        continue;
                    }
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                } while ((byte*)lengthToExamine > (byte*)offset);
            }
            lengthToExamine = GetByteVector128SpanLength(offset, length);
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> values0 = Vector128.Create(value0);
                Vector128<byte> values1 = Vector128.Create(value1);
                Vector128<byte> values2 = Vector128.Create(value2);
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);
                Vector128<byte> matches0 = Sse2.CompareEqual(values0, search);
                Vector128<byte> matches1 = Sse2.CompareEqual(values1, search);
                Vector128<byte> matches2 = Sse2.CompareEqual(values2, search);
                // Same method as above
                int matches = Sse2.MoveMask(Sse2.Or(Sse2.Or(matches0, matches1), matches2));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                }
                else
                {
                    // Find bitflag offset of first match and add to current offset
                    return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
                }
            }
            if ((int)(byte*)offset < length)
            {
                // Fewer than a vector's worth of bytes remain; finish them scalar.
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Sse2.IsSupported)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVector128SpanLength(offset, length);
            Vector128<byte> values0 = Vector128.Create(value0);
            Vector128<byte> values1 = Vector128.Create(value1);
            Vector128<byte> values2 = Vector128.Create(value2);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector128<byte> search = LoadVector128(ref searchSpace, offset);
                Vector128<byte> matches0 = Sse2.CompareEqual(values0, search);
                Vector128<byte> matches1 = Sse2.CompareEqual(values1, search);
                Vector128<byte> matches2 = Sse2.CompareEqual(values2, search);
                // Same method as above
                int matches = Sse2.MoveMask(Sse2.Or(Sse2.Or(matches0, matches1), matches2));
                if (matches == 0)
                {
                    // Zero flags set so no matches
                    offset += Vector128<byte>.Count;
                    continue;
                }
                // Find bitflag offset of first match and add to current offset
                return ((int)(byte*)offset) + BitOperations.TrailingZeroCount(matches);
            }
            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if ((int)(byte*)offset < length)
        {
            lengthToExamine = GetByteVectorSpanLength(offset, length);
            Vector<byte> values0 = new Vector<byte>(value0);
            Vector<byte> values1 = new Vector<byte>(value1);
            Vector<byte> values2 = new Vector<byte>(value2);
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                Vector<byte> search = LoadVector(ref searchSpace, offset);
                var matches = Vector.BitwiseOr(
                                Vector.BitwiseOr(
                                    Vector.Equals(search, values0),
                                    Vector.Equals(search, values1)),
                                Vector.Equals(search, values2));
                if (Vector<byte>.Zero.Equals(matches))
                {
                    offset += Vector<byte>.Count;
                    continue;
                }
                // Find offset of first match and add to current offset
                return (int)(byte*)offset + LocateFirstFoundByte(matches);
            }
            if ((int)(byte*)offset < length)
            {
                lengthToExamine = (IntPtr)(length - (int)(byte*)offset);
                goto SequentialScan;
            }
        }
    }
    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
/// <summary>
/// Returns the index of the last occurrence of <paramref name="value0"/> or
/// <paramref name="value1"/> within the first <paramref name="length"/> bytes starting at
/// <paramref name="searchSpace"/>, or -1 if neither is present.
/// Scans backwards from the end: <c>offset</c> starts at <paramref name="length"/> and is
/// decremented; the trailing unaligned bytes go through the scalar SequentialScan, the rest
/// through Vector&lt;T&gt; when hardware-accelerated.
/// </summary>
public static unsafe int LastIndexOfAny(ref byte searchSpace, byte value0, byte value1, int length)
{
    Debug.Assert(length >= 0);
    uint uValue0 = value0; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    uint uValue1 = value1;
    IntPtr offset = (IntPtr)length; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;
    if (Vector.IsHardwareAccelerated && length >= Vector<byte>.Count * 2)
    {
        // Scalar-scan only the tail bytes beyond the last Vector-aligned boundary.
        lengthToExamine = UnalignedCountVectorFromEnd(ref searchSpace, length);
    }
SequentialScan:
    // Backwards scalar scan bounded by lengthToExamine, unrolled 8x, then 4x, then 1 byte.
    // Within each unrolled group the highest offset is tested first to preserve
    // last-match semantics.
    uint lookUp;
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;
        offset -= 8;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 7);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found7;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 6);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found6;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 5);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found5;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 4);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
    }
    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;
        offset -= 4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
    }
    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;
        offset -= 1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp)
            goto Found;
    }
    if (Vector.IsHardwareAccelerated && ((byte*)offset > (byte*)0))
    {
        // Round the remaining prefix down to whole vectors; any leftover head bytes
        // are handed back to the scalar loop afterwards.
        lengthToExamine = (IntPtr)((int)(byte*)offset & ~(Vector<byte>.Count - 1));
        Vector<byte> values0 = new Vector<byte>(value0);
        Vector<byte> values1 = new Vector<byte>(value1);
        while ((byte*)lengthToExamine > (byte*)(Vector<byte>.Count - 1))
        {
            // Load the vector that ends at the current offset (reads backwards).
            Vector<byte> search = LoadVector(ref searchSpace, offset - Vector<byte>.Count);
            var matches = Vector.BitwiseOr(
                            Vector.Equals(search, values0),
                            Vector.Equals(search, values1));
            if (Vector<byte>.Zero.Equals(matches))
            {
                offset -= Vector<byte>.Count;
                lengthToExamine -= Vector<byte>.Count;
                continue;
            }
            // Find offset of first match and add to current offset
            return (int)(offset) - Vector<byte>.Count + LocateLastFoundByte(matches);
        }
        if ((byte*)offset > (byte*)0)
        {
            lengthToExamine = offset;
            goto SequentialScan;
        }
    }
    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
/// <summary>
/// Returns the index of the last occurrence of <paramref name="value0"/>,
/// <paramref name="value1"/> or <paramref name="value2"/> within the first
/// <paramref name="length"/> bytes starting at <paramref name="searchSpace"/>,
/// or -1 if none is present. Same backwards-scan structure as the two-value overload.
/// </summary>
public static unsafe int LastIndexOfAny(ref byte searchSpace, byte value0, byte value1, byte value2, int length)
{
    Debug.Assert(length >= 0);
    uint uValue0 = value0; // Use uint for comparisons to avoid unnecessary 8->32 extensions
    uint uValue1 = value1;
    uint uValue2 = value2;
    IntPtr offset = (IntPtr)length; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)length;
    if (Vector.IsHardwareAccelerated && length >= Vector<byte>.Count * 2)
    {
        // Scalar-scan only the tail bytes beyond the last Vector-aligned boundary.
        lengthToExamine = UnalignedCountVectorFromEnd(ref searchSpace, length);
    }
SequentialScan:
    // Backwards scalar scan; highest offsets are tested first within each unrolled group.
    uint lookUp;
    while ((byte*)lengthToExamine >= (byte*)8)
    {
        lengthToExamine -= 8;
        offset -= 8;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 7);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found7;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 6);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found6;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 5);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found5;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 4);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
    }
    if ((byte*)lengthToExamine >= (byte*)4)
    {
        lengthToExamine -= 4;
        offset -= 4;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 3);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found3;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 2);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found2;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset + 1);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
    }
    while ((byte*)lengthToExamine > (byte*)0)
    {
        lengthToExamine -= 1;
        offset -= 1;
        lookUp = Unsafe.AddByteOffset(ref searchSpace, offset);
        if (uValue0 == lookUp || uValue1 == lookUp || uValue2 == lookUp)
            goto Found;
    }
    if (Vector.IsHardwareAccelerated && ((byte*)offset > (byte*)0))
    {
        // Round the remaining prefix down to whole vectors; leftover head bytes
        // go back through the scalar loop.
        lengthToExamine = (IntPtr)((int)(byte*)offset & ~(Vector<byte>.Count - 1));
        Vector<byte> values0 = new Vector<byte>(value0);
        Vector<byte> values1 = new Vector<byte>(value1);
        Vector<byte> values2 = new Vector<byte>(value2);
        while ((byte*)lengthToExamine > (byte*)(Vector<byte>.Count - 1))
        {
            // Load the vector that ends at the current offset (reads backwards).
            Vector<byte> search = LoadVector(ref searchSpace, offset - Vector<byte>.Count);
            var matches = Vector.BitwiseOr(
                            Vector.BitwiseOr(
                                Vector.Equals(search, values0),
                                Vector.Equals(search, values1)),
                            Vector.Equals(search, values2));
            if (Vector<byte>.Zero.Equals(matches))
            {
                offset -= Vector<byte>.Count;
                lengthToExamine -= Vector<byte>.Count;
                continue;
            }
            // Find offset of first match and add to current offset
            return (int)(offset) - Vector<byte>.Count + LocateLastFoundByte(matches);
        }
        if ((byte*)offset > (byte*)0)
        {
            lengthToExamine = offset;
            goto SequentialScan;
        }
    }
    return -1;
Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return (int)(byte*)offset;
Found1:
    return (int)(byte*)(offset + 1);
Found2:
    return (int)(byte*)(offset + 2);
Found3:
    return (int)(byte*)(offset + 3);
Found4:
    return (int)(byte*)(offset + 4);
Found5:
    return (int)(byte*)(offset + 5);
Found6:
    return (int)(byte*)(offset + 6);
Found7:
    return (int)(byte*)(offset + 7);
}
// Optimized byte-based SequenceEquals. The "length" parameter for this one is declared a nuint rather than int as we also use it for types other than byte
// where the length can exceed 2Gb once scaled by sizeof(T).
/// <summary>
/// Returns true if the <paramref name="length"/> bytes at <paramref name="first"/> equal the
/// <paramref name="length"/> bytes at <paramref name="second"/>. Compares in Vector-,
/// then native-word-, then byte-sized chunks. The vector and word paths finish with a final
/// (possibly overlapping) load at <c>lengthToExamine</c> bytes from the start, which covers the
/// tail without a scalar loop.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe bool SequenceEqual(ref byte first, ref byte second, nuint length)
{
    // Identical references (including both empty) are trivially equal.
    if (Unsafe.AreSame(ref first, ref second))
        goto Equal;
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)(void*)length;
    if (Vector.IsHardwareAccelerated && (byte*)lengthToExamine >= (byte*)Vector<byte>.Count)
    {
        lengthToExamine -= Vector<byte>.Count;
        while ((byte*)lengthToExamine > (byte*)offset)
        {
            if (LoadVector(ref first, offset) != LoadVector(ref second, offset))
            {
                goto NotEqual;
            }
            offset += Vector<byte>.Count;
        }
        // Final compare of the last Vector<byte>.Count bytes; may overlap bytes already compared.
        return LoadVector(ref first, lengthToExamine) == LoadVector(ref second, lengthToExamine);
    }
    if ((byte*)lengthToExamine >= (byte*)sizeof(UIntPtr))
    {
        lengthToExamine -= sizeof(UIntPtr);
        while ((byte*)lengthToExamine > (byte*)offset)
        {
            if (LoadUIntPtr(ref first, offset) != LoadUIntPtr(ref second, offset))
            {
                goto NotEqual;
            }
            offset += sizeof(UIntPtr);
        }
        // Final compare of the last native word; may overlap bytes already compared.
        return LoadUIntPtr(ref first, lengthToExamine) == LoadUIntPtr(ref second, lengthToExamine);
    }
    while ((byte*)lengthToExamine > (byte*)offset)
    {
        if (Unsafe.AddByteOffset(ref first, offset) != Unsafe.AddByteOffset(ref second, offset))
            goto NotEqual;
        offset += 1;
    }
Equal:
    return true;
NotEqual: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    return false;
}
// Vector sub-search adapted from https://github.com/aspnet/KestrelHttpServer/pull/1138
/// <summary>
/// Given a Vector&lt;byte&gt; match mask (0xFF in matching byte lanes, 0 elsewhere),
/// returns the element index of the first matching byte. Caller must ensure at least
/// one lane matched. Reinterprets the vector as ulongs, finds the first non-zero
/// 64-bit lane, then locates the byte within it.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int LocateFirstFoundByte(Vector<byte> match)
{
    var vector64 = Vector.AsVectorUInt64(match);
    ulong candidate = 0;
    int i = 0;
    // Pattern unrolled by jit https://github.com/dotnet/coreclr/pull/8001
    for (; i < Vector<ulong>.Count; i++)
    {
        candidate = vector64[i];
        if (candidate != 0)
        {
            break;
        }
    }
    // Single LEA instruction with jitted const (using function result)
    return i * 8 + LocateFirstFoundByte(candidate);
}
/// <summary>
/// Lexicographic comparison of two byte sequences (memcmp semantics): returns negative,
/// zero, or positive. Compares the first min(firstLength, secondLength) bytes using the
/// widest available SIMD path (AVX2, SSE2, Vector&lt;T&gt;, native word, then bytes); when the
/// common prefix is equal, the result is the difference of the lengths.
/// Vector paths do a final (possibly overlapping) compare at the end of the data to avoid a
/// scalar tail loop.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public static unsafe int SequenceCompareTo(ref byte first, int firstLength, ref byte second, int secondLength)
{
    Debug.Assert(firstLength >= 0);
    Debug.Assert(secondLength >= 0);
    // Identical references: only lengths can differ.
    if (Unsafe.AreSame(ref first, ref second))
        goto Equal;
    IntPtr minLength = (IntPtr)((firstLength < secondLength) ? firstLength : secondLength);
    IntPtr offset = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations
    IntPtr lengthToExamine = (IntPtr)(void*)minLength;
    if (Avx2.IsSupported)
    {
        if ((byte*)lengthToExamine >= (byte*)Vector256<byte>.Count)
        {
            lengthToExamine -= Vector256<byte>.Count;
            uint matches;
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                matches = (uint)Avx2.MoveMask(Avx2.CompareEqual(LoadVector256(ref first, offset), LoadVector256(ref second, offset)));
                // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                // So the bit position in 'matches' corresponds to the element offset.
                // 32 elements in Vector256<byte> so we compare to uint.MaxValue to check if everything matched
                if (matches == uint.MaxValue)
                {
                    // All matched
                    offset += Vector256<byte>.Count;
                    continue;
                }
                goto Difference;
            }
            // Move to Vector length from end for final compare
            offset = lengthToExamine;
            // Same method as above
            matches = (uint)Avx2.MoveMask(Avx2.CompareEqual(LoadVector256(ref first, offset), LoadVector256(ref second, offset)));
            if (matches == uint.MaxValue)
            {
                // All matched
                goto Equal;
            }
        Difference:
            // Invert matches to find differences
            uint differences = ~matches;
            // Find bitflag offset of first difference and add to current offset
            offset = (IntPtr)((int)(byte*)offset + BitOperations.TrailingZeroCount((int)differences));
            int result = Unsafe.AddByteOffset(ref first, offset).CompareTo(Unsafe.AddByteOffset(ref second, offset));
            Debug.Assert(result != 0);
            return result;
        }
        // Fewer than 32 bytes: AVX2 machines still use SSE2-width compares here.
        if ((byte*)lengthToExamine >= (byte*)Vector128<byte>.Count)
        {
            lengthToExamine -= Vector128<byte>.Count;
            uint matches;
            if ((byte*)lengthToExamine > (byte*)offset)
            {
                matches = (uint)Sse2.MoveMask(Sse2.CompareEqual(LoadVector128(ref first, offset), LoadVector128(ref second, offset)));
                // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                // So the bit position in 'matches' corresponds to the element offset.
                // 16 elements in Vector128<byte> so we compare to ushort.MaxValue to check if everything matched
                if (matches != ushort.MaxValue)
                {
                    goto Difference;
                }
            }
            // Move to Vector length from end for final compare
            offset = lengthToExamine;
            // Same method as above
            matches = (uint)Sse2.MoveMask(Sse2.CompareEqual(LoadVector128(ref first, offset), LoadVector128(ref second, offset)));
            if (matches == ushort.MaxValue)
            {
                // All matched
                goto Equal;
            }
        Difference:
            // Invert matches to find differences
            uint differences = ~matches;
            // Find bitflag offset of first difference and add to current offset
            offset = (IntPtr)((int)(byte*)offset + BitOperations.TrailingZeroCount((int)differences));
            int result = Unsafe.AddByteOffset(ref first, offset).CompareTo(Unsafe.AddByteOffset(ref second, offset));
            Debug.Assert(result != 0);
            return result;
        }
    }
    else if (Sse2.IsSupported)
    {
        if ((byte*)lengthToExamine >= (byte*)Vector128<byte>.Count)
        {
            lengthToExamine -= Vector128<byte>.Count;
            uint matches;
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                matches = (uint)Sse2.MoveMask(Sse2.CompareEqual(LoadVector128(ref first, offset), LoadVector128(ref second, offset)));
                // Note that MoveMask has converted the equal vector elements into a set of bit flags,
                // So the bit position in 'matches' corresponds to the element offset.
                // 16 elements in Vector128<byte> so we compare to ushort.MaxValue to check if everything matched
                if (matches == ushort.MaxValue)
                {
                    // All matched
                    offset += Vector128<byte>.Count;
                    continue;
                }
                goto Difference;
            }
            // Move to Vector length from end for final compare
            offset = lengthToExamine;
            // Same method as above
            matches = (uint)Sse2.MoveMask(Sse2.CompareEqual(LoadVector128(ref first, offset), LoadVector128(ref second, offset)));
            if (matches == ushort.MaxValue)
            {
                // All matched
                goto Equal;
            }
        Difference:
            // Invert matches to find differences
            uint differences = ~matches;
            // Find bitflag offset of first difference and add to current offset
            offset = (IntPtr)((int)(byte*)offset + BitOperations.TrailingZeroCount((int)differences));
            int result = Unsafe.AddByteOffset(ref first, offset).CompareTo(Unsafe.AddByteOffset(ref second, offset));
            Debug.Assert(result != 0);
            return result;
        }
    }
    else if (Vector.IsHardwareAccelerated)
    {
        if ((byte*)lengthToExamine > (byte*)Vector<byte>.Count)
        {
            lengthToExamine -= Vector<byte>.Count;
            while ((byte*)lengthToExamine > (byte*)offset)
            {
                if (LoadVector(ref first, offset) != LoadVector(ref second, offset))
                {
                    goto BytewiseCheck;
                }
                offset += Vector<byte>.Count;
            }
            goto BytewiseCheck;
        }
    }
    if ((byte*)lengthToExamine > (byte*)sizeof(UIntPtr))
    {
        lengthToExamine -= sizeof(UIntPtr);
        while ((byte*)lengthToExamine > (byte*)offset)
        {
            if (LoadUIntPtr(ref first, offset) != LoadUIntPtr(ref second, offset))
            {
                // Word differs; fall through to find the exact differing byte.
                goto BytewiseCheck;
            }
            offset += sizeof(UIntPtr);
        }
    }
BytewiseCheck: // Workaround for https://github.com/dotnet/coreclr/issues/13549
    while ((byte*)minLength > (byte*)offset)
    {
        int result = Unsafe.AddByteOffset(ref first, offset).CompareTo(Unsafe.AddByteOffset(ref second, offset));
        if (result != 0)
            return result;
        offset += 1;
    }
Equal:
    // Common prefix equal: shorter sequence sorts first.
    return firstLength - secondLength;
}
// Vector sub-search adapted from https://github.com/aspnet/KestrelHttpServer/pull/1138
/// <summary>
/// Given a Vector&lt;byte&gt; match mask (0xFF in matching byte lanes, 0 elsewhere),
/// returns the element index of the last matching byte. Caller must ensure at least
/// one lane matched. Walks the 64-bit lanes from high to low to find the last
/// non-zero one, then locates the byte within it.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int LocateLastFoundByte(Vector<byte> match)
{
    var vector64 = Vector.AsVectorUInt64(match);
    ulong candidate = 0;
    int i = Vector<ulong>.Count - 1;
    // Pattern unrolled by jit https://github.com/dotnet/coreclr/pull/8001
    for (; i >= 0; i--)
    {
        candidate = vector64[i];
        if (candidate != 0)
        {
            break;
        }
    }
    // Single LEA instruction with jitted const (using function result)
    return i * 8 + LocateLastFoundByte(candidate);
}
  1390. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1391. private static int LocateFirstFoundByte(ulong match)
  1392. {
  1393. if (Bmi1.X64.IsSupported)
  1394. {
  1395. return (int)(Bmi1.X64.TrailingZeroCount(match) >> 3);
  1396. }
  1397. else
  1398. {
  1399. // Flag least significant power of two bit
  1400. ulong powerOfTwoFlag = match ^ (match - 1);
  1401. // Shift all powers of two into the high byte and extract
  1402. return (int)((powerOfTwoFlag * XorPowerOfTwoToHighByte) >> 57);
  1403. }
  1404. }
  1405. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1406. private static int LocateLastFoundByte(ulong match)
  1407. {
  1408. return 7 - (BitOperations.LeadingZeroCount(match) >> 3);
  1409. }
// Multiply-shift magic: the byte positions of this constant hold descending indices
// (7 in byte 0 down to 1 in byte 6, then +1). Multiplying the "all bits set up through
// the first match bit" mask (match ^ (match - 1)) by it moves the index of the first
// matching byte into the top byte of the product, extracted with >> 57.
private const ulong XorPowerOfTwoToHighByte = (0x07ul |
                                               0x06ul << 8 |
                                               0x05ul << 16 |
                                               0x04ul << 24 |
                                               0x03ul << 32 |
                                               0x02ul << 40 |
                                               0x01ul << 48) + 1;
  1417. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1418. private static unsafe UIntPtr LoadUIntPtr(ref byte start, IntPtr offset)
  1419. => Unsafe.ReadUnaligned<UIntPtr>(ref Unsafe.AddByteOffset(ref start, offset));
  1420. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1421. private static unsafe Vector<byte> LoadVector(ref byte start, IntPtr offset)
  1422. => Unsafe.ReadUnaligned<Vector<byte>>(ref Unsafe.AddByteOffset(ref start, offset));
  1423. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1424. private static unsafe Vector128<byte> LoadVector128(ref byte start, IntPtr offset)
  1425. => Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AddByteOffset(ref start, offset));
  1426. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1427. private static unsafe Vector256<byte> LoadVector256(ref byte start, IntPtr offset)
  1428. => Unsafe.ReadUnaligned<Vector256<byte>>(ref Unsafe.AddByteOffset(ref start, offset));
  1429. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1430. private static unsafe IntPtr GetByteVectorSpanLength(IntPtr offset, int length)
  1431. => (IntPtr)((length - (int)(byte*)offset) & ~(Vector<byte>.Count - 1));
  1432. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1433. private static unsafe IntPtr GetByteVector128SpanLength(IntPtr offset, int length)
  1434. => (IntPtr)((length - (int)(byte*)offset) & ~(Vector128<byte>.Count - 1));
  1435. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1436. private static unsafe IntPtr GetByteVector256SpanLength(IntPtr offset, int length)
  1437. => (IntPtr)((length - (int)(byte*)offset) & ~(Vector256<byte>.Count - 1));
  1438. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1439. private static unsafe IntPtr UnalignedCountVector(ref byte searchSpace)
  1440. {
  1441. int unaligned = (int)Unsafe.AsPointer(ref searchSpace) & (Vector<byte>.Count - 1);
  1442. return (IntPtr)((Vector<byte>.Count - unaligned) & (Vector<byte>.Count - 1));
  1443. }
  1444. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1445. private static unsafe IntPtr UnalignedCountVector128(ref byte searchSpace)
  1446. {
  1447. int unaligned = (int)Unsafe.AsPointer(ref searchSpace) & (Vector128<byte>.Count - 1);
  1448. return (IntPtr)((Vector128<byte>.Count - unaligned) & (Vector128<byte>.Count - 1));
  1449. }
  1450. [MethodImpl(MethodImplOptions.AggressiveInlining)]
  1451. private static unsafe IntPtr UnalignedCountVectorFromEnd(ref byte searchSpace, int length)
  1452. {
  1453. int unaligned = (int)Unsafe.AsPointer(ref searchSpace) & (Vector<byte>.Count - 1);
  1454. return (IntPtr)(((length & (Vector<byte>.Count - 1)) + unaligned) & (Vector<byte>.Count - 1));
  1455. }
  1456. }
  1457. }