Buffer.cs

// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

#if AMD64 || ARM64 || (BIT32 && !ARM)
#define HAS_CUSTOM_BLOCKS
#endif

using System.Diagnostics;
using System.Runtime;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using Internal.Runtime.CompilerServices;

#if BIT64
using nint = System.Int64;
using nuint = System.UInt64;
#else
using nint = System.Int32;
using nuint = System.UInt32;
#endif

namespace System
{
    public static partial class Buffer
    {
        public static int ByteLength(Array array)
        {
            // Is the array present?
            if (array == null)
                throw new ArgumentNullException(nameof(array));

            // Is it of primitive types?
            if (!IsPrimitiveTypeArray(array))
                throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(array));

            return _ByteLength(array);
        }

        public static byte GetByte(Array array, int index)
        {
            // Is the array present?
            if (array == null)
                throw new ArgumentNullException(nameof(array));

            // Is it of primitive types?
            if (!IsPrimitiveTypeArray(array))
                throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(array));

            // Is the index within the valid range of the array?
            if ((uint)index >= (uint)_ByteLength(array))
                throw new ArgumentOutOfRangeException(nameof(index));

            return Unsafe.Add<byte>(ref array.GetRawArrayData(), index);
        }

        public static void SetByte(Array array, int index, byte value)
        {
            // Is the array present?
            if (array == null)
                throw new ArgumentNullException(nameof(array));

            // Is it of primitive types?
            if (!IsPrimitiveTypeArray(array))
                throw new ArgumentException(SR.Arg_MustBePrimArray, nameof(array));

            // Is the index within the valid range of the array?
            if ((uint)index >= (uint)_ByteLength(array))
                throw new ArgumentOutOfRangeException(nameof(index));

            Unsafe.Add<byte>(ref array.GetRawArrayData(), index) = value;
        }
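
        // Illustrative usage sketch (editor's addition, not part of the original file):
        // the accessors above view any primitive-typed array as raw bytes. The expected
        // values in the comments assume a little-endian platform.
        private static void ByteAccessExample()
        {
            int[] values = { 0x12345678 };
            Debug.Assert(ByteLength(values) == 4);   // one Int32 element == 4 bytes
            byte low = GetByte(values, 0);           // 0x78 on little-endian hardware
            SetByte(values, 3, 0x7F);                // patch the most significant byte
            Debug.Assert(low == 0x78 && values[0] == 0x7F345678);
        }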

        // This is currently used by System.IO.UnmanagedMemoryStream.
        internal static unsafe void ZeroMemory(byte* dest, long len)
        {
            Debug.Assert((ulong)len == (nuint)len);
            ZeroMemory(dest, (nuint)len);
        }

        // The signature of this method differs between 64-bit and other platforms
        // (nuint is pointer-sized) for performance reasons.
        internal static unsafe void ZeroMemory(byte* dest, nuint len)
        {
            SpanHelpers.ClearWithoutReferences(ref *dest, len);
        }
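
        // Illustrative usage sketch (editor's addition): clearing a freshly allocated
        // native buffer, roughly what a caller like UnmanagedMemoryStream needs to do
        // before exposing uninitialized native memory.
        private static unsafe void ZeroMemoryExample()
        {
            IntPtr native = Marshal.AllocHGlobal(256);
            try
            {
                ZeroMemory((byte*)native, 256);   // long overload; forwards to the nuint one
            }
            finally
            {
                Marshal.FreeHGlobal(native);
            }
        }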

        // The attributes on this method are chosen for best JIT performance.
        // Please do not edit unless intentional.
        [MethodImplAttribute(MethodImplOptions.AggressiveInlining)]
        [CLSCompliant(false)]
        public static unsafe void MemoryCopy(void* source, void* destination, long destinationSizeInBytes, long sourceBytesToCopy)
        {
            if (sourceBytesToCopy > destinationSizeInBytes)
            {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.sourceBytesToCopy);
            }

            Memmove((byte*)destination, (byte*)source, checked((nuint)sourceBytesToCopy));
        }

        // The attributes on this method are chosen for best JIT performance.
        // Please do not edit unless intentional.
        [MethodImplAttribute(MethodImplOptions.AggressiveInlining)]
        [CLSCompliant(false)]
        public static unsafe void MemoryCopy(void* source, void* destination, ulong destinationSizeInBytes, ulong sourceBytesToCopy)
        {
            if (sourceBytesToCopy > destinationSizeInBytes)
            {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.sourceBytesToCopy);
            }

            Memmove((byte*)destination, (byte*)source, checked((nuint)sourceBytesToCopy));
        }
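
        // Illustrative usage sketch (editor's addition): MemoryCopy is the public,
        // bounds-checked entry point over Memmove. destinationSizeInBytes caps how much
        // the callee may write; exceeding it throws before any copying happens.
        private static unsafe void MemoryCopyExample()
        {
            byte* source = stackalloc byte[16];
            byte* destination = stackalloc byte[16];
            for (int i = 0; i < 16; i++)
                source[i] = (byte)i;

            MemoryCopy(source, destination, destinationSizeInBytes: 16, sourceBytesToCopy: 16);
            Debug.Assert(destination[15] == 15);
        }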

        internal static unsafe void Memcpy(byte[] dest, int destIndex, byte* src, int srcIndex, int len)
        {
            Debug.Assert((srcIndex >= 0) && (destIndex >= 0) && (len >= 0), "Index and length must be non-negative!");
            Debug.Assert(dest.Length - destIndex >= len, "not enough bytes in dest");

            // If dest has 0 elements, the fixed statement will throw an
            // IndexOutOfRangeException. Special-case 0-byte copies.
            if (len == 0)
                return;

            fixed (byte* pDest = dest)
            {
                Memcpy(pDest + destIndex, src + srcIndex, len);
            }
        }

        internal static unsafe void Memcpy(byte* pDest, int destIndex, byte[] src, int srcIndex, int len)
        {
            Debug.Assert((srcIndex >= 0) && (destIndex >= 0) && (len >= 0), "Index and length must be non-negative!");
            Debug.Assert(src.Length - srcIndex >= len, "not enough bytes in src");

            // If src has 0 elements, the fixed statement will throw an
            // IndexOutOfRangeException. Special-case 0-byte copies.
            if (len == 0)
                return;

            fixed (byte* pSrc = src)
            {
                Memcpy(pDest + destIndex, pSrc + srcIndex, len);
            }
        }
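
        // Illustrative usage sketch (editor's addition): the overloads above bridge
        // managed byte[] buffers and raw pointers, pinning the managed side just long
        // enough for the copy.
        private static unsafe void MemcpyExample()
        {
            byte[] managed = { 1, 2, 3, 4 };
            byte* native = stackalloc byte[4];

            // Copy all four managed bytes into the native buffer.
            Memcpy(native, 0, managed, 0, managed.Length);
            Debug.Assert(native[3] == 4);
        }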

        // The signature of this method differs between 64-bit and other platforms
        // (nuint is pointer-sized) for performance reasons.
        internal static unsafe void Memmove(byte* dest, byte* src, nuint len)
        {
            // P/Invoke into the native version when the buffers are overlapping.
            // The unsigned subtraction wraps around, so each comparison is true exactly
            // when one pointer falls inside [other, other + len).
            if (((nuint)dest - (nuint)src < len) || ((nuint)src - (nuint)dest < len))
            {
                goto PInvoke;
            }

            byte* srcEnd = src + len;
            byte* destEnd = dest + len;

            if (len <= 16) goto MCPY02;
            if (len > 64) goto MCPY05;

            MCPY00:
            // Copy bytes which are multiples of 16 and leave the remainder for MCPY01 to handle.
            Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
            *(Block16*)dest = *(Block16*)src;                   // [0,16]
#elif BIT64
            *(long*)dest = *(long*)src;
            *(long*)(dest + 8) = *(long*)(src + 8);             // [0,16]
#else
            *(int*)dest = *(int*)src;
            *(int*)(dest + 4) = *(int*)(src + 4);
            *(int*)(dest + 8) = *(int*)(src + 8);
            *(int*)(dest + 12) = *(int*)(src + 12);             // [0,16]
#endif
            if (len <= 32) goto MCPY01;
#if HAS_CUSTOM_BLOCKS
            *(Block16*)(dest + 16) = *(Block16*)(src + 16);     // [0,32]
#elif BIT64
            *(long*)(dest + 16) = *(long*)(src + 16);
            *(long*)(dest + 24) = *(long*)(src + 24);           // [0,32]
#else
            *(int*)(dest + 16) = *(int*)(src + 16);
            *(int*)(dest + 20) = *(int*)(src + 20);
            *(int*)(dest + 24) = *(int*)(src + 24);
            *(int*)(dest + 28) = *(int*)(src + 28);             // [0,32]
#endif
            if (len <= 48) goto MCPY01;
#if HAS_CUSTOM_BLOCKS
            *(Block16*)(dest + 32) = *(Block16*)(src + 32);     // [0,48]
#elif BIT64
            *(long*)(dest + 32) = *(long*)(src + 32);
            *(long*)(dest + 40) = *(long*)(src + 40);           // [0,48]
#else
            *(int*)(dest + 32) = *(int*)(src + 32);
            *(int*)(dest + 36) = *(int*)(src + 36);
            *(int*)(dest + 40) = *(int*)(src + 40);
            *(int*)(dest + 44) = *(int*)(src + 44);             // [0,48]
#endif

            MCPY01:
            // Unconditionally copy the last 16 bytes using destEnd and srcEnd and return.
            Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
            *(Block16*)(destEnd - 16) = *(Block16*)(srcEnd - 16);
#elif BIT64
            *(long*)(destEnd - 16) = *(long*)(srcEnd - 16);
            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
            *(int*)(destEnd - 16) = *(int*)(srcEnd - 16);
            *(int*)(destEnd - 12) = *(int*)(srcEnd - 12);
            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
            return;

            MCPY02:
            // Copy the first 8 bytes and then unconditionally copy the last 8 bytes and return.
            if ((len & 24) == 0) goto MCPY03;
            Debug.Assert(len >= 8 && len <= 16);
#if BIT64
            *(long*)dest = *(long*)src;
            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
            *(int*)dest = *(int*)src;
            *(int*)(dest + 4) = *(int*)(src + 4);
            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
            return;

            MCPY03:
            // Copy the first 4 bytes and then unconditionally copy the last 4 bytes and return.
            if ((len & 4) == 0) goto MCPY04;
            Debug.Assert(len >= 4 && len < 8);
            *(int*)dest = *(int*)src;
            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
            return;

            MCPY04:
            // Copy the first byte. If two or more bytes remain, unconditionally copy the last 2 bytes and return.
            Debug.Assert(len < 4);
            if (len == 0) return;
            *dest = *src;
            if ((len & 2) == 0) return;
            *(short*)(destEnd - 2) = *(short*)(srcEnd - 2);
            return;

            MCPY05:
            // P/Invoke to the native version when the copy length exceeds the threshold.
            if (len > MemmoveNativeThreshold)
            {
                goto PInvoke;
            }

            // Copy 64 bytes at a time until the remainder is less than 64.
            // If the remainder is greater than 16 bytes, then jump to MCPY00. Otherwise, unconditionally copy the last 16 bytes and return.
            Debug.Assert(len > 64 && len <= MemmoveNativeThreshold);
            nuint n = len >> 6;

            MCPY06:
#if HAS_CUSTOM_BLOCKS
            *(Block64*)dest = *(Block64*)src;
#elif BIT64
            *(long*)dest = *(long*)src;
            *(long*)(dest + 8) = *(long*)(src + 8);
            *(long*)(dest + 16) = *(long*)(src + 16);
            *(long*)(dest + 24) = *(long*)(src + 24);
            *(long*)(dest + 32) = *(long*)(src + 32);
            *(long*)(dest + 40) = *(long*)(src + 40);
            *(long*)(dest + 48) = *(long*)(src + 48);
            *(long*)(dest + 56) = *(long*)(src + 56);
#else
            *(int*)dest = *(int*)src;
            *(int*)(dest + 4) = *(int*)(src + 4);
            *(int*)(dest + 8) = *(int*)(src + 8);
            *(int*)(dest + 12) = *(int*)(src + 12);
            *(int*)(dest + 16) = *(int*)(src + 16);
            *(int*)(dest + 20) = *(int*)(src + 20);
            *(int*)(dest + 24) = *(int*)(src + 24);
            *(int*)(dest + 28) = *(int*)(src + 28);
            *(int*)(dest + 32) = *(int*)(src + 32);
            *(int*)(dest + 36) = *(int*)(src + 36);
            *(int*)(dest + 40) = *(int*)(src + 40);
            *(int*)(dest + 44) = *(int*)(src + 44);
            *(int*)(dest + 48) = *(int*)(src + 48);
            *(int*)(dest + 52) = *(int*)(src + 52);
            *(int*)(dest + 56) = *(int*)(src + 56);
            *(int*)(dest + 60) = *(int*)(src + 60);
#endif
            dest += 64;
            src += 64;
            n--;
            if (n != 0) goto MCPY06;

            len %= 64;
            if (len > 16) goto MCPY00;
#if HAS_CUSTOM_BLOCKS
            *(Block16*)(destEnd - 16) = *(Block16*)(srcEnd - 16);
#elif BIT64
            *(long*)(destEnd - 16) = *(long*)(srcEnd - 16);
            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
#else
            *(int*)(destEnd - 16) = *(int*)(srcEnd - 16);
            *(int*)(destEnd - 12) = *(int*)(srcEnd - 12);
            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
#endif
            return;

            PInvoke:
            _Memmove(dest, src, len);
        }
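
        // Illustrative usage sketch (editor's addition): overlapping regions are legal
        // with Memmove. Here dest and src overlap by 8 bytes, so the wrap-around test
        // at the top of the method routes the copy to the native memmove.
        private static unsafe void OverlappingMemmoveExample()
        {
            byte* buffer = stackalloc byte[24];
            for (int i = 0; i < 24; i++)
                buffer[i] = (byte)i;

            Memmove(buffer, buffer + 8, 16);   // shift a 16-byte window left by 8
            Debug.Assert(buffer[0] == 8 && buffer[15] == 23);
        }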

        // The signature of this method differs between 64-bit and other platforms
        // (nuint is pointer-sized) for performance reasons.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void Memmove<T>(ref T destination, ref T source, nuint elementCount)
        {
            if (!RuntimeHelpers.IsReferenceOrContainsReferences<T>())
            {
                // Blittable memmove
                Memmove(
                    ref Unsafe.As<T, byte>(ref destination),
                    ref Unsafe.As<T, byte>(ref source),
                    elementCount * (nuint)Unsafe.SizeOf<T>());
            }
            else
            {
                // Non-blittable memmove

                // Try to avoid calling RhBulkMoveWithWriteBarrier if we can get away
                // with a no-op.
                if (!Unsafe.AreSame(ref destination, ref source) && elementCount != 0)
                {
                    RuntimeImports.RhBulkMoveWithWriteBarrier(
                        ref Unsafe.As<T, byte>(ref destination),
                        ref Unsafe.As<T, byte>(ref source),
                        elementCount * (nuint)Unsafe.SizeOf<T>());
                }
            }
        }
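
        // Illustrative usage sketch (editor's addition): the generic overload checks whether
        // T contains GC references. Plain ints take the raw byte-copy path; element types
        // holding object references would go through the write-barrier-aware bulk move.
        private static void GenericMemmoveExample()
        {
            int[] source = { 1, 2, 3, 4 };
            int[] destination = new int[4];

            Memmove(ref destination[0], ref source[0], (nuint)source.Length);
            Debug.Assert(destination[3] == 4);
        }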

        // The signature of this method differs between 64-bit and other platforms
        // (nuint is pointer-sized) for performance reasons.
        private static void Memmove(ref byte dest, ref byte src, nuint len)
        {
            // P/Invoke into the native version when the buffers are overlapping.
            if (((nuint)Unsafe.ByteOffset(ref src, ref dest) < len) || ((nuint)Unsafe.ByteOffset(ref dest, ref src) < len))
            {
                goto BuffersOverlap;
            }

            // Use "(IntPtr)(nint)len" to avoid overflow checking on the explicit cast to IntPtr
            ref byte srcEnd = ref Unsafe.Add(ref src, (IntPtr)(nint)len);
            ref byte destEnd = ref Unsafe.Add(ref dest, (IntPtr)(nint)len);

            if (len <= 16)
                goto MCPY02;
            if (len > 64)
                goto MCPY05;

            MCPY00:
            // Copy bytes which are multiples of 16 and leave the remainder for MCPY01 to handle.
            Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
            Unsafe.As<byte, Block16>(ref dest) = Unsafe.As<byte, Block16>(ref src); // [0,16]
#elif BIT64
            Unsafe.As<byte, long>(ref dest) = Unsafe.As<byte, long>(ref src);
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 8)); // [0,16]
#else
            Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 4));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 8));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 12)); // [0,16]
#endif
            if (len <= 32)
                goto MCPY01;
#if HAS_CUSTOM_BLOCKS
            Unsafe.As<byte, Block16>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref src, 16)); // [0,32]
#elif BIT64
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 16));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 24)); // [0,32]
#else
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 16));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 20)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 20));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 24));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 28)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 28)); // [0,32]
#endif
            if (len <= 48)
                goto MCPY01;
#if HAS_CUSTOM_BLOCKS
            Unsafe.As<byte, Block16>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref src, 32)); // [0,48]
#elif BIT64
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 32));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 40)); // [0,48]
#else
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 32));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 36)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 36));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 40));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 44)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 44)); // [0,48]
#endif

            MCPY01:
            // Unconditionally copy the last 16 bytes using destEnd and srcEnd and return.
            Debug.Assert(len > 16 && len <= 64);
#if HAS_CUSTOM_BLOCKS
            Unsafe.As<byte, Block16>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref srcEnd, -16));
#elif BIT64
            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -16));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -8));
#else
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -16));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -12));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -8));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
#endif
            return;

            MCPY02:
            // Copy the first 8 bytes and then unconditionally copy the last 8 bytes and return.
            if ((len & 24) == 0)
                goto MCPY03;
            Debug.Assert(len >= 8 && len <= 16);
#if BIT64
            Unsafe.As<byte, long>(ref dest) = Unsafe.As<byte, long>(ref src);
            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -8));
#else
            Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 4));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -8));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
#endif
            return;

            MCPY03:
            // Copy the first 4 bytes and then unconditionally copy the last 4 bytes and return.
            if ((len & 4) == 0)
                goto MCPY04;
            Debug.Assert(len >= 4 && len < 8);
            Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
            return;

            MCPY04:
            // Copy the first byte. If two or more bytes remain, unconditionally copy the last 2 bytes and return.
            Debug.Assert(len < 4);
            if (len == 0)
                return;
            dest = src;
            if ((len & 2) == 0)
                return;
            Unsafe.As<byte, short>(ref Unsafe.Add(ref destEnd, -2)) = Unsafe.As<byte, short>(ref Unsafe.Add(ref srcEnd, -2));
            return;

            MCPY05:
            // P/Invoke to the native version when the copy length exceeds the threshold.
            if (len > MemmoveNativeThreshold)
            {
                goto PInvoke;
            }

            // Copy 64 bytes at a time until the remainder is less than 64.
            // If the remainder is greater than 16 bytes, then jump to MCPY00. Otherwise, unconditionally copy the last 16 bytes and return.
            Debug.Assert(len > 64 && len <= MemmoveNativeThreshold);
            nuint n = len >> 6;

            MCPY06:
#if HAS_CUSTOM_BLOCKS
            Unsafe.As<byte, Block64>(ref dest) = Unsafe.As<byte, Block64>(ref src);
#elif BIT64
            Unsafe.As<byte, long>(ref dest) = Unsafe.As<byte, long>(ref src);
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 8));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 16));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 24));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 32));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 40));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 48)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 48));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 56)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 56));
#else
            Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 4));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 8));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 12));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 16));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 20)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 20));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 24));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 28)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 28));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 32));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 36)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 36));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 40));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 44)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 44));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 48)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 48));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 52)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 52));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 56)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 56));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 60)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 60));
#endif
            dest = ref Unsafe.Add(ref dest, 64);
            src = ref Unsafe.Add(ref src, 64);
            n--;
            if (n != 0)
                goto MCPY06;

            len %= 64;
            if (len > 16)
                goto MCPY00;
#if HAS_CUSTOM_BLOCKS
            Unsafe.As<byte, Block16>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref srcEnd, -16));
#elif BIT64
            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -16));
            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -8));
#else
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -16));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -12));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -8));
            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
#endif
            return;

            BuffersOverlap:
            // If the buffers overlap perfectly, there's no point in copying the data.
            if (Unsafe.AreSame(ref dest, ref src))
            {
                return;
            }

            PInvoke:
            _Memmove(ref dest, ref src, len);
        }

        // Non-inlinable wrapper around the QCall that avoids polluting the fast path
        // with P/Invoke prolog/epilog.
        [MethodImplAttribute(MethodImplOptions.NoInlining)]
        private static unsafe void _Memmove(byte* dest, byte* src, nuint len)
        {
            __Memmove(dest, src, len);
        }

        // Non-inlinable wrapper around the QCall that avoids polluting the fast path
        // with P/Invoke prolog/epilog.
        [MethodImplAttribute(MethodImplOptions.NoInlining)]
        private static unsafe void _Memmove(ref byte dest, ref byte src, nuint len)
        {
            fixed (byte* pDest = &dest)
            fixed (byte* pSrc = &src)
                __Memmove(pDest, pSrc, len);
        }

#if HAS_CUSTOM_BLOCKS
        // Sized-but-empty structs: assigning one copies exactly 16 (or 64) bytes at once,
        // which the JIT can lower to wide block moves on the platforms that define HAS_CUSTOM_BLOCKS.
        [StructLayout(LayoutKind.Sequential, Size = 16)]
        private struct Block16 { }

        [StructLayout(LayoutKind.Sequential, Size = 64)]
        private struct Block64 { }
#endif // HAS_CUSTOM_BLOCKS
    }
}