SimdTest.cpp

  1. 
  2. #include "../testTools.h"
  3. #include "../../DFPSR/base/simd.h"
  4. #include "../../DFPSR/base/endian.h"
  5. // TODO: Set up a test where SIMD is disabled to force using the reference implementation.
  6. // TODO: Keep the reference implementation alongside the SIMD types during brute-force testing with millions of random inputs.
  7. #define ASSERT_EQUAL_SIMD(A, B) ASSERT_COMP(A, B, allLanesEqual, "==")
  8. #define ASSERT_NOTEQUAL_SIMD(A, B) ASSERT_COMP(A, B, !allLanesEqual, "!=")
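// The helper below is a minimal sketch of the brute-force strategy described in the TODO above:
// compute the same operation with a plain scalar loop and with the SIMD type, then require every
// lane to match. It relies only on constructs already used in this file (the four-lane U32x4
// constructor, operator& between vectors, and ASSERT_EQUAL_SIMD) and assumes uint32_t is visible
// through the existing includes. The helper name, the inline pseudo-random generator and the
// iteration count are illustrative assumptions; the function is not wired into the test runner.
static uint32_t sketchRandomState = 123456789u;
static uint32_t sketchNextRandom() {
	// Plain 32-bit linear congruential generator, so no extra headers are needed.
	sketchRandomState = sketchRandomState * 1664525u + 1013904223u;
	return sketchRandomState;
}
static void bruteForceBitwiseAndSketch(int iterations) {
	for (int i = 0; i < iterations; i++) {
		uint32_t a[4], b[4], expected[4];
		for (int lane = 0; lane < 4; lane++) {
			a[lane] = sketchNextRandom();
			b[lane] = sketchNextRandom();
			expected[lane] = a[lane] & b[lane]; // Scalar reference result for this lane.
		}
		ASSERT_EQUAL_SIMD(U32x4(a[0], a[1], a[2], a[3]) & U32x4(b[0], b[1], b[2], b[3]),
		                  U32x4(expected[0], expected[1], expected[2], expected[3]));
	}
}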
  9. static void testComparisons() {
  10. // Test non-vectorized comparison functions. (Used for test conditions and debug assertions)
  11. ASSERT_EQUAL(allLanesEqual(I32x4(-2, 1, 4, 7345), I32x4(-2, 1, 4, 7345)), true);
  12. ASSERT_EQUAL(allLanesEqual(I32x4(-2, 1, 4, 7345), I32x4( 2, 1, 4, 7345)), false);
  13. ASSERT_EQUAL(allLanesEqual(I32x4(-2, 1, 4, 7345), I32x4(-2, 5, 4, 7345)), false);
  14. ASSERT_EQUAL(allLanesEqual(I32x4(-2, 1, 4, 7345), I32x4(-2, 1, 2, 7345)), false);
  15. ASSERT_EQUAL(allLanesEqual(I32x4(-2, 1, 4, 7345), I32x4(-2, 1, 4, 6531)), false);
  16. ASSERT_EQUAL(allLanesEqual(I32x4(-2, 1, 4, 7345), I32x4(-2, 0, 4, 385)), false);
  17. ASSERT_EQUAL(allLanesEqual(I32x4( 0, 0, 0, 0), I32x4(-2, 1, 4, 7345)), false);
  18. ASSERT_EQUAL(allLanesNotEqual(I32x4(-2, 1, 4, 5), I32x4( 6, 8, 3, 7)), true);
  19. ASSERT_EQUAL(allLanesNotEqual(I32x4(-2, 1, 4, 5), I32x4(-2, 8, 3, 7)), false);
  20. ASSERT_EQUAL(allLanesNotEqual(I32x4(-2, 1, 4, 5), I32x4( 6, 1, 3, 7)), false);
  21. ASSERT_EQUAL(allLanesNotEqual(I32x4(-2, 1, 4, 5), I32x4( 6, 8, 4, 7)), false);
  22. ASSERT_EQUAL(allLanesNotEqual(I32x4(-2, 1, 4, 5), I32x4( 6, 8, 3, 5)), false);
  23. ASSERT_EQUAL(allLanesNotEqual(I32x4(-2, 1, 4, 5), I32x4(-2, 8, 3, 5)), false);
  24. ASSERT_EQUAL(allLanesNotEqual(I32x4(-2, 1, 4, 5), I32x4( 6, 1, 4, 7)), false);
  25. ASSERT_EQUAL(allLanesLesser (I32x4(-4, -1, 1, 3), I32x4(-3, 0, 2, 4)), true);
  26. ASSERT_EQUAL(allLanesLesser (I32x4(-3, -1, 1, 3), I32x4(-3, 0, 2, 4)), false);
  27. ASSERT_EQUAL(allLanesLesser (I32x4(-4, 0, 1, 3), I32x4(-3, 0, 2, 4)), false);
  28. ASSERT_EQUAL(allLanesLesser (I32x4(-4, -1, 2, 3), I32x4(-3, 0, 2, 4)), false);
  29. ASSERT_EQUAL(allLanesLesser (I32x4(-4, -1, 1, 4), I32x4(-3, 0, 2, 4)), false);
  30. ASSERT_EQUAL(allLanesLesser (I32x4(36, -1, 1, 3), I32x4(-3, 0, 2, 4)), false);
  31. ASSERT_EQUAL(allLanesLesser (I32x4(-4, 86, 1, 3), I32x4(-3, 0, 2, 4)), false);
  32. ASSERT_EQUAL(allLanesLesser (I32x4(-4, -1, 35, 3), I32x4(-3, 0, 2, 4)), false);
  33. ASSERT_EQUAL(allLanesLesser (I32x4(-4, -1, 1, 75), I32x4(-3, 0, 2, 4)), false);
  34. ASSERT_EQUAL(allLanesGreater(I32x4(-2, 1, 4, 5), I32x4(-3, 0, 2, 4)), true);
  35. ASSERT_EQUAL(allLanesGreater(I32x4(-3, 1, 4, 5), I32x4(-3, 0, 2, 4)), false);
  36. ASSERT_EQUAL(allLanesGreater(I32x4(-2, 0, 4, 5), I32x4(-3, 0, 2, 4)), false);
  37. ASSERT_EQUAL(allLanesGreater(I32x4(-2, 1, 2, 5), I32x4(-3, 0, 2, 4)), false);
  38. ASSERT_EQUAL(allLanesGreater(I32x4(-2, 1, 4, 4), I32x4(-3, 0, 2, 4)), false);
  39. ASSERT_EQUAL(allLanesGreater(I32x4(-5, 1, 4, 5), I32x4(-3, 0, 2, 4)), false);
  40. ASSERT_EQUAL(allLanesGreater(I32x4(-2, -5, 4, 5), I32x4(-3, 0, 2, 4)), false);
  41. ASSERT_EQUAL(allLanesGreater(I32x4(-2, 1, -7, 5), I32x4(-3, 0, 2, 4)), false);
  42. ASSERT_EQUAL(allLanesGreater(I32x4(-2, 1, 4, -4), I32x4(-3, 0, 2, 4)), false);
  43. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(-4, -1, 1, 3), I32x4(-3, 0, 2, 4)), true);
  44. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(-3, -1, 1, 3), I32x4(-3, 0, 2, 4)), true);
  45. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(-4, 0, 1, 3), I32x4(-3, 0, 2, 4)), true);
  46. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(-4, -1, 2, 3), I32x4(-3, 0, 2, 4)), true);
  47. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(-4, -1, 1, 4), I32x4(-3, 0, 2, 4)), true);
  48. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(36, -1, 1, 3), I32x4(-3, 0, 2, 4)), false);
  49. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(-4, 86, 1, 3), I32x4(-3, 0, 2, 4)), false);
  50. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(-4, -1, 35, 3), I32x4(-3, 0, 2, 4)), false);
  51. ASSERT_EQUAL(allLanesLesserOrEqual (I32x4(-4, -1, 1, 75), I32x4(-3, 0, 2, 4)), false);
  52. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-2, 1, 4, 5), I32x4(-3, 0, 2, 4)), true);
  53. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-3, 1, 4, 5), I32x4(-3, 0, 2, 4)), true);
  54. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-2, 0, 4, 5), I32x4(-3, 0, 2, 4)), true);
  55. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-2, 1, 2, 5), I32x4(-3, 0, 2, 4)), true);
  56. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-2, 1, 4, 4), I32x4(-3, 0, 2, 4)), true);
  57. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-5, 1, 4, 5), I32x4(-3, 0, 2, 4)), false);
  58. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-2, -5, 4, 5), I32x4(-3, 0, 2, 4)), false);
  59. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-2, 1, -7, 5), I32x4(-3, 0, 2, 4)), false);
  60. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x4(-2, 1, 4, -4), I32x4(-3, 0, 2, 4)), false);
  61. ASSERT_EQUAL(allLanesEqual (I32x8(-2, 1, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), true);
  62. ASSERT_EQUAL(allLanesEqual (I32x8( 0, 1, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  63. ASSERT_EQUAL(allLanesEqual (I32x8(-2, 0, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  64. ASSERT_EQUAL(allLanesEqual (I32x8(-2, 1, 0, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  65. ASSERT_EQUAL(allLanesEqual (I32x8(-2, 1, 4, 0, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  66. ASSERT_EQUAL(allLanesEqual (I32x8(-2, 1, 4, 8, 0, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  67. ASSERT_EQUAL(allLanesEqual (I32x8(-2, 1, 4, 8, 74, 0, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  68. ASSERT_EQUAL(allLanesEqual (I32x8(-2, 1, 4, 8, 74, 23, 0, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  69. ASSERT_EQUAL(allLanesEqual (I32x8(-2, 1, 4, 8, 74, 23, 5, 0), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  70. ASSERT_EQUAL(allLanesNotEqual (I32x8( 5, 8, 6, 9, 35, 75, 3, 75), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), true);
  71. ASSERT_EQUAL(allLanesNotEqual (I32x8(-2, 8, 6, 9, 35, 75, 3, 75), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  72. ASSERT_EQUAL(allLanesNotEqual (I32x8( 5, 1, 6, 9, 35, 75, 3, 75), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  73. ASSERT_EQUAL(allLanesNotEqual (I32x8( 5, 8, 4, 9, 35, 75, 3, 75), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  74. ASSERT_EQUAL(allLanesNotEqual (I32x8( 5, 8, 6, 8, 35, 75, 3, 75), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  75. ASSERT_EQUAL(allLanesNotEqual (I32x8( 5, 8, 6, 9, 74, 75, 3, 75), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  76. ASSERT_EQUAL(allLanesNotEqual (I32x8( 5, 8, 6, 9, 35, 23, 3, 75), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  77. ASSERT_EQUAL(allLanesNotEqual (I32x8( 5, 8, 6, 9, 35, 75, 5, 75), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  78. ASSERT_EQUAL(allLanesNotEqual (I32x8( 5, 8, 6, 9, 35, 75, 3, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  79. ASSERT_EQUAL(allLanesNotEqual (I32x8(-2, 1, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  80. ASSERT_EQUAL(allLanesLesser (I32x8(-3, 0, 3, 7, 73, 22, 4, 63), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), true);
  81. ASSERT_EQUAL(allLanesGreater (I32x8(-1, 2, 5, 9, 75, 24, 6, 65), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), true);
  82. ASSERT_EQUAL(allLanesGreater (I32x8(-2, 2, 5, 9, 75, 24, 6, 65), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  83. ASSERT_EQUAL(allLanesGreater (I32x8(-1, 0, 5, 9, 75, 24, 6, 65), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  84. ASSERT_EQUAL(allLanesGreater (I32x8(-1, 2, 4, 9, 75, 24, 6, 65), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  85. ASSERT_EQUAL(allLanesGreater (I32x8(-1, 2, 5, 8, 75, 24, 6, 65), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  86. ASSERT_EQUAL(allLanesGreater (I32x8(-1, 2, 5, 9, 3, 24, 6, 65), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  87. ASSERT_EQUAL(allLanesGreater (I32x8(-1, 2, 5, 9, 75, 23, 6, 65), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  88. ASSERT_EQUAL(allLanesGreater (I32x8(-1, 2, 5, 9, 75, 24, 2, 65), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  89. ASSERT_EQUAL(allLanesGreater (I32x8(-1, 2, 5, 9, 75, 24, 6, 5), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  90. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-2, 1, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), true);
  91. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-1, 1, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  92. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-2, 2, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  93. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-2, 1, 5, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  94. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-2, 1, 4, 9, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  95. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-2, 1, 4, 8, 75, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  96. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-2, 1, 4, 8, 74, 73, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  97. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-2, 1, 4, 8, 74, 23, 6, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  98. ASSERT_EQUAL(allLanesLesserOrEqual (I32x8(-2, 1, 4, 8, 74, 23, 5, 69), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  99. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-2, 1, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), true);
  100. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-3, 1, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  101. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-2, 0, 4, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  102. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-2, 1, 2, 8, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  103. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-2, 1, 4, 5, 74, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  104. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-2, 1, 4, 8, 34, 23, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  105. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-2, 1, 4, 8, 74, 1, 5, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  106. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-2, 1, 4, 8, 74, 23, 3, 64), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  107. ASSERT_EQUAL(allLanesGreaterOrEqual(I32x8(-2, 1, 4, 8, 74, 23, 5, 4), I32x8(-2, 1, 4, 8, 74, 23, 5, 64)), false);
  108. ASSERT_EQUAL(allLanesEqual(U32x4(8, 1, 4, 7345), U32x4(8, 1, 4, 7345)), true);
  109. ASSERT_EQUAL(allLanesEqual(U32x4(8, 1, 4, 7345), U32x4(2, 1, 4, 7345)), false);
  110. ASSERT_EQUAL(allLanesEqual(U32x4(8, 1, 4, 7345), U32x4(8, 5, 4, 7345)), false);
  111. ASSERT_EQUAL(allLanesEqual(U32x4(8, 1, 4, 7345), U32x4(8, 1, 2, 7345)), false);
  112. ASSERT_EQUAL(allLanesEqual(U32x4(8, 1, 4, 7345), U32x4(8, 1, 4, 6531)), false);
  113. ASSERT_EQUAL(allLanesNotEqual(U32x4(8, 1, 4, 5), U32x4(6, 8, 3, 7)), true);
  114. ASSERT_EQUAL(allLanesNotEqual(U32x4(8, 1, 4, 5), U32x4(8, 8, 3, 7)), false);
  115. ASSERT_EQUAL(allLanesNotEqual(U32x4(8, 1, 4, 5), U32x4(6, 1, 3, 7)), false);
  116. ASSERT_EQUAL(allLanesNotEqual(U32x4(8, 1, 4, 5), U32x4(6, 8, 4, 7)), false);
  117. ASSERT_EQUAL(allLanesNotEqual(U32x4(8, 1, 4, 5), U32x4(6, 8, 3, 5)), false);
  118. ASSERT_EQUAL(allLanesLesser (U32x4( 7, 4, 1, 3), U32x4( 8, 5, 2, 4)), true);
  119. ASSERT_EQUAL(allLanesLesser (U32x4( 8, 4, 1, 3), U32x4( 8, 5, 2, 4)), false);
  120. ASSERT_EQUAL(allLanesLesser (U32x4( 7, 5, 1, 3), U32x4( 8, 5, 2, 4)), false);
  121. ASSERT_EQUAL(allLanesLesser (U32x4( 7, 4, 2, 3), U32x4( 8, 5, 2, 4)), false);
  122. ASSERT_EQUAL(allLanesLesser (U32x4( 7, 4, 1, 4), U32x4( 8, 5, 2, 4)), false);
  123. ASSERT_EQUAL(allLanesLesser (U32x4(36, 4, 1, 3), U32x4( 8, 5, 2, 4)), false);
  124. ASSERT_EQUAL(allLanesLesser (U32x4( 7,48, 1, 3), U32x4( 8, 5, 2, 4)), false);
  125. ASSERT_EQUAL(allLanesLesser (U32x4( 7, 4, 35, 3), U32x4( 8, 5, 2, 4)), false);
  126. ASSERT_EQUAL(allLanesLesser (U32x4( 7, 4, 1, 75), U32x4( 8, 5, 2, 4)), false);
  127. ASSERT_EQUAL(allLanesGreater(U32x4( 9, 6, 3, 5), U32x4( 8, 5, 2, 4)), true);
  128. ASSERT_EQUAL(allLanesGreater(U32x4( 8, 6, 3, 5), U32x4( 8, 5, 2, 4)), false);
  129. ASSERT_EQUAL(allLanesGreater(U32x4( 9, 5, 3, 5), U32x4( 8, 5, 2, 4)), false);
  130. ASSERT_EQUAL(allLanesGreater(U32x4( 9, 6, 2, 5), U32x4( 8, 5, 2, 4)), false);
  131. ASSERT_EQUAL(allLanesGreater(U32x4( 9, 6, 3, 4), U32x4( 8, 5, 2, 4)), false);
  132. ASSERT_EQUAL(allLanesGreater(U32x4( 4, 6, 3, 5), U32x4( 8, 5, 2, 4)), false);
  133. ASSERT_EQUAL(allLanesGreater(U32x4( 9, 2, 3, 5), U32x4( 8, 5, 2, 4)), false);
  134. ASSERT_EQUAL(allLanesGreater(U32x4( 9, 6, 1, 5), U32x4( 8, 5, 2, 4)), false);
  135. ASSERT_EQUAL(allLanesGreater(U32x4( 9, 6, 3, 0), U32x4( 8, 5, 2, 4)), false);
  136. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4( 6, 9, 1, 3), U32x4(7,10, 2, 4)), true);
  137. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4( 7, 9, 1, 3), U32x4(7,10, 2, 4)), true);
  138. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4( 6,10, 1, 3), U32x4(7,10, 2, 4)), true);
  139. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4( 6, 9, 2, 3), U32x4(7,10, 2, 4)), true);
  140. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4( 6, 9, 1, 4), U32x4(7,10, 2, 4)), true);
  141. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4(36, 9, 1, 3), U32x4(7,10, 2, 4)), false);
  142. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4( 6,86, 1, 3), U32x4(7,10, 2, 4)), false);
  143. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4( 6, 9,35, 3), U32x4(7,10, 2, 4)), false);
  144. ASSERT_EQUAL(allLanesLesserOrEqual (U32x4( 6, 9, 1,75), U32x4(7,10, 2, 4)), false);
  145. ASSERT_EQUAL(allLanesEqual (U32x8( 8, 1, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), true);
  146. ASSERT_EQUAL(allLanesEqual (U32x8( 0, 1, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  147. ASSERT_EQUAL(allLanesEqual (U32x8( 8, 0, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  148. ASSERT_EQUAL(allLanesEqual (U32x8( 8, 1, 0, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  149. ASSERT_EQUAL(allLanesEqual (U32x8( 8, 1, 4, 0, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  150. ASSERT_EQUAL(allLanesEqual (U32x8( 8, 1, 4, 8, 0, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  151. ASSERT_EQUAL(allLanesEqual (U32x8( 8, 1, 4, 8, 74, 0, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  152. ASSERT_EQUAL(allLanesEqual (U32x8( 8, 1, 4, 8, 74, 23, 0, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  153. ASSERT_EQUAL(allLanesEqual (U32x8( 8, 1, 4, 8, 74, 23, 5, 0), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  154. ASSERT_EQUAL(allLanesNotEqual (U32x8( 5, 8, 6, 9, 35, 75, 3, 75), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), true);
  155. ASSERT_EQUAL(allLanesNotEqual (U32x8( 8, 8, 6, 9, 35, 75, 3, 75), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  156. ASSERT_EQUAL(allLanesNotEqual (U32x8( 5, 1, 6, 9, 35, 75, 3, 75), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  157. ASSERT_EQUAL(allLanesNotEqual (U32x8( 5, 8, 4, 9, 35, 75, 3, 75), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  158. ASSERT_EQUAL(allLanesNotEqual (U32x8( 5, 8, 6, 8, 35, 75, 3, 75), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  159. ASSERT_EQUAL(allLanesNotEqual (U32x8( 5, 8, 6, 9, 74, 75, 3, 75), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  160. ASSERT_EQUAL(allLanesNotEqual (U32x8( 5, 8, 6, 9, 35, 23, 3, 75), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  161. ASSERT_EQUAL(allLanesNotEqual (U32x8( 5, 8, 6, 9, 35, 75, 5, 75), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  162. ASSERT_EQUAL(allLanesNotEqual (U32x8( 5, 8, 6, 9, 35, 75, 3, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  163. ASSERT_EQUAL(allLanesNotEqual (U32x8( 8, 1, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  164. ASSERT_EQUAL(allLanesLesser (U32x8( 7, 0, 3, 7, 73, 22, 4, 63), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), true);
  165. ASSERT_EQUAL(allLanesGreater (U32x8( 9, 2, 5, 9, 75, 24, 6, 65), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), true);
  166. ASSERT_EQUAL(allLanesGreater (U32x8( 8, 2, 5, 9, 75, 24, 6, 65), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  167. ASSERT_EQUAL(allLanesGreater (U32x8( 9, 0, 5, 9, 75, 24, 6, 65), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  168. ASSERT_EQUAL(allLanesGreater (U32x8( 9, 2, 4, 9, 75, 24, 6, 65), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  169. ASSERT_EQUAL(allLanesGreater (U32x8( 9, 2, 5, 8, 75, 24, 6, 65), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  170. ASSERT_EQUAL(allLanesGreater (U32x8( 9, 2, 5, 9, 3, 24, 6, 65), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  171. ASSERT_EQUAL(allLanesGreater (U32x8( 9, 2, 5, 9, 75, 23, 6, 65), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  172. ASSERT_EQUAL(allLanesGreater (U32x8( 9, 2, 5, 9, 75, 24, 2, 65), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  173. ASSERT_EQUAL(allLanesGreater (U32x8( 9, 2, 5, 9, 75, 24, 6, 5), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  174. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 8, 1, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), true);
  175. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 9, 1, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  176. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 8, 2, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  177. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 8, 1, 5, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  178. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 8, 1, 4, 9, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  179. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 8, 1, 4, 8, 75, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  180. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 8, 1, 4, 8, 74, 73, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  181. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 8, 1, 4, 8, 74, 23, 6, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  182. ASSERT_EQUAL(allLanesLesserOrEqual (U32x8( 8, 1, 4, 8, 74, 23, 5, 69), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  183. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 8, 1, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), true);
  184. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 7, 1, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  185. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 8, 0, 4, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  186. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 8, 1, 2, 8, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  187. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 8, 1, 4, 5, 74, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  188. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 8, 1, 4, 8, 34, 23, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  189. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 8, 1, 4, 8, 74, 1, 5, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  190. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 8, 1, 4, 8, 74, 23, 3, 64), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  191. ASSERT_EQUAL(allLanesGreaterOrEqual(U32x8( 8, 1, 4, 8, 74, 23, 5, 4), U32x8( 8, 1, 4, 8, 74, 23, 5, 64)), false);
  192. // F32x4 Comparisons
  193. ASSERT_EQUAL_SIMD(F32x4(1.5f), F32x4(1.5f, 1.5f, 1.5f, 1.5f));
  194. ASSERT_EQUAL_SIMD(F32x4(-1.5f), F32x4(-1.5f, -1.5f, -1.5f, -1.5f));
  195. ASSERT_EQUAL_SIMD(F32x4(1.2f, 3.4f, 5.6f, 7.8f), F32x4(1.2f, 3.4f, 5.6f, 7.8f));
  196. ASSERT_EQUAL(F32x4(1.2f, 3.4f, 5.6f, 7.8f).get().x, 1.2f);
  197. ASSERT_EQUAL(F32x4(1.2f, 3.4f, 5.6f, 7.8f).get().y, 3.4f);
  198. ASSERT_EQUAL(F32x4(1.2f, 3.4f, 5.6f, 7.8f).get().z, 5.6f);
  199. ASSERT_EQUAL(F32x4(1.2f, 3.4f, 5.6f, 7.8f).get().w, 7.8f);
  200. ASSERT_NOTEQUAL_SIMD(F32x4(1.3f, 3.4f, 5.6f, 7.8f), F32x4(1.2f, 3.4f, 5.6f, 7.8f));
  201. ASSERT_NOTEQUAL_SIMD(F32x4(1.2f, 3.4f, 5.6f, 7.8f), F32x4(1.2f, -1.4f, 5.6f, 7.8f));
  202. ASSERT_NOTEQUAL_SIMD(F32x4(1.2f, 3.4f, 5.5f, 7.8f), F32x4(1.2f, 3.4f, 5.6f, 7.8f));
  203. ASSERT_NOTEQUAL_SIMD(F32x4(1.2f, 3.4f, 5.6f, 7.8f), F32x4(1.2f, 3.4f, 5.6f, -7.8f));
  204. // F32x8 Comparisons
  205. ASSERT_EQUAL_SIMD(F32x8(1.5f), F32x8(1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f));
  206. ASSERT_EQUAL_SIMD(F32x8(-1.5f), F32x8(-1.5f, -1.5f, -1.5f, -1.5f, -1.5f, -1.5f, -1.5f, -1.5f));
  207. ASSERT_EQUAL_SIMD(F32x8(1.2f, 3.4f, 5.6f, 7.8f, -2.4f, 452.351f, 1000000.0f, -1000.0f), F32x8(1.2f, 3.4f, 5.6f, 7.8f, -2.4f, 452.351f, 1000000.0f, -1000.0f));
  208. ASSERT_NOTEQUAL_SIMD(F32x8(1.3f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f), F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f));
  209. ASSERT_NOTEQUAL_SIMD(F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f), F32x8(1.2f, -1.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f));
  210. ASSERT_NOTEQUAL_SIMD(F32x8(1.2f, 3.4f, 5.5f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f), F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f));
  211. ASSERT_NOTEQUAL_SIMD(F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f), F32x8(1.2f, 3.4f, 5.6f, -7.8f, 5.3f, 6.7f, 1.4f, -5.2f));
  212. ASSERT_NOTEQUAL_SIMD(F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f), F32x8(1.2f, 3.4f, 5.6f, 7.8f, 0.0f, 6.7f, 1.4f, -5.2f));
  213. ASSERT_NOTEQUAL_SIMD(F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f), F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.69f, 1.4f, -5.2f));
  214. ASSERT_NOTEQUAL_SIMD(F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f), F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.3f, -5.2f));
  215. ASSERT_NOTEQUAL_SIMD(F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, -5.2f), F32x8(1.2f, 3.4f, 5.6f, 7.8f, 5.3f, 6.7f, 1.4f, 5.2f));
  216. // I32x4 Comparisons
  217. ASSERT_EQUAL_SIMD(I32x4(4), I32x4(4, 4, 4, 4));
  218. ASSERT_EQUAL_SIMD(I32x4(-4), I32x4(-4, -4, -4, -4));
  219. ASSERT_EQUAL_SIMD(I32x4(-1, 2, -3, 4), I32x4(-1, 2, -3, 4));
  220. ASSERT_NOTEQUAL_SIMD(I32x4(-1, 2, 7, 4), I32x4(-1, 2, -3, 4));
  221. // I32x8 Comparisons
  222. ASSERT_EQUAL_SIMD(I32x8(4), I32x8(4, 4, 4, 4, 4, 4, 4, 4));
  223. ASSERT_EQUAL_SIMD(I32x8(-4), I32x8(-4, -4, -4, -4, -4, -4, -4, -4));
  224. ASSERT_EQUAL_SIMD(I32x8(-1, 2, -3, 4, -5, 6, -7, 8), I32x8(-1, 2, -3, 4, -5, 6, -7, 8));
  225. ASSERT_NOTEQUAL_SIMD(I32x8(-1, 2, 7, 4, 8, 3, 5, 45), I32x8(-1, 2, -3, 4, 8, 3, 5, 45));
  226. // U32x4 Comparisons
  227. ASSERT_EQUAL_SIMD(U32x4(4), U32x4(4, 4, 4, 4));
  228. ASSERT_EQUAL_SIMD(U32x4(1, 2, 3, 4), U32x4(1, 2, 3, 4));
  229. ASSERT_NOTEQUAL_SIMD(U32x4(1, 2, 7, 4), U32x4(1, 2, 3, 4));
  230. // U32x8 Comparisons
  231. ASSERT_EQUAL_SIMD(U32x8(4), U32x8(4, 4, 4, 4, 4, 4, 4, 4));
  232. ASSERT_EQUAL_SIMD(U32x8(1, 2, 3, 4, 5, 6, 7, 8), U32x8(1, 2, 3, 4, 5, 6, 7, 8));
  233. ASSERT_NOTEQUAL_SIMD(U32x8(1, 2, 3, 4, 5, 6, 12, 8), U32x8(1, 2, 3, 4, 5, 6, 7, 8));
  234. // U16x8 Comparisons
  235. ASSERT_EQUAL_SIMD(U16x8(8), U16x8(8, 8, 8, 8, 8, 8, 8, 8));
  236. ASSERT_EQUAL_SIMD(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  237. ASSERT_NOTEQUAL_SIMD(U16x8(0, 2, 3, 4, 5, 6, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  238. ASSERT_NOTEQUAL_SIMD(U16x8(1, 0, 3, 4, 5, 6, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  239. ASSERT_NOTEQUAL_SIMD(U16x8(1, 2, 0, 4, 5, 6, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  240. ASSERT_NOTEQUAL_SIMD(U16x8(1, 2, 3, 0, 5, 6, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  241. ASSERT_NOTEQUAL_SIMD(U16x8(1, 2, 3, 4, 0, 6, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  242. ASSERT_NOTEQUAL_SIMD(U16x8(1, 2, 3, 4, 5, 0, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  243. ASSERT_NOTEQUAL_SIMD(U16x8(1, 2, 3, 4, 5, 6, 0, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  244. ASSERT_NOTEQUAL_SIMD(U16x8(1, 2, 3, 4, 5, 6, 7, 0), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  245. ASSERT_NOTEQUAL_SIMD(U16x8(1, 2, 0, 4, 5, 0, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  246. ASSERT_NOTEQUAL_SIMD(U16x8(1, 0, 3, 4, 5, 6, 0, 0), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  247. ASSERT_NOTEQUAL_SIMD(U16x8(0, 2, 3, 4, 0, 6, 7, 8), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  248. ASSERT_NOTEQUAL_SIMD(U16x8(0, 0, 0, 0, 0, 0, 0, 0), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  249. // U16x16 Comparisons
  250. ASSERT_EQUAL_SIMD(U16x16(8), U16x16(8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8));
  251. ASSERT_EQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  252. ASSERT_NOTEQUAL_SIMD(U16x16(0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  253. ASSERT_NOTEQUAL_SIMD(U16x16(1, 0, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  254. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  255. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 0, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  256. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  257. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  258. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 0, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  259. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 0, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  260. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 0, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  261. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  262. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  263. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 0, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  264. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 0, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  265. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 0, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  266. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 0, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  267. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  268. ASSERT_NOTEQUAL_SIMD(U16x16(1, 2, 0, 4, 5, 0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  269. ASSERT_NOTEQUAL_SIMD(U16x16(1, 0, 3, 4, 5, 6, 0, 0, 9, 10, 11, 12, 13, 0, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  270. ASSERT_NOTEQUAL_SIMD(U16x16(0, 2, 3, 4, 0, 6, 7, 8, 9, 10, 11, 0, 13, 14, 15, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  271. ASSERT_NOTEQUAL_SIMD(U16x16(0, 0, 0, 0, 0, 0, 0, 0, 9, 10, 11, 0, 13, 14, 0, 16), U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  272. // U8x16 Comparisons
  273. ASSERT_EQUAL_SIMD(U8x16(250), U8x16(250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250));
  274. ASSERT_EQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  275. ASSERT_NOTEQUAL_SIMD(U8x16(0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  276. ASSERT_NOTEQUAL_SIMD(U8x16(1, 0, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  277. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 0, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  278. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 0, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  279. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 0, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  280. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 0, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  281. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 0, 8, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  282. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 0, 9, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  283. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 0, 10, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  284. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  285. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  286. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 0, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  287. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 0, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  288. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 0, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  289. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 0, 255), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  290. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 0), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  291. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 251, 252, 6, 254, 255), U8x16(1, 2, 3, 4, 5, 9, 7, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  292. ASSERT_NOTEQUAL_SIMD(U8x16(1, 2, 3, 0, 5, 6, 7, 8, 9, 0, 250, 251, 252, 253, 254, 255), U8x16(1, 2, 3, 4, 5, 6, 4, 8, 9, 10, 250, 251, 252, 253, 254, 255));
  293. // U8x32 Comparisons
  294. ASSERT_EQUAL_SIMD(U8x32((uint8_t)250), U8x32(250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250));
  295. ASSERT_NOTEQUAL_SIMD(U8x32((uint8_t)250), U8x32(250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 100, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250));
  296. ASSERT_NOTEQUAL_SIMD(U8x32((uint8_t)250), U8x32(0, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250));
  297. ASSERT_NOTEQUAL_SIMD(U8x32((uint8_t)250), U8x32(250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 0));
  298. }
  299. static void testBitMasks() {
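// Each expected vector below is simply the lane-wise result of applying the mask, for example
// 0xFFFFFFFF & 0x0000FFFF == 0x0000FFFF and 0x12345678 & 0x0000FFFF == 0x00005678 in the first case.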
  300. ASSERT_EQUAL_SIMD(U32x4(0xFFFFFFFF, 0x12345678, 0xF0F0F0F0, 0x00000000) & 0x0000FFFF, U32x4(0x0000FFFF, 0x00005678, 0x0000F0F0, 0x00000000));
  301. ASSERT_EQUAL_SIMD(U32x4(0xFFFFFFFF, 0x12345678, 0xF0F0F0F0, 0x00000000) & 0xFFFF0000, U32x4(0xFFFF0000, 0x12340000, 0xF0F00000, 0x00000000));
  302. ASSERT_EQUAL_SIMD(U32x4(0xFFFFFFFF, 0x12345678, 0xF0F0F0F0, 0x00000000) | 0x0000FFFF, U32x4(0xFFFFFFFF, 0x1234FFFF, 0xF0F0FFFF, 0x0000FFFF));
  303. ASSERT_EQUAL_SIMD(U32x4(0xFFFFFFFF, 0x12345678, 0xF0F0F0F0, 0x00000000) | 0xFFFF0000, U32x4(0xFFFFFFFF, 0xFFFF5678, 0xFFFFF0F0, 0xFFFF0000));
  304. ASSERT_EQUAL_SIMD(U32x4(0xFFFFFFFF, 0xFFF000FF, 0xF0F0F0F0, 0x12345678) & U32x4(0xFF00FF00, 0xFFFF0000, 0x000FF000, 0x0FF00FF0), U32x4(0xFF00FF00, 0xFFF00000, 0x0000F000, 0x02300670));
  305. ASSERT_EQUAL_SIMD(U32x4(0xF00F000F, 0xFFF000FF, 0x10010011, 0xABC00000) | U32x4(0x0000FF00, 0xFFFF0000, 0x000FF000, 0x000DEF00), U32x4(0xF00FFF0F, 0xFFFF00FF, 0x100FF011, 0xABCDEF00));
  306. ASSERT_EQUAL_SIMD(U32x4(0xFFFFFFFF, 0x01234567, 0xF0F0F0F0, 0x00000000) ^ 0x0000FFFF, U32x4(0xFFFF0000, 0x0123BA98, 0xF0F00F0F, 0x0000FFFF));
  307. ASSERT_EQUAL_SIMD(
  308. U32x8(0xFFFFFFFF, 0x12345678, 0xF0F0F0F0, 0x00000000, 0xEEEEEEEE, 0x87654321, 0x0F0F0F0F, 0x00010001)
  309. & 0x0000FFFF,
  310. U32x8(0x0000FFFF, 0x00005678, 0x0000F0F0, 0x00000000, 0x0000EEEE, 0x00004321, 0x00000F0F, 0x00000001));
  311. ASSERT_EQUAL_SIMD(
  312. U32x8(0xFFFFFFFF, 0x12345678, 0xF0F0F0F0, 0x00000000, 0xEEEEEEEE, 0x87654321, 0x0F0F0F0F, 0x00010001)
  313. & 0xFFFF0000,
  314. U32x8(0xFFFF0000, 0x12340000, 0xF0F00000, 0x00000000, 0xEEEE0000, 0x87650000, 0x0F0F0000, 0x00010000));
  315. ASSERT_EQUAL_SIMD(
  316. U32x8(0xFFFFFFFF, 0x12345678, 0xF0F0F0F0, 0x00000000, 0xEEEEEEEE, 0x87654321, 0x0F0F0F0F, 0x00010001)
  317. | 0x0000FFFF,
  318. U32x8(0xFFFFFFFF, 0x1234FFFF, 0xF0F0FFFF, 0x0000FFFF, 0xEEEEFFFF, 0x8765FFFF, 0x0F0FFFFF, 0x0001FFFF));
  319. ASSERT_EQUAL_SIMD(
  320. U32x8(0xFFFFFFFF, 0x12345678, 0xF0F0F0F0, 0x00000000, 0xEEEEEEEE, 0x87654321, 0x0F0F0F0F, 0x00010001)
  321. | 0xFFFF0000,
  322. U32x8(0xFFFFFFFF, 0xFFFF5678, 0xFFFFF0F0, 0xFFFF0000, 0xFFFFEEEE, 0xFFFF4321, 0xFFFF0F0F, 0xFFFF0001));
  323. ASSERT_EQUAL_SIMD(
  324. U32x8(0xFFFFFFFF, 0xFFF000FF, 0xF0F0F0F0, 0x12345678, 0xEEEEEEEE, 0x87654321, 0x0F0F0F0F, 0x00010001)
  325. & U32x8(0xFF00FF00, 0xFFFF0000, 0x000FF000, 0x0FF00FF0, 0xF00FF00F, 0x00FFFF00, 0xF0F0F0F0, 0x0000FFFF),
  326. U32x8(0xFF00FF00, 0xFFF00000, 0x0000F000, 0x02300670, 0xE00EE00E, 0x00654300, 0x00000000, 0x00000001));
  327. ASSERT_EQUAL_SIMD(
  328. U32x8(0xFFFFFFFF, 0xFFF000FF, 0xF0F0F0F0, 0x12345678, 0xEEEEEEEE, 0x87654321, 0x0F0F0F0F, 0x00010001)
  329. | U32x8(0xFF00FF00, 0xFFFF0000, 0x000FF000, 0x0FF00FF0, 0xF00FF00F, 0x00FFFF00, 0xF0F0F0F0, 0x0000FFFF),
  330. U32x8(0xFFFFFFFF, 0xFFFF00FF, 0xF0FFF0F0, 0x1FF45FF8, 0xFEEFFEEF, 0x87FFFF21, 0xFFFFFFFF, 0x0001FFFF));
  331. ASSERT_EQUAL_SIMD(
  332. U32x8(0b11001100110000110101010010110011, 0b00101011001011101010001101111001, 0b11001010000110111010010100101100, 0b01010111010001010010101110010110, 0b10101110100110100010101011011001, 0b00101110100111010001101010110000, 0b11101010001011100010101110001111, 0b00101010111100010110010110001000)
  333. ^ U32x8(0b00101101001110100011010010100001, 0b10101110100101000011101001010011, 0b00101011100101001011000010100100, 0b11010011101001000110010110110111, 0b00111100101000101010001101001010, 0b00101110100110000111110011010101, 0b11001010010101010010110010101000, 0b11110000111100001111000011110000),
  334. U32x8(0b11100001111110010110000000010010, 0b10000101101110101001100100101010, 0b11100001100011110001010110001000, 0b10000100111000010100111000100001, 0b10010010001110001000100110010011, 0b00000000000001010110011001100101, 0b00100000011110110000011100100111, 0b11011010000000011001010101111000));
  335. }
  336. static void testBitShift() {
  337. // Bit shift with dynamic uniform offset.
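// Shifting a lane by its full bit width is not allowed: the ASSERT_CRASH cases below expect an
// error message starting with "Tried to shift" when a 16-bit lane of U16x8 is shifted by 16 bits.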
  338. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 0,
  339. U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010));
  340. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 1,
  341. U16x8(0b1000110110010110, 0b1010101101001100, 0b1001000101100110, 0b1101001011001010, 0b1011001100101010, 0b0110011000011100, 0b0100101010010110, 0b0101101100100100));
  342. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 2,
  343. U16x8(0b0001101100101100, 0b0101011010011000, 0b0010001011001100, 0b1010010110010100, 0b0110011001010100, 0b1100110000111000, 0b1001010100101100, 0b1011011001001000));
  344. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 3,
  345. U16x8(0b0011011001011000, 0b1010110100110000, 0b0100010110011000, 0b0100101100101000, 0b1100110010101000, 0b1001100001110000, 0b0010101001011000, 0b0110110010010000));
  346. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 4,
  347. U16x8(0b0110110010110000, 0b0101101001100000, 0b1000101100110000, 0b1001011001010000, 0b1001100101010000, 0b0011000011100000, 0b0101010010110000, 0b1101100100100000));
  348. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 5,
  349. U16x8(0b1101100101100000, 0b1011010011000000, 0b0001011001100000, 0b0010110010100000, 0b0011001010100000, 0b0110000111000000, 0b1010100101100000, 0b1011001001000000));
  350. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 6,
  351. U16x8(0b1011001011000000, 0b0110100110000000, 0b0010110011000000, 0b0101100101000000, 0b0110010101000000, 0b1100001110000000, 0b0101001011000000, 0b0110010010000000));
  352. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 7,
  353. U16x8(0b0110010110000000, 0b1101001100000000, 0b0101100110000000, 0b1011001010000000, 0b1100101010000000, 0b1000011100000000, 0b1010010110000000, 0b1100100100000000));
  354. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 8,
  355. U16x8(0b1100101100000000, 0b1010011000000000, 0b1011001100000000, 0b0110010100000000, 0b1001010100000000, 0b0000111000000000, 0b0100101100000000, 0b1001001000000000));
  356. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 9,
  357. U16x8(0b1001011000000000, 0b0100110000000000, 0b0110011000000000, 0b1100101000000000, 0b0010101000000000, 0b0001110000000000, 0b1001011000000000, 0b0010010000000000));
  358. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 10,
  359. U16x8(0b0010110000000000, 0b1001100000000000, 0b1100110000000000, 0b1001010000000000, 0b0101010000000000, 0b0011100000000000, 0b0010110000000000, 0b0100100000000000));
  360. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 11,
  361. U16x8(0b0101100000000000, 0b0011000000000000, 0b1001100000000000, 0b0010100000000000, 0b1010100000000000, 0b0111000000000000, 0b0101100000000000, 0b1001000000000000));
  362. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 12,
  363. U16x8(0b1011000000000000, 0b0110000000000000, 0b0011000000000000, 0b0101000000000000, 0b0101000000000000, 0b1110000000000000, 0b1011000000000000, 0b0010000000000000));
  364. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 13,
  365. U16x8(0b0110000000000000, 0b1100000000000000, 0b0110000000000000, 0b1010000000000000, 0b1010000000000000, 0b1100000000000000, 0b0110000000000000, 0b0100000000000000));
  366. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 14,
  367. U16x8(0b1100000000000000, 0b1000000000000000, 0b1100000000000000, 0b0100000000000000, 0b0100000000000000, 0b1000000000000000, 0b1100000000000000, 0b1000000000000000));
  368. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 15,
  369. U16x8(0b1000000000000000, 0b0000000000000000, 0b1000000000000000, 0b1000000000000000, 0b1000000000000000, 0b0000000000000000, 0b1000000000000000, 0b0000000000000000));
  370. ASSERT_CRASH(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) << 16, U"Tried to shift ");
  371. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 0,
  372. U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010));
  373. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 1,
  374. U16x8(0b0110001101100101, 0b0010101011010011, 0b0110010001011001, 0b0011010010110010, 0b0010110011001010, 0b0001100110000111, 0b0101001010100101, 0b0001011011001001));
  375. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 2,
  376. U16x8(0b0011000110110010, 0b0001010101101001, 0b0011001000101100, 0b0001101001011001, 0b0001011001100101, 0b0000110011000011, 0b0010100101010010, 0b0000101101100100));
  377. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 3,
  378. U16x8(0b0001100011011001, 0b0000101010110100, 0b0001100100010110, 0b0000110100101100, 0b0000101100110010, 0b0000011001100001, 0b0001010010101001, 0b0000010110110010));
  379. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 4,
  380. U16x8(0b0000110001101100, 0b0000010101011010, 0b0000110010001011, 0b0000011010010110, 0b0000010110011001, 0b0000001100110000, 0b0000101001010100, 0b0000001011011001));
  381. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 5,
  382. U16x8(0b0000011000110110, 0b0000001010101101, 0b0000011001000101, 0b0000001101001011, 0b0000001011001100, 0b0000000110011000, 0b0000010100101010, 0b0000000101101100));
  383. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 6,
  384. U16x8(0b0000001100011011, 0b0000000101010110, 0b0000001100100010, 0b0000000110100101, 0b0000000101100110, 0b0000000011001100, 0b0000001010010101, 0b0000000010110110));
  385. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 7,
  386. U16x8(0b0000000110001101, 0b0000000010101011, 0b0000000110010001, 0b0000000011010010, 0b0000000010110011, 0b0000000001100110, 0b0000000101001010, 0b0000000001011011));
  387. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 8,
  388. U16x8(0b0000000011000110, 0b0000000001010101, 0b0000000011001000, 0b0000000001101001, 0b0000000001011001, 0b0000000000110011, 0b0000000010100101, 0b0000000000101101));
  389. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 9,
  390. U16x8(0b0000000001100011, 0b0000000000101010, 0b0000000001100100, 0b0000000000110100, 0b0000000000101100, 0b0000000000011001, 0b0000000001010010, 0b0000000000010110));
  391. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 10,
  392. U16x8(0b0000000000110001, 0b0000000000010101, 0b0000000000110010, 0b0000000000011010, 0b0000000000010110, 0b0000000000001100, 0b0000000000101001, 0b0000000000001011));
  393. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 11,
  394. U16x8(0b0000000000011000, 0b0000000000001010, 0b0000000000011001, 0b0000000000001101, 0b0000000000001011, 0b0000000000000110, 0b0000000000010100, 0b0000000000000101));
  395. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 12,
  396. U16x8(0b0000000000001100, 0b0000000000000101, 0b0000000000001100, 0b0000000000000110, 0b0000000000000101, 0b0000000000000011, 0b0000000000001010, 0b0000000000000010));
  397. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 13,
  398. U16x8(0b0000000000000110, 0b0000000000000010, 0b0000000000000110, 0b0000000000000011, 0b0000000000000010, 0b0000000000000001, 0b0000000000000101, 0b0000000000000001));
  399. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 14,
  400. U16x8(0b0000000000000011, 0b0000000000000001, 0b0000000000000011, 0b0000000000000001, 0b0000000000000001, 0b0000000000000000, 0b0000000000000010, 0b0000000000000000));
  401. ASSERT_EQUAL_SIMD(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 15,
  402. U16x8(0b0000000000000001, 0b0000000000000000, 0b0000000000000001, 0b0000000000000000, 0b0000000000000000, 0b0000000000000000, 0b0000000000000001, 0b0000000000000000));
  403. ASSERT_CRASH(U16x8(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010) >> 16, U"Tried to shift ");
  404. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 0,
  405. U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010));
  406. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 1,
  407. U32x4(0b10001101100101101010101101001100, 0b10010001011001101101001011001010, 0b10110011001010100110011000011100, 0b01001010100101100101101100100100));
  408. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 2,
  409. U32x4(0b00011011001011010101011010011000, 0b00100010110011011010010110010100, 0b01100110010101001100110000111000, 0b10010101001011001011011001001000));
  410. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 3,
  411. U32x4(0b00110110010110101010110100110000, 0b01000101100110110100101100101000, 0b11001100101010011001100001110000, 0b00101010010110010110110010010000));
  412. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 4,
  413. U32x4(0b01101100101101010101101001100000, 0b10001011001101101001011001010000, 0b10011001010100110011000011100000, 0b01010100101100101101100100100000));
  414. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 5,
  415. U32x4(0b11011001011010101011010011000000, 0b00010110011011010010110010100000, 0b00110010101001100110000111000000, 0b10101001011001011011001001000000));
  416. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 6,
  417. U32x4(0b10110010110101010110100110000000, 0b00101100110110100101100101000000, 0b01100101010011001100001110000000, 0b01010010110010110110010010000000));
  418. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 7,
  419. U32x4(0b01100101101010101101001100000000, 0b01011001101101001011001010000000, 0b11001010100110011000011100000000, 0b10100101100101101100100100000000));
  420. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 8,
  421. U32x4(0b11001011010101011010011000000000, 0b10110011011010010110010100000000, 0b10010101001100110000111000000000, 0b01001011001011011001001000000000));
  422. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 9,
  423. U32x4(0b10010110101010110100110000000000, 0b01100110110100101100101000000000, 0b00101010011001100001110000000000, 0b10010110010110110010010000000000));
  424. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 10,
  425. U32x4(0b00101101010101101001100000000000, 0b11001101101001011001010000000000, 0b01010100110011000011100000000000, 0b00101100101101100100100000000000));
  426. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 11,
  427. U32x4(0b01011010101011010011000000000000, 0b10011011010010110010100000000000, 0b10101001100110000111000000000000, 0b01011001011011001001000000000000));
  428. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 12,
  429. U32x4(0b10110101010110100110000000000000, 0b00110110100101100101000000000000, 0b01010011001100001110000000000000, 0b10110010110110010010000000000000));
  430. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 13,
  431. U32x4(0b01101010101101001100000000000000, 0b01101101001011001010000000000000, 0b10100110011000011100000000000000, 0b01100101101100100100000000000000));
  432. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 14,
  433. U32x4(0b11010101011010011000000000000000, 0b11011010010110010100000000000000, 0b01001100110000111000000000000000, 0b11001011011001001000000000000000));
  434. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 15,
  435. U32x4(0b10101010110100110000000000000000, 0b10110100101100101000000000000000, 0b10011001100001110000000000000000, 0b10010110110010010000000000000000));
  436. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 16,
  437. U32x4(0b01010101101001100000000000000000, 0b01101001011001010000000000000000, 0b00110011000011100000000000000000, 0b00101101100100100000000000000000));
  438. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 17,
  439. U32x4(0b10101011010011000000000000000000, 0b11010010110010100000000000000000, 0b01100110000111000000000000000000, 0b01011011001001000000000000000000));
  440. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 18,
  441. U32x4(0b01010110100110000000000000000000, 0b10100101100101000000000000000000, 0b11001100001110000000000000000000, 0b10110110010010000000000000000000));
  442. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 19,
  443. U32x4(0b10101101001100000000000000000000, 0b01001011001010000000000000000000, 0b10011000011100000000000000000000, 0b01101100100100000000000000000000));
  444. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 20,
  445. U32x4(0b01011010011000000000000000000000, 0b10010110010100000000000000000000, 0b00110000111000000000000000000000, 0b11011001001000000000000000000000));
  446. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 21,
  447. U32x4(0b10110100110000000000000000000000, 0b00101100101000000000000000000000, 0b01100001110000000000000000000000, 0b10110010010000000000000000000000));
  448. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 22,
  449. U32x4(0b01101001100000000000000000000000, 0b01011001010000000000000000000000, 0b11000011100000000000000000000000, 0b01100100100000000000000000000000));
  450. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 23,
  451. U32x4(0b11010011000000000000000000000000, 0b10110010100000000000000000000000, 0b10000111000000000000000000000000, 0b11001001000000000000000000000000));
  452. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 24,
  453. U32x4(0b10100110000000000000000000000000, 0b01100101000000000000000000000000, 0b00001110000000000000000000000000, 0b10010010000000000000000000000000));
  454. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 25,
  455. U32x4(0b01001100000000000000000000000000, 0b11001010000000000000000000000000, 0b00011100000000000000000000000000, 0b00100100000000000000000000000000));
  456. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 26,
  457. U32x4(0b10011000000000000000000000000000, 0b10010100000000000000000000000000, 0b00111000000000000000000000000000, 0b01001000000000000000000000000000));
  458. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 27,
  459. U32x4(0b00110000000000000000000000000000, 0b00101000000000000000000000000000, 0b01110000000000000000000000000000, 0b10010000000000000000000000000000));
  460. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 28,
  461. U32x4(0b01100000000000000000000000000000, 0b01010000000000000000000000000000, 0b11100000000000000000000000000000, 0b00100000000000000000000000000000));
  462. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 29,
  463. U32x4(0b11000000000000000000000000000000, 0b10100000000000000000000000000000, 0b11000000000000000000000000000000, 0b01000000000000000000000000000000));
  464. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 30,
  465. U32x4(0b10000000000000000000000000000000, 0b01000000000000000000000000000000, 0b10000000000000000000000000000000, 0b10000000000000000000000000000000));
  466. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 31,
  467. U32x4(0b00000000000000000000000000000000, 0b10000000000000000000000000000000, 0b00000000000000000000000000000000, 0b00000000000000000000000000000000));
  468. ASSERT_CRASH(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) << 32, U"Tried to shift ");
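// Right shift of the same U32x4 lanes: counts 0 through 31, zero-filled from the high end; a count of 32 must trigger the "Tried to shift " crash assertion.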
  469. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 0,
  470. U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010));
  471. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 1,
  472. U32x4(0b01100011011001011010101011010011, 0b01100100010110011011010010110010, 0b00101100110010101001100110000111, 0b01010010101001011001011011001001));
  473. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 2,
  474. U32x4(0b00110001101100101101010101101001, 0b00110010001011001101101001011001, 0b00010110011001010100110011000011, 0b00101001010100101100101101100100));
  475. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 3,
  476. U32x4(0b00011000110110010110101010110100, 0b00011001000101100110110100101100, 0b00001011001100101010011001100001, 0b00010100101010010110010110110010));
  477. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 4,
  478. U32x4(0b00001100011011001011010101011010, 0b00001100100010110011011010010110, 0b00000101100110010101001100110000, 0b00001010010101001011001011011001));
  479. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 5,
  480. U32x4(0b00000110001101100101101010101101, 0b00000110010001011001101101001011, 0b00000010110011001010100110011000, 0b00000101001010100101100101101100));
  481. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 6,
  482. U32x4(0b00000011000110110010110101010110, 0b00000011001000101100110110100101, 0b00000001011001100101010011001100, 0b00000010100101010010110010110110));
  483. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 7,
  484. U32x4(0b00000001100011011001011010101011, 0b00000001100100010110011011010010, 0b00000000101100110010101001100110, 0b00000001010010101001011001011011));
  485. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 8,
  486. U32x4(0b00000000110001101100101101010101, 0b00000000110010001011001101101001, 0b00000000010110011001010100110011, 0b00000000101001010100101100101101));
  487. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 9,
  488. U32x4(0b00000000011000110110010110101010, 0b00000000011001000101100110110100, 0b00000000001011001100101010011001, 0b00000000010100101010010110010110));
  489. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 10,
  490. U32x4(0b00000000001100011011001011010101, 0b00000000001100100010110011011010, 0b00000000000101100110010101001100, 0b00000000001010010101001011001011));
  491. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 11,
  492. U32x4(0b00000000000110001101100101101010, 0b00000000000110010001011001101101, 0b00000000000010110011001010100110, 0b00000000000101001010100101100101));
  493. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 12,
  494. U32x4(0b00000000000011000110110010110101, 0b00000000000011001000101100110110, 0b00000000000001011001100101010011, 0b00000000000010100101010010110010));
  495. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 13,
  496. U32x4(0b00000000000001100011011001011010, 0b00000000000001100100010110011011, 0b00000000000000101100110010101001, 0b00000000000001010010101001011001));
  497. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 14,
  498. U32x4(0b00000000000000110001101100101101, 0b00000000000000110010001011001101, 0b00000000000000010110011001010100, 0b00000000000000101001010100101100));
  499. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 15,
  500. U32x4(0b00000000000000011000110110010110, 0b00000000000000011001000101100110, 0b00000000000000001011001100101010, 0b00000000000000010100101010010110));
  501. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 16,
  502. U32x4(0b00000000000000001100011011001011, 0b00000000000000001100100010110011, 0b00000000000000000101100110010101, 0b00000000000000001010010101001011));
  503. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 17,
  504. U32x4(0b00000000000000000110001101100101, 0b00000000000000000110010001011001, 0b00000000000000000010110011001010, 0b00000000000000000101001010100101));
  505. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 18,
  506. U32x4(0b00000000000000000011000110110010, 0b00000000000000000011001000101100, 0b00000000000000000001011001100101, 0b00000000000000000010100101010010));
  507. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 19,
  508. U32x4(0b00000000000000000001100011011001, 0b00000000000000000001100100010110, 0b00000000000000000000101100110010, 0b00000000000000000001010010101001));
  509. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 20,
  510. U32x4(0b00000000000000000000110001101100, 0b00000000000000000000110010001011, 0b00000000000000000000010110011001, 0b00000000000000000000101001010100));
  511. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 21,
  512. U32x4(0b00000000000000000000011000110110, 0b00000000000000000000011001000101, 0b00000000000000000000001011001100, 0b00000000000000000000010100101010));
  513. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 22,
  514. U32x4(0b00000000000000000000001100011011, 0b00000000000000000000001100100010, 0b00000000000000000000000101100110, 0b00000000000000000000001010010101));
  515. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 23,
  516. U32x4(0b00000000000000000000000110001101, 0b00000000000000000000000110010001, 0b00000000000000000000000010110011, 0b00000000000000000000000101001010));
  517. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 24,
  518. U32x4(0b00000000000000000000000011000110, 0b00000000000000000000000011001000, 0b00000000000000000000000001011001, 0b00000000000000000000000010100101));
  519. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 25,
  520. U32x4(0b00000000000000000000000001100011, 0b00000000000000000000000001100100, 0b00000000000000000000000000101100, 0b00000000000000000000000001010010));
  521. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 26,
  522. U32x4(0b00000000000000000000000000110001, 0b00000000000000000000000000110010, 0b00000000000000000000000000010110, 0b00000000000000000000000000101001));
  523. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 27,
  524. U32x4(0b00000000000000000000000000011000, 0b00000000000000000000000000011001, 0b00000000000000000000000000001011, 0b00000000000000000000000000010100));
  525. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 28,
  526. U32x4(0b00000000000000000000000000001100, 0b00000000000000000000000000001100, 0b00000000000000000000000000000101, 0b00000000000000000000000000001010));
  527. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 29,
  528. U32x4(0b00000000000000000000000000000110, 0b00000000000000000000000000000110, 0b00000000000000000000000000000010, 0b00000000000000000000000000000101));
  529. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 30,
  530. U32x4(0b00000000000000000000000000000011, 0b00000000000000000000000000000011, 0b00000000000000000000000000000001, 0b00000000000000000000000000000010));
  531. ASSERT_EQUAL_SIMD(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 31,
  532. U32x4(0b00000000000000000000000000000001, 0b00000000000000000000000000000001, 0b00000000000000000000000000000000, 0b00000000000000000000000000000001));
  533. ASSERT_CRASH(U32x4(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010) >> 32, U"Tried to shift ");
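// U16x16 left shift: sixteen independent 16-bit lanes shifted by counts 0 through 15; shifting by the full lane width of 16 must trigger the "Tried to shift " crash assertion.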
  534. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 0,
  535. U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010));
  536. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 1,
  537. U16x16(0b1000110110010110, 0b1010101101001100, 0b1001000101100110, 0b1101001011001010, 0b1011001100101010, 0b0110011000011100, 0b0100101010010110, 0b0101101100100100, 0b1110010110100100, 0b0001011010100110, 0b1011001000111010, 0b0101011101001010, 0b0111010100101000, 0b1101001010011000, 0b1010001110001010, 0b0110101010010100));
  538. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 2,
  539. U16x16(0b0001101100101100, 0b0101011010011000, 0b0010001011001100, 0b1010010110010100, 0b0110011001010100, 0b1100110000111000, 0b1001010100101100, 0b1011011001001000, 0b1100101101001000, 0b0010110101001100, 0b0110010001110100, 0b1010111010010100, 0b1110101001010000, 0b1010010100110000, 0b0100011100010100, 0b1101010100101000));
  540. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 3,
  541. U16x16(0b0011011001011000, 0b1010110100110000, 0b0100010110011000, 0b0100101100101000, 0b1100110010101000, 0b1001100001110000, 0b0010101001011000, 0b0110110010010000, 0b1001011010010000, 0b0101101010011000, 0b1100100011101000, 0b0101110100101000, 0b1101010010100000, 0b0100101001100000, 0b1000111000101000, 0b1010101001010000));
  542. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 4,
  543. U16x16(0b0110110010110000, 0b0101101001100000, 0b1000101100110000, 0b1001011001010000, 0b1001100101010000, 0b0011000011100000, 0b0101010010110000, 0b1101100100100000, 0b0010110100100000, 0b1011010100110000, 0b1001000111010000, 0b1011101001010000, 0b1010100101000000, 0b1001010011000000, 0b0001110001010000, 0b0101010010100000));
  544. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 5,
  545. U16x16(0b1101100101100000, 0b1011010011000000, 0b0001011001100000, 0b0010110010100000, 0b0011001010100000, 0b0110000111000000, 0b1010100101100000, 0b1011001001000000, 0b0101101001000000, 0b0110101001100000, 0b0010001110100000, 0b0111010010100000, 0b0101001010000000, 0b0010100110000000, 0b0011100010100000, 0b1010100101000000));
  546. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 6,
  547. U16x16(0b1011001011000000, 0b0110100110000000, 0b0010110011000000, 0b0101100101000000, 0b0110010101000000, 0b1100001110000000, 0b0101001011000000, 0b0110010010000000, 0b1011010010000000, 0b1101010011000000, 0b0100011101000000, 0b1110100101000000, 0b1010010100000000, 0b0101001100000000, 0b0111000101000000, 0b0101001010000000));
  548. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 7,
  549. U16x16(0b0110010110000000, 0b1101001100000000, 0b0101100110000000, 0b1011001010000000, 0b1100101010000000, 0b1000011100000000, 0b1010010110000000, 0b1100100100000000, 0b0110100100000000, 0b1010100110000000, 0b1000111010000000, 0b1101001010000000, 0b0100101000000000, 0b1010011000000000, 0b1110001010000000, 0b1010010100000000));
  550. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 8,
  551. U16x16(0b1100101100000000, 0b1010011000000000, 0b1011001100000000, 0b0110010100000000, 0b1001010100000000, 0b0000111000000000, 0b0100101100000000, 0b1001001000000000, 0b1101001000000000, 0b0101001100000000, 0b0001110100000000, 0b1010010100000000, 0b1001010000000000, 0b0100110000000000, 0b1100010100000000, 0b0100101000000000));
  552. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 9,
  553. U16x16(0b1001011000000000, 0b0100110000000000, 0b0110011000000000, 0b1100101000000000, 0b0010101000000000, 0b0001110000000000, 0b1001011000000000, 0b0010010000000000, 0b1010010000000000, 0b1010011000000000, 0b0011101000000000, 0b0100101000000000, 0b0010100000000000, 0b1001100000000000, 0b1000101000000000, 0b1001010000000000));
  554. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 10,
  555. U16x16(0b0010110000000000, 0b1001100000000000, 0b1100110000000000, 0b1001010000000000, 0b0101010000000000, 0b0011100000000000, 0b0010110000000000, 0b0100100000000000, 0b0100100000000000, 0b0100110000000000, 0b0111010000000000, 0b1001010000000000, 0b0101000000000000, 0b0011000000000000, 0b0001010000000000, 0b0010100000000000));
  556. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 11,
  557. U16x16(0b0101100000000000, 0b0011000000000000, 0b1001100000000000, 0b0010100000000000, 0b1010100000000000, 0b0111000000000000, 0b0101100000000000, 0b1001000000000000, 0b1001000000000000, 0b1001100000000000, 0b1110100000000000, 0b0010100000000000, 0b1010000000000000, 0b0110000000000000, 0b0010100000000000, 0b0101000000000000));
  558. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 12,
  559. U16x16(0b1011000000000000, 0b0110000000000000, 0b0011000000000000, 0b0101000000000000, 0b0101000000000000, 0b1110000000000000, 0b1011000000000000, 0b0010000000000000, 0b0010000000000000, 0b0011000000000000, 0b1101000000000000, 0b0101000000000000, 0b0100000000000000, 0b1100000000000000, 0b0101000000000000, 0b1010000000000000));
  560. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 13,
  561. U16x16(0b0110000000000000, 0b1100000000000000, 0b0110000000000000, 0b1010000000000000, 0b1010000000000000, 0b1100000000000000, 0b0110000000000000, 0b0100000000000000, 0b0100000000000000, 0b0110000000000000, 0b1010000000000000, 0b1010000000000000, 0b1000000000000000, 0b1000000000000000, 0b1010000000000000, 0b0100000000000000));
  562. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 14,
  563. U16x16(0b1100000000000000, 0b1000000000000000, 0b1100000000000000, 0b0100000000000000, 0b0100000000000000, 0b1000000000000000, 0b1100000000000000, 0b1000000000000000, 0b1000000000000000, 0b1100000000000000, 0b0100000000000000, 0b0100000000000000, 0b0000000000000000, 0b0000000000000000, 0b0100000000000000, 0b1000000000000000));
  564. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 15,
  565. U16x16(0b1000000000000000, 0b0000000000000000, 0b1000000000000000, 0b1000000000000000, 0b1000000000000000, 0b0000000000000000, 0b1000000000000000, 0b0000000000000000, 0b0000000000000000, 0b1000000000000000, 0b1000000000000000, 0b1000000000000000, 0b0000000000000000, 0b0000000000000000, 0b1000000000000000, 0b0000000000000000));
  566. ASSERT_CRASH(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) << 16, U"Tried to shift ");
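// Matching right-shift coverage for the same U16x16 lanes: counts 0 through 15, with the crash assertion expected again at 16.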
  567. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 0,
  568. U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010));
  569. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 1,
  570. U16x16(0b0110001101100101, 0b0010101011010011, 0b0110010001011001, 0b0011010010110010, 0b0010110011001010, 0b0001100110000111, 0b0101001010100101, 0b0001011011001001, 0b0011100101101001, 0b0100010110101001, 0b0010110010001110, 0b0001010111010010, 0b0001110101001010, 0b0011010010100110, 0b0110100011100010, 0b0001101010100101));
  571. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 2,
  572. U16x16(0b0011000110110010, 0b0001010101101001, 0b0011001000101100, 0b0001101001011001, 0b0001011001100101, 0b0000110011000011, 0b0010100101010010, 0b0000101101100100, 0b0001110010110100, 0b0010001011010100, 0b0001011001000111, 0b0000101011101001, 0b0000111010100101, 0b0001101001010011, 0b0011010001110001, 0b0000110101010010));
  573. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 3,
  574. U16x16(0b0001100011011001, 0b0000101010110100, 0b0001100100010110, 0b0000110100101100, 0b0000101100110010, 0b0000011001100001, 0b0001010010101001, 0b0000010110110010, 0b0000111001011010, 0b0001000101101010, 0b0000101100100011, 0b0000010101110100, 0b0000011101010010, 0b0000110100101001, 0b0001101000111000, 0b0000011010101001));
  575. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 4,
  576. U16x16(0b0000110001101100, 0b0000010101011010, 0b0000110010001011, 0b0000011010010110, 0b0000010110011001, 0b0000001100110000, 0b0000101001010100, 0b0000001011011001, 0b0000011100101101, 0b0000100010110101, 0b0000010110010001, 0b0000001010111010, 0b0000001110101001, 0b0000011010010100, 0b0000110100011100, 0b0000001101010100));
  577. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 5,
  578. U16x16(0b0000011000110110, 0b0000001010101101, 0b0000011001000101, 0b0000001101001011, 0b0000001011001100, 0b0000000110011000, 0b0000010100101010, 0b0000000101101100, 0b0000001110010110, 0b0000010001011010, 0b0000001011001000, 0b0000000101011101, 0b0000000111010100, 0b0000001101001010, 0b0000011010001110, 0b0000000110101010));
  579. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 6,
  580. U16x16(0b0000001100011011, 0b0000000101010110, 0b0000001100100010, 0b0000000110100101, 0b0000000101100110, 0b0000000011001100, 0b0000001010010101, 0b0000000010110110, 0b0000000111001011, 0b0000001000101101, 0b0000000101100100, 0b0000000010101110, 0b0000000011101010, 0b0000000110100101, 0b0000001101000111, 0b0000000011010101));
  581. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 7,
  582. U16x16(0b0000000110001101, 0b0000000010101011, 0b0000000110010001, 0b0000000011010010, 0b0000000010110011, 0b0000000001100110, 0b0000000101001010, 0b0000000001011011, 0b0000000011100101, 0b0000000100010110, 0b0000000010110010, 0b0000000001010111, 0b0000000001110101, 0b0000000011010010, 0b0000000110100011, 0b0000000001101010));
  583. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 8,
  584. U16x16(0b0000000011000110, 0b0000000001010101, 0b0000000011001000, 0b0000000001101001, 0b0000000001011001, 0b0000000000110011, 0b0000000010100101, 0b0000000000101101, 0b0000000001110010, 0b0000000010001011, 0b0000000001011001, 0b0000000000101011, 0b0000000000111010, 0b0000000001101001, 0b0000000011010001, 0b0000000000110101));
  585. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 9,
  586. U16x16(0b0000000001100011, 0b0000000000101010, 0b0000000001100100, 0b0000000000110100, 0b0000000000101100, 0b0000000000011001, 0b0000000001010010, 0b0000000000010110, 0b0000000000111001, 0b0000000001000101, 0b0000000000101100, 0b0000000000010101, 0b0000000000011101, 0b0000000000110100, 0b0000000001101000, 0b0000000000011010));
  587. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 10,
  588. U16x16(0b0000000000110001, 0b0000000000010101, 0b0000000000110010, 0b0000000000011010, 0b0000000000010110, 0b0000000000001100, 0b0000000000101001, 0b0000000000001011, 0b0000000000011100, 0b0000000000100010, 0b0000000000010110, 0b0000000000001010, 0b0000000000001110, 0b0000000000011010, 0b0000000000110100, 0b0000000000001101));
  589. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 11,
  590. U16x16(0b0000000000011000, 0b0000000000001010, 0b0000000000011001, 0b0000000000001101, 0b0000000000001011, 0b0000000000000110, 0b0000000000010100, 0b0000000000000101, 0b0000000000001110, 0b0000000000010001, 0b0000000000001011, 0b0000000000000101, 0b0000000000000111, 0b0000000000001101, 0b0000000000011010, 0b0000000000000110));
  591. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 12,
  592. U16x16(0b0000000000001100, 0b0000000000000101, 0b0000000000001100, 0b0000000000000110, 0b0000000000000101, 0b0000000000000011, 0b0000000000001010, 0b0000000000000010, 0b0000000000000111, 0b0000000000001000, 0b0000000000000101, 0b0000000000000010, 0b0000000000000011, 0b0000000000000110, 0b0000000000001101, 0b0000000000000011));
  593. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 13,
  594. U16x16(0b0000000000000110, 0b0000000000000010, 0b0000000000000110, 0b0000000000000011, 0b0000000000000010, 0b0000000000000001, 0b0000000000000101, 0b0000000000000001, 0b0000000000000011, 0b0000000000000100, 0b0000000000000010, 0b0000000000000001, 0b0000000000000001, 0b0000000000000011, 0b0000000000000110, 0b0000000000000001));
  595. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 14,
  596. U16x16(0b0000000000000011, 0b0000000000000001, 0b0000000000000011, 0b0000000000000001, 0b0000000000000001, 0b0000000000000000, 0b0000000000000010, 0b0000000000000000, 0b0000000000000001, 0b0000000000000010, 0b0000000000000001, 0b0000000000000000, 0b0000000000000000, 0b0000000000000001, 0b0000000000000011, 0b0000000000000000));
  597. ASSERT_EQUAL_SIMD(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 15,
  598. U16x16(0b0000000000000001, 0b0000000000000000, 0b0000000000000001, 0b0000000000000000, 0b0000000000000000, 0b0000000000000000, 0b0000000000000001, 0b0000000000000000, 0b0000000000000000, 0b0000000000000001, 0b0000000000000000, 0b0000000000000000, 0b0000000000000000, 0b0000000000000000, 0b0000000000000001, 0b0000000000000000));
  599. ASSERT_CRASH(U16x16(0b1100011011001011, 0b0101010110100110, 0b1100100010110011, 0b0110100101100101, 0b0101100110010101, 0b0011001100001110, 0b1010010101001011, 0b0010110110010010, 0b0111001011010010, 0b1000101101010011, 0b0101100100011101, 0b0010101110100101, 0b0011101010010100, 0b0110100101001100, 0b1101000111000101, 0b0011010101001010) >> 16, U"Tried to shift ");
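// U32x8 left shift: the first four lanes repeat the U32x4 values above and four additional lanes are appended; counts 0 through 31 are covered and 32 must trigger the "Tried to shift " crash assertion.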
  600. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 0,
  601. U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011));
  602. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 1,
  603. U32x8(0b10001101100101101010101101001100, 0b10010001011001101101001011001010, 0b10110011001010100110011000011100, 0b01001010100101100101101100100100, 0b10110100011011010101001011010110, 0b10110101011011001011010110101010, 0b10100010101010010010010010110100, 0b00101011010101011001010101010110));
  604. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 2,
  605. U32x8(0b00011011001011010101011010011000, 0b00100010110011011010010110010100, 0b01100110010101001100110000111000, 0b10010101001011001011011001001000, 0b01101000110110101010010110101100, 0b01101010110110010110101101010100, 0b01000101010100100100100101101000, 0b01010110101010110010101010101100));
  606. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 3,
  607. U32x8(0b00110110010110101010110100110000, 0b01000101100110110100101100101000, 0b11001100101010011001100001110000, 0b00101010010110010110110010010000, 0b11010001101101010100101101011000, 0b11010101101100101101011010101000, 0b10001010101001001001001011010000, 0b10101101010101100101010101011000));
  608. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 4,
  609. U32x8(0b01101100101101010101101001100000, 0b10001011001101101001011001010000, 0b10011001010100110011000011100000, 0b01010100101100101101100100100000, 0b10100011011010101001011010110000, 0b10101011011001011010110101010000, 0b00010101010010010010010110100000, 0b01011010101011001010101010110000));
  610. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 5,
  611. U32x8(0b11011001011010101011010011000000, 0b00010110011011010010110010100000, 0b00110010101001100110000111000000, 0b10101001011001011011001001000000, 0b01000110110101010010110101100000, 0b01010110110010110101101010100000, 0b00101010100100100100101101000000, 0b10110101010110010101010101100000));
  612. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 6,
  613. U32x8(0b10110010110101010110100110000000, 0b00101100110110100101100101000000, 0b01100101010011001100001110000000, 0b01010010110010110110010010000000, 0b10001101101010100101101011000000, 0b10101101100101101011010101000000, 0b01010101001001001001011010000000, 0b01101010101100101010101011000000));
  614. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 7,
  615. U32x8(0b01100101101010101101001100000000, 0b01011001101101001011001010000000, 0b11001010100110011000011100000000, 0b10100101100101101100100100000000, 0b00011011010101001011010110000000, 0b01011011001011010110101010000000, 0b10101010010010010010110100000000, 0b11010101011001010101010110000000));
  616. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 8,
  617. U32x8(0b11001011010101011010011000000000, 0b10110011011010010110010100000000, 0b10010101001100110000111000000000, 0b01001011001011011001001000000000, 0b00110110101010010110101100000000, 0b10110110010110101101010100000000, 0b01010100100100100101101000000000, 0b10101010110010101010101100000000));
  618. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 9,
  619. U32x8(0b10010110101010110100110000000000, 0b01100110110100101100101000000000, 0b00101010011001100001110000000000, 0b10010110010110110010010000000000, 0b01101101010100101101011000000000, 0b01101100101101011010101000000000, 0b10101001001001001011010000000000, 0b01010101100101010101011000000000));
  620. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 10,
  621. U32x8(0b00101101010101101001100000000000, 0b11001101101001011001010000000000, 0b01010100110011000011100000000000, 0b00101100101101100100100000000000, 0b11011010101001011010110000000000, 0b11011001011010110101010000000000, 0b01010010010010010110100000000000, 0b10101011001010101010110000000000));
  622. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 11,
  623. U32x8(0b01011010101011010011000000000000, 0b10011011010010110010100000000000, 0b10101001100110000111000000000000, 0b01011001011011001001000000000000, 0b10110101010010110101100000000000, 0b10110010110101101010100000000000, 0b10100100100100101101000000000000, 0b01010110010101010101100000000000));
  624. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 12,
  625. U32x8(0b10110101010110100110000000000000, 0b00110110100101100101000000000000, 0b01010011001100001110000000000000, 0b10110010110110010010000000000000, 0b01101010100101101011000000000000, 0b01100101101011010101000000000000, 0b01001001001001011010000000000000, 0b10101100101010101011000000000000));
  626. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 13,
  627. U32x8(0b01101010101101001100000000000000, 0b01101101001011001010000000000000, 0b10100110011000011100000000000000, 0b01100101101100100100000000000000, 0b11010101001011010110000000000000, 0b11001011010110101010000000000000, 0b10010010010010110100000000000000, 0b01011001010101010110000000000000));
  628. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 14,
  629. U32x8(0b11010101011010011000000000000000, 0b11011010010110010100000000000000, 0b01001100110000111000000000000000, 0b11001011011001001000000000000000, 0b10101010010110101100000000000000, 0b10010110101101010100000000000000, 0b00100100100101101000000000000000, 0b10110010101010101100000000000000));
  630. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 15,
  631. U32x8(0b10101010110100110000000000000000, 0b10110100101100101000000000000000, 0b10011001100001110000000000000000, 0b10010110110010010000000000000000, 0b01010100101101011000000000000000, 0b00101101011010101000000000000000, 0b01001001001011010000000000000000, 0b01100101010101011000000000000000));
  632. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 16,
  633. U32x8(0b01010101101001100000000000000000, 0b01101001011001010000000000000000, 0b00110011000011100000000000000000, 0b00101101100100100000000000000000, 0b10101001011010110000000000000000, 0b01011010110101010000000000000000, 0b10010010010110100000000000000000, 0b11001010101010110000000000000000));
  634. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 17,
  635. U32x8(0b10101011010011000000000000000000, 0b11010010110010100000000000000000, 0b01100110000111000000000000000000, 0b01011011001001000000000000000000, 0b01010010110101100000000000000000, 0b10110101101010100000000000000000, 0b00100100101101000000000000000000, 0b10010101010101100000000000000000));
  636. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 18,
  637. U32x8(0b01010110100110000000000000000000, 0b10100101100101000000000000000000, 0b11001100001110000000000000000000, 0b10110110010010000000000000000000, 0b10100101101011000000000000000000, 0b01101011010101000000000000000000, 0b01001001011010000000000000000000, 0b00101010101011000000000000000000));
  638. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 19,
  639. U32x8(0b10101101001100000000000000000000, 0b01001011001010000000000000000000, 0b10011000011100000000000000000000, 0b01101100100100000000000000000000, 0b01001011010110000000000000000000, 0b11010110101010000000000000000000, 0b10010010110100000000000000000000, 0b01010101010110000000000000000000));
  640. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 20,
  641. U32x8(0b01011010011000000000000000000000, 0b10010110010100000000000000000000, 0b00110000111000000000000000000000, 0b11011001001000000000000000000000, 0b10010110101100000000000000000000, 0b10101101010100000000000000000000, 0b00100101101000000000000000000000, 0b10101010101100000000000000000000));
  642. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 21,
  643. U32x8(0b10110100110000000000000000000000, 0b00101100101000000000000000000000, 0b01100001110000000000000000000000, 0b10110010010000000000000000000000, 0b00101101011000000000000000000000, 0b01011010101000000000000000000000, 0b01001011010000000000000000000000, 0b01010101011000000000000000000000));
  644. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 22,
  645. U32x8(0b01101001100000000000000000000000, 0b01011001010000000000000000000000, 0b11000011100000000000000000000000, 0b01100100100000000000000000000000, 0b01011010110000000000000000000000, 0b10110101010000000000000000000000, 0b10010110100000000000000000000000, 0b10101010110000000000000000000000));
  646. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 23,
  647. U32x8(0b11010011000000000000000000000000, 0b10110010100000000000000000000000, 0b10000111000000000000000000000000, 0b11001001000000000000000000000000, 0b10110101100000000000000000000000, 0b01101010100000000000000000000000, 0b00101101000000000000000000000000, 0b01010101100000000000000000000000));
  648. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 24,
  649. U32x8(0b10100110000000000000000000000000, 0b01100101000000000000000000000000, 0b00001110000000000000000000000000, 0b10010010000000000000000000000000, 0b01101011000000000000000000000000, 0b11010101000000000000000000000000, 0b01011010000000000000000000000000, 0b10101011000000000000000000000000));
  650. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 25,
  651. U32x8(0b01001100000000000000000000000000, 0b11001010000000000000000000000000, 0b00011100000000000000000000000000, 0b00100100000000000000000000000000, 0b11010110000000000000000000000000, 0b10101010000000000000000000000000, 0b10110100000000000000000000000000, 0b01010110000000000000000000000000));
  652. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 26,
  653. U32x8(0b10011000000000000000000000000000, 0b10010100000000000000000000000000, 0b00111000000000000000000000000000, 0b01001000000000000000000000000000, 0b10101100000000000000000000000000, 0b01010100000000000000000000000000, 0b01101000000000000000000000000000, 0b10101100000000000000000000000000));
  654. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 27,
  655. U32x8(0b00110000000000000000000000000000, 0b00101000000000000000000000000000, 0b01110000000000000000000000000000, 0b10010000000000000000000000000000, 0b01011000000000000000000000000000, 0b10101000000000000000000000000000, 0b11010000000000000000000000000000, 0b01011000000000000000000000000000));
  656. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 28,
  657. U32x8(0b01100000000000000000000000000000, 0b01010000000000000000000000000000, 0b11100000000000000000000000000000, 0b00100000000000000000000000000000, 0b10110000000000000000000000000000, 0b01010000000000000000000000000000, 0b10100000000000000000000000000000, 0b10110000000000000000000000000000));
  658. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 29,
  659. U32x8(0b11000000000000000000000000000000, 0b10100000000000000000000000000000, 0b11000000000000000000000000000000, 0b01000000000000000000000000000000, 0b01100000000000000000000000000000, 0b10100000000000000000000000000000, 0b01000000000000000000000000000000, 0b01100000000000000000000000000000));
  660. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 30,
  661. U32x8(0b10000000000000000000000000000000, 0b01000000000000000000000000000000, 0b10000000000000000000000000000000, 0b10000000000000000000000000000000, 0b11000000000000000000000000000000, 0b01000000000000000000000000000000, 0b10000000000000000000000000000000, 0b11000000000000000000000000000000));
  662. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 31,
  663. U32x8(0b00000000000000000000000000000000, 0b10000000000000000000000000000000, 0b00000000000000000000000000000000, 0b00000000000000000000000000000000, 0b10000000000000000000000000000000, 0b10000000000000000000000000000000, 0b00000000000000000000000000000000, 0b10000000000000000000000000000000));
  664. ASSERT_CRASH(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) << 32, U"Tried to shift ");
  665. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 0,
  666. U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011));
  667. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 1,
  668. U32x8(0b01100011011001011010101011010011, 0b01100100010110011011010010110010, 0b00101100110010101001100110000111, 0b01010010101001011001011011001001, 0b00101101000110110101010010110101, 0b00101101010110110010110101101010, 0b01101000101010100100100100101101, 0b01001010110101010110010101010101));
  669. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 2,
  670. U32x8(0b00110001101100101101010101101001, 0b00110010001011001101101001011001, 0b00010110011001010100110011000011, 0b00101001010100101100101101100100, 0b00010110100011011010101001011010, 0b00010110101011011001011010110101, 0b00110100010101010010010010010110, 0b00100101011010101011001010101010));
  671. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 3,
  672. U32x8(0b00011000110110010110101010110100, 0b00011001000101100110110100101100, 0b00001011001100101010011001100001, 0b00010100101010010110010110110010, 0b00001011010001101101010100101101, 0b00001011010101101100101101011010, 0b00011010001010101001001001001011, 0b00010010101101010101100101010101));
  673. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 4,
  674. U32x8(0b00001100011011001011010101011010, 0b00001100100010110011011010010110, 0b00000101100110010101001100110000, 0b00001010010101001011001011011001, 0b00000101101000110110101010010110, 0b00000101101010110110010110101101, 0b00001101000101010100100100100101, 0b00001001010110101010110010101010));
  675. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 5,
  676. U32x8(0b00000110001101100101101010101101, 0b00000110010001011001101101001011, 0b00000010110011001010100110011000, 0b00000101001010100101100101101100, 0b00000010110100011011010101001011, 0b00000010110101011011001011010110, 0b00000110100010101010010010010010, 0b00000100101011010101011001010101));
  677. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 6,
  678. U32x8(0b00000011000110110010110101010110, 0b00000011001000101100110110100101, 0b00000001011001100101010011001100, 0b00000010100101010010110010110110, 0b00000001011010001101101010100101, 0b00000001011010101101100101101011, 0b00000011010001010101001001001001, 0b00000010010101101010101100101010));
  679. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 7,
  680. U32x8(0b00000001100011011001011010101011, 0b00000001100100010110011011010010, 0b00000000101100110010101001100110, 0b00000001010010101001011001011011, 0b00000000101101000110110101010010, 0b00000000101101010110110010110101, 0b00000001101000101010100100100100, 0b00000001001010110101010110010101));
  681. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 8,
  682. U32x8(0b00000000110001101100101101010101, 0b00000000110010001011001101101001, 0b00000000010110011001010100110011, 0b00000000101001010100101100101101, 0b00000000010110100011011010101001, 0b00000000010110101011011001011010, 0b00000000110100010101010010010010, 0b00000000100101011010101011001010));
  683. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 9,
  684. U32x8(0b00000000011000110110010110101010, 0b00000000011001000101100110110100, 0b00000000001011001100101010011001, 0b00000000010100101010010110010110, 0b00000000001011010001101101010100, 0b00000000001011010101101100101101, 0b00000000011010001010101001001001, 0b00000000010010101101010101100101));
  685. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 10,
  686. U32x8(0b00000000001100011011001011010101, 0b00000000001100100010110011011010, 0b00000000000101100110010101001100, 0b00000000001010010101001011001011, 0b00000000000101101000110110101010, 0b00000000000101101010110110010110, 0b00000000001101000101010100100100, 0b00000000001001010110101010110010));
  687. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 11,
  688. U32x8(0b00000000000110001101100101101010, 0b00000000000110010001011001101101, 0b00000000000010110011001010100110, 0b00000000000101001010100101100101, 0b00000000000010110100011011010101, 0b00000000000010110101011011001011, 0b00000000000110100010101010010010, 0b00000000000100101011010101011001));
  689. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 12,
  690. U32x8(0b00000000000011000110110010110101, 0b00000000000011001000101100110110, 0b00000000000001011001100101010011, 0b00000000000010100101010010110010, 0b00000000000001011010001101101010, 0b00000000000001011010101101100101, 0b00000000000011010001010101001001, 0b00000000000010010101101010101100));
  691. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 13,
  692. U32x8(0b00000000000001100011011001011010, 0b00000000000001100100010110011011, 0b00000000000000101100110010101001, 0b00000000000001010010101001011001, 0b00000000000000101101000110110101, 0b00000000000000101101010110110010, 0b00000000000001101000101010100100, 0b00000000000001001010110101010110));
  693. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 14,
  694. U32x8(0b00000000000000110001101100101101, 0b00000000000000110010001011001101, 0b00000000000000010110011001010100, 0b00000000000000101001010100101100, 0b00000000000000010110100011011010, 0b00000000000000010110101011011001, 0b00000000000000110100010101010010, 0b00000000000000100101011010101011));
  695. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 15,
  696. U32x8(0b00000000000000011000110110010110, 0b00000000000000011001000101100110, 0b00000000000000001011001100101010, 0b00000000000000010100101010010110, 0b00000000000000001011010001101101, 0b00000000000000001011010101101100, 0b00000000000000011010001010101001, 0b00000000000000010010101101010101));
  697. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 16,
  698. U32x8(0b00000000000000001100011011001011, 0b00000000000000001100100010110011, 0b00000000000000000101100110010101, 0b00000000000000001010010101001011, 0b00000000000000000101101000110110, 0b00000000000000000101101010110110, 0b00000000000000001101000101010100, 0b00000000000000001001010110101010));
  699. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 17,
  700. U32x8(0b00000000000000000110001101100101, 0b00000000000000000110010001011001, 0b00000000000000000010110011001010, 0b00000000000000000101001010100101, 0b00000000000000000010110100011011, 0b00000000000000000010110101011011, 0b00000000000000000110100010101010, 0b00000000000000000100101011010101));
  701. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 18,
  702. U32x8(0b00000000000000000011000110110010, 0b00000000000000000011001000101100, 0b00000000000000000001011001100101, 0b00000000000000000010100101010010, 0b00000000000000000001011010001101, 0b00000000000000000001011010101101, 0b00000000000000000011010001010101, 0b00000000000000000010010101101010));
  703. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 19,
  704. U32x8(0b00000000000000000001100011011001, 0b00000000000000000001100100010110, 0b00000000000000000000101100110010, 0b00000000000000000001010010101001, 0b00000000000000000000101101000110, 0b00000000000000000000101101010110, 0b00000000000000000001101000101010, 0b00000000000000000001001010110101));
  705. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 20,
  706. U32x8(0b00000000000000000000110001101100, 0b00000000000000000000110010001011, 0b00000000000000000000010110011001, 0b00000000000000000000101001010100, 0b00000000000000000000010110100011, 0b00000000000000000000010110101011, 0b00000000000000000000110100010101, 0b00000000000000000000100101011010));
  707. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 21,
  708. U32x8(0b00000000000000000000011000110110, 0b00000000000000000000011001000101, 0b00000000000000000000001011001100, 0b00000000000000000000010100101010, 0b00000000000000000000001011010001, 0b00000000000000000000001011010101, 0b00000000000000000000011010001010, 0b00000000000000000000010010101101));
  709. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 22,
  710. U32x8(0b00000000000000000000001100011011, 0b00000000000000000000001100100010, 0b00000000000000000000000101100110, 0b00000000000000000000001010010101, 0b00000000000000000000000101101000, 0b00000000000000000000000101101010, 0b00000000000000000000001101000101, 0b00000000000000000000001001010110));
  711. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 23,
  712. U32x8(0b00000000000000000000000110001101, 0b00000000000000000000000110010001, 0b00000000000000000000000010110011, 0b00000000000000000000000101001010, 0b00000000000000000000000010110100, 0b00000000000000000000000010110101, 0b00000000000000000000000110100010, 0b00000000000000000000000100101011));
  713. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 24,
  714. U32x8(0b00000000000000000000000011000110, 0b00000000000000000000000011001000, 0b00000000000000000000000001011001, 0b00000000000000000000000010100101, 0b00000000000000000000000001011010, 0b00000000000000000000000001011010, 0b00000000000000000000000011010001, 0b00000000000000000000000010010101));
  715. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 25,
  716. U32x8(0b00000000000000000000000001100011, 0b00000000000000000000000001100100, 0b00000000000000000000000000101100, 0b00000000000000000000000001010010, 0b00000000000000000000000000101101, 0b00000000000000000000000000101101, 0b00000000000000000000000001101000, 0b00000000000000000000000001001010));
  717. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 26,
  718. U32x8(0b00000000000000000000000000110001, 0b00000000000000000000000000110010, 0b00000000000000000000000000010110, 0b00000000000000000000000000101001, 0b00000000000000000000000000010110, 0b00000000000000000000000000010110, 0b00000000000000000000000000110100, 0b00000000000000000000000000100101));
  719. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 27,
  720. U32x8(0b00000000000000000000000000011000, 0b00000000000000000000000000011001, 0b00000000000000000000000000001011, 0b00000000000000000000000000010100, 0b00000000000000000000000000001011, 0b00000000000000000000000000001011, 0b00000000000000000000000000011010, 0b00000000000000000000000000010010));
  721. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 28,
  722. U32x8(0b00000000000000000000000000001100, 0b00000000000000000000000000001100, 0b00000000000000000000000000000101, 0b00000000000000000000000000001010, 0b00000000000000000000000000000101, 0b00000000000000000000000000000101, 0b00000000000000000000000000001101, 0b00000000000000000000000000001001));
  723. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 29,
  724. U32x8(0b00000000000000000000000000000110, 0b00000000000000000000000000000110, 0b00000000000000000000000000000010, 0b00000000000000000000000000000101, 0b00000000000000000000000000000010, 0b00000000000000000000000000000010, 0b00000000000000000000000000000110, 0b00000000000000000000000000000100));
  725. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 30,
  726. U32x8(0b00000000000000000000000000000011, 0b00000000000000000000000000000011, 0b00000000000000000000000000000001, 0b00000000000000000000000000000010, 0b00000000000000000000000000000001, 0b00000000000000000000000000000001, 0b00000000000000000000000000000011, 0b00000000000000000000000000000010));
  727. ASSERT_EQUAL_SIMD(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 31,
  728. U32x8(0b00000000000000000000000000000001, 0b00000000000000000000000000000001, 0b00000000000000000000000000000000, 0b00000000000000000000000000000001, 0b00000000000000000000000000000000, 0b00000000000000000000000000000000, 0b00000000000000000000000000000001, 0b00000000000000000000000000000001));
  729. ASSERT_CRASH(U32x8(0b11000110110010110101010110100110, 0b11001000101100110110100101100101, 0b01011001100101010011001100001110, 0b10100101010010110010110110010010, 0b01011010001101101010100101101011, 0b01011010101101100101101011010101, 0b11010001010101001001001001011010, 0b10010101101010101100101010101011) >> 32, U"Tried to shift ");
  730. // Bit shift with multiple offsets.
  731. ASSERT_EQUAL_SIMD(U32x4(1, 2, 3, 4) << U32x4(0, 3, 1, 2), U32x4(1, 16, 6, 16));
  732. ASSERT_EQUAL_SIMD(
  733. U32x4(0b11111011111011111111001111101111u, 0b11111111011110011111111110011111u, 0b11111111111011111111101111111101u, 0b11111111011111111101111011111111u) << U32x4(0, 1, 30, 31),
  734. U32x4(0b11111011111011111111001111101111u, 0b11111110111100111111111100111110u, 0b01000000000000000000000000000000u, 0b10000000000000000000000000000000u)
  735. );
  736. ASSERT_EQUAL_SIMD(
  737. U32x4(0b11111011111111110111111100111111u, 0b11111111001111111101101111001111u, 0b11111011111111111111111110111111u, 0b11111111011110111111101111111111u) >> U32x4(0, 1, 30, 31),
  738. U32x4(0b11111011111111110111111100111111u, 0b01111111100111111110110111100111u, 0b00000000000000000000000000000011u, 0b00000000000000000000000000000001u)
  739. );
  740. ASSERT_EQUAL_SIMD(U32x4(1, 2, 3, 4) << U32x4(2, 4, 3, 1), U32x4(4, 32, 24, 8));
  741. ASSERT_EQUAL_SIMD(U32x4(64, 32, 5, 8) >> U32x4(2, 1, 2, 0), U32x4(16, 16, 1, 8));
  742. ASSERT_EQUAL_SIMD(U32x8(1, 2, 3, 4, 5, 6, 7, 8) << U32x8(2, 4, 3, 1, 0, 1, 2, 1), U32x8(4, 32, 24, 8, 5, 12, 28, 16));
  743. ASSERT_EQUAL_SIMD(U32x8(64, 32, 5, 8, 128, 64, 128, 256) >> U32x8(2, 4, 3, 1, 3, 1, 2, 1), U32x8(16, 2, 0, 4, 16, 32, 32, 128));
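// A minimal scalar reference for the per-lane shifts above, assuming lane i of (values << offsets)
// equals values[i] << offsets[i] whenever every offset stays within 0..31.
// The local names below are illustrative for this sketch and not part of the tested API.
{
	uint32_t shiftValues[4]  = {1u, 2u, 3u, 4u};
	uint32_t shiftOffsets[4] = {2u, 4u, 3u, 1u};
	uint32_t shiftExpected[4];
	for (int i = 0; i < 4; i++) {
		shiftExpected[i] = shiftValues[i] << shiftOffsets[i]; // Plain scalar shift per lane.
	}
	ASSERT_EQUAL_SIMD(U32x4(shiftValues[0], shiftValues[1], shiftValues[2], shiftValues[3]) << U32x4(shiftOffsets[0], shiftOffsets[1], shiftOffsets[2], shiftOffsets[3]),
	  U32x4(shiftExpected[0], shiftExpected[1], shiftExpected[2], shiftExpected[3]));
}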
  744. // Bit shift with immediate offset.
  745. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate<1>(U32x4(1, 2, 3, 4)), U32x4(2, 4, 6, 8));
  746. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate<2>(U32x4(1, 2, 3, 4)), U32x4(4, 8, 12, 16));
  747. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate<3>(U32x4(1, 2, 3, 4)), U32x4(8, 16, 24, 32));
  748. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate<4>(U32x4(1, 2, 3, 4)), U32x4(16, 32, 48, 64));
  749. ASSERT_EQUAL_SIMD(bitShiftRightImmediate<1>(U32x4(1, 2, 3, 4)), U32x4(0, 1, 1, 2));
  750. ASSERT_EQUAL_SIMD(bitShiftRightImmediate<1>(U32x4(2, 4, 6, 8)), U32x4(1, 2, 3, 4));
  751. ASSERT_EQUAL_SIMD(bitShiftRightImmediate<2>(U32x4(2, 4, 6, 8)), U32x4(0, 1, 1, 2));
  752. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate<4>(U32x4(0x0AB12CD0, 0xFFFFFFFF, 0x12345678, 0xF0000000)), U32x4(0xAB12CD00, 0xFFFFFFF0, 0x23456780, 0x00000000));
  753. ASSERT_EQUAL_SIMD(bitShiftRightImmediate<4>(U32x4(0x0AB12CD0, 0xFFFFFFFF, 0x12345678, 0x0000000F)), U32x4(0x00AB12CD, 0x0FFFFFFF, 0x01234567, 0x00000000));
  754. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate <1>(U32x8(1, 2, 3, 4, 5, 6, 7, 8)), U32x8( 2, 4, 6, 8, 10, 12, 14, 16));
  755. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate <2>(U32x8(1, 2, 3, 4, 5, 6, 7, 8)), U32x8( 4, 8, 12, 16, 20, 24, 28, 32));
  756. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate <3>(U32x8(1, 2, 3, 4, 5, 6, 7, 8)), U32x8( 8, 16, 24, 32, 40, 48, 56, 64));
  757. ASSERT_EQUAL_SIMD(bitShiftLeftImmediate <4>(U32x8(1, 2, 3, 4, 5, 6, 7, 8)), U32x8(16, 32, 48, 64, 80, 96,112,128));
  758. ASSERT_EQUAL_SIMD(bitShiftRightImmediate<1>(U32x8(1, 2, 3, 4, 5, 6, 7, 8)), U32x8( 0, 1, 1, 2, 2, 3, 3, 4));
  759. ASSERT_EQUAL_SIMD(bitShiftRightImmediate<1>(U32x8(2, 4, 6, 8, 10, 12, 14, 16)), U32x8( 1, 2, 3, 4, 5, 6, 7, 8));
  760. ASSERT_EQUAL_SIMD(bitShiftRightImmediate<2>(U32x8(2, 4, 6, 8, 10, 12, 14, 16)), U32x8( 0, 1, 1, 2, 2, 3, 3, 4));
  761. ASSERT_EQUAL_SIMD(
  762. bitShiftLeftImmediate<4>(U32x8(0x0AB12CD0, 0xFFFFFFFF, 0x12345678, 0xF0000000, 0x87654321, 0x48484848, 0x76437643, 0x11111111)),
  763. U32x8(0xAB12CD00, 0xFFFFFFF0, 0x23456780, 0x00000000, 0x76543210, 0x84848480, 0x64376430, 0x11111110));
  764. ASSERT_EQUAL_SIMD(
  765. bitShiftRightImmediate<4>(U32x8(0x0AB12CD0, 0xFFFFFFFF, 0x12345678, 0x0000000F, 0x87654321, 0x48484848, 0x76437643, 0x11111111)),
  766. U32x8(0x00AB12CD, 0x0FFFFFFF, 0x01234567, 0x00000000, 0x08765432, 0x04848484, 0x07643764, 0x01111111));
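// Hedged cross-check: the immediate variants are expected to behave like the dynamic shift
// operators given the same constant, so both sides of the first comparison below evaluate to
// 1, 2, ..., 8 and both sides of the second evaluate to 8, 16, ..., 64.
ASSERT_EQUAL_SIMD(bitShiftRightImmediate<3>(U32x8(8, 16, 24, 32, 40, 48, 56, 64)), U32x8(8, 16, 24, 32, 40, 48, 56, 64) >> 3);
ASSERT_EQUAL_SIMD(bitShiftLeftImmediate <3>(U32x8(1, 2, 3, 4, 5, 6, 7, 8)), U32x8(1, 2, 3, 4, 5, 6, 7, 8) << 3);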
  767. }
  768. static void testVectorExtract() {
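// Each vectorExtract_N(a, b) in the assertions below reads as selecting lanes N..N+laneCount-1
// from the concatenation of a and b, so _0 returns a, _laneCount returns b, and the variants in
// between act as a lane-aligned sliding window across both inputs.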
  769. ASSERT_EQUAL_SIMD(vectorExtract_0(U32x4(1, 2, 3, 4), U32x4(5, 6, 7, 8)), U32x4(1, 2, 3, 4));
  770. ASSERT_EQUAL_SIMD(vectorExtract_1(U32x4(1, 2, 3, 4), U32x4(5, 6, 7, 8)), U32x4(2, 3, 4, 5));
  771. ASSERT_EQUAL_SIMD(vectorExtract_2(U32x4(1, 2, 3, 4), U32x4(5, 6, 7, 8)), U32x4(3, 4, 5, 6));
  772. ASSERT_EQUAL_SIMD(vectorExtract_3(U32x4(1, 2, 3, 4), U32x4(5, 6, 7, 8)), U32x4(4, 5, 6, 7));
  773. ASSERT_EQUAL_SIMD(vectorExtract_4(U32x4(1, 2, 3, 4), U32x4(5, 6, 7, 8)), U32x4(5, 6, 7, 8));
  774. ASSERT_EQUAL_SIMD(vectorExtract_0(U32x4(123, 4294967295, 712, 45), U32x4(850514, 27, 0, 174)), U32x4(123, 4294967295, 712, 45));
  775. ASSERT_EQUAL_SIMD(vectorExtract_1(U32x4(123, 4294967295, 712, 45), U32x4(850514, 27, 0, 174)), U32x4(4294967295, 712, 45, 850514));
  776. ASSERT_EQUAL_SIMD(vectorExtract_2(U32x4(123, 4294967295, 712, 45), U32x4(850514, 27, 0, 174)), U32x4(712, 45, 850514, 27));
  777. ASSERT_EQUAL_SIMD(vectorExtract_3(U32x4(123, 4294967295, 712, 45), U32x4(850514, 27, 0, 174)), U32x4(45, 850514, 27, 0));
  778. ASSERT_EQUAL_SIMD(vectorExtract_4(U32x4(123, 4294967295, 712, 45), U32x4(850514, 27, 0, 174)), U32x4(850514, 27, 0, 174));
  779. ASSERT_EQUAL_SIMD(vectorExtract_0(I32x4(1, 2, 3, 4), I32x4(5, 6, 7, 8)), I32x4(1, 2, 3, 4));
  780. ASSERT_EQUAL_SIMD(vectorExtract_1(I32x4(1, 2, 3, 4), I32x4(5, 6, 7, 8)), I32x4(2, 3, 4, 5));
  781. ASSERT_EQUAL_SIMD(vectorExtract_2(I32x4(1, 2, 3, 4), I32x4(5, 6, 7, 8)), I32x4(3, 4, 5, 6));
  782. ASSERT_EQUAL_SIMD(vectorExtract_3(I32x4(1, 2, 3, 4), I32x4(5, 6, 7, 8)), I32x4(4, 5, 6, 7));
  783. ASSERT_EQUAL_SIMD(vectorExtract_4(I32x4(1, 2, 3, 4), I32x4(5, 6, 7, 8)), I32x4(5, 6, 7, 8));
  784. ASSERT_EQUAL_SIMD(vectorExtract_0(I32x4(123, 8462784, -712, 45), I32x4(-37562, 27, 0, 174)), I32x4(123, 8462784, -712, 45));
  785. ASSERT_EQUAL_SIMD(vectorExtract_1(I32x4(123, 8462784, -712, 45), I32x4(-37562, 27, 0, 174)), I32x4(8462784, -712, 45, -37562));
  786. ASSERT_EQUAL_SIMD(vectorExtract_2(I32x4(123, 8462784, -712, 45), I32x4(-37562, 27, 0, 174)), I32x4(-712, 45, -37562, 27));
  787. ASSERT_EQUAL_SIMD(vectorExtract_3(I32x4(123, 8462784, -712, 45), I32x4(-37562, 27, 0, 174)), I32x4(45, -37562, 27, 0));
  788. ASSERT_EQUAL_SIMD(vectorExtract_4(I32x4(123, 8462784, -712, 45), I32x4(-37562, 27, 0, 174)), I32x4(-37562, 27, 0, 174));
  789. ASSERT_EQUAL_SIMD(vectorExtract_0(F32x4(1.0f, -2.0f, 3.0f, -4.0f), F32x4(5.0f, 6.0f, -7.0f, 8.0f)), F32x4(1.0f, -2.0f, 3.0f, -4.0f));
  790. ASSERT_EQUAL_SIMD(vectorExtract_1(F32x4(1.0f, -2.0f, 3.0f, -4.0f), F32x4(5.0f, 6.0f, -7.0f, 8.0f)), F32x4(-2.0f, 3.0f, -4.0f, 5.0f));
  791. ASSERT_EQUAL_SIMD(vectorExtract_2(F32x4(1.0f, -2.0f, 3.0f, -4.0f), F32x4(5.0f, 6.0f, -7.0f, 8.0f)), F32x4(3.0f, -4.0f, 5.0f, 6.0f));
  792. ASSERT_EQUAL_SIMD(vectorExtract_3(F32x4(1.0f, -2.0f, 3.0f, -4.0f), F32x4(5.0f, 6.0f, -7.0f, 8.0f)), F32x4(-4.0f, 5.0f, 6.0f, -7.0f));
  793. ASSERT_EQUAL_SIMD(vectorExtract_4(F32x4(1.0f, -2.0f, 3.0f, -4.0f), F32x4(5.0f, 6.0f, -7.0f, 8.0f)), F32x4(5.0f, 6.0f, -7.0f, 8.0f));
  794. ASSERT_EQUAL_SIMD(vectorExtract_0(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(1, 2, 3, 4, 5, 6, 7, 8));
  795. ASSERT_EQUAL_SIMD(vectorExtract_1(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(2, 3, 4, 5, 6, 7, 8, 9));
  796. ASSERT_EQUAL_SIMD(vectorExtract_2(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(3, 4, 5, 6, 7, 8, 9, 10));
  797. ASSERT_EQUAL_SIMD(vectorExtract_3(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(4, 5, 6, 7, 8, 9, 10, 11));
  798. ASSERT_EQUAL_SIMD(vectorExtract_4(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(5, 6, 7, 8, 9, 10, 11, 12));
  799. ASSERT_EQUAL_SIMD(vectorExtract_5(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(6, 7, 8, 9, 10, 11, 12, 13));
  800. ASSERT_EQUAL_SIMD(vectorExtract_6(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(7, 8, 9, 10, 11, 12, 13, 14));
  801. ASSERT_EQUAL_SIMD(vectorExtract_7(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(8, 9, 10, 11, 12, 13, 14, 15));
  802. ASSERT_EQUAL_SIMD(vectorExtract_8(U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(9, 10, 11, 12, 13, 14, 15, 16)), U16x8(9, 10, 11, 12, 13, 14, 15, 16));
  803. ASSERT_EQUAL_SIMD(vectorExtract_0(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
  804. ASSERT_EQUAL_SIMD(vectorExtract_1(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17));
  805. ASSERT_EQUAL_SIMD(vectorExtract_2(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18));
  806. ASSERT_EQUAL_SIMD(vectorExtract_3(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19));
  807. ASSERT_EQUAL_SIMD(vectorExtract_4(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20));
  808. ASSERT_EQUAL_SIMD(vectorExtract_5(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21));
  809. ASSERT_EQUAL_SIMD(vectorExtract_6(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22));
  810. ASSERT_EQUAL_SIMD(vectorExtract_7(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23));
  811. ASSERT_EQUAL_SIMD(vectorExtract_8(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24));
  812. ASSERT_EQUAL_SIMD(vectorExtract_9(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25));
  813. ASSERT_EQUAL_SIMD(vectorExtract_10(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26));
  814. ASSERT_EQUAL_SIMD(vectorExtract_11(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27));
  815. ASSERT_EQUAL_SIMD(vectorExtract_12(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28));
  816. ASSERT_EQUAL_SIMD(vectorExtract_13(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29));
  817. ASSERT_EQUAL_SIMD(vectorExtract_14(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30));
  818. ASSERT_EQUAL_SIMD(vectorExtract_15(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31));
  819. ASSERT_EQUAL_SIMD(vectorExtract_16(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)), U8x16(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32));
  820. ASSERT_EQUAL_SIMD(vectorExtract_0(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  821. U32x8( 1, 2, 3, 4, 5, 6, 7, 8));
  822. ASSERT_EQUAL_SIMD(vectorExtract_1(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  823. U32x8( 2, 3, 4, 5, 6, 7, 8, 9));
  824. ASSERT_EQUAL_SIMD(vectorExtract_2(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  825. U32x8( 3, 4, 5, 6, 7, 8, 9,10));
  826. ASSERT_EQUAL_SIMD(vectorExtract_3(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  827. U32x8( 4, 5, 6, 7, 8, 9,10,11));
  828. ASSERT_EQUAL_SIMD(vectorExtract_4(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  829. U32x8( 5, 6, 7, 8, 9,10,11,12));
  830. ASSERT_EQUAL_SIMD(vectorExtract_5(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  831. U32x8( 6, 7, 8, 9,10,11,12,13));
  832. ASSERT_EQUAL_SIMD(vectorExtract_6(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  833. U32x8( 7, 8, 9,10,11,12,13,14));
  834. ASSERT_EQUAL_SIMD(vectorExtract_7(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  835. U32x8( 8, 9,10,11,12,13,14,15));
  836. ASSERT_EQUAL_SIMD(vectorExtract_8(U32x8( 1, 2, 3, 4, 5, 6, 7, 8), U32x8( 9,10,11,12,13,14,15,16)),
  837. U32x8( 9,10,11,12,13,14,15,16));
  838. ASSERT_EQUAL_SIMD(vectorExtract_5(U32x8( 1, 2, 3, 4, 5, 6, 7, 4294967295), U32x8( 9,10,11,1000,13,14,15,16)),
  839. U32x8( 6, 7, 4294967295, 9,10,11,1000,13));
  840. ASSERT_EQUAL_SIMD(vectorExtract_0(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  841. I32x8( 1,-2, 3, 4,-5, 6, 7, 8));
  842. ASSERT_EQUAL_SIMD(vectorExtract_1(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  843. I32x8(-2, 3, 4,-5, 6, 7, 8, 9));
  844. ASSERT_EQUAL_SIMD(vectorExtract_2(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  845. I32x8( 3, 4,-5, 6, 7, 8, 9,10));
  846. ASSERT_EQUAL_SIMD(vectorExtract_3(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  847. I32x8( 4,-5, 6, 7, 8, 9,10,11));
  848. ASSERT_EQUAL_SIMD(vectorExtract_4(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  849. I32x8(-5, 6, 7, 8, 9,10,11,-12));
  850. ASSERT_EQUAL_SIMD(vectorExtract_5(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  851. I32x8( 6, 7, 8, 9,10,11,-12,13));
  852. ASSERT_EQUAL_SIMD(vectorExtract_6(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  853. I32x8( 7, 8, 9,10,11,-12,13,14));
  854. ASSERT_EQUAL_SIMD(vectorExtract_7(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  855. I32x8( 8, 9,10,11,-12,13,14,15));
  856. ASSERT_EQUAL_SIMD(vectorExtract_8(I32x8( 1,-2, 3, 4,-5, 6, 7, 8), I32x8( 9,10,11,-12,13,14,15,-16)),
  857. I32x8( 9,10,11,-12,13,14,15,-16));
  858. ASSERT_EQUAL_SIMD(vectorExtract_0(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  859. F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f));
  860. ASSERT_EQUAL_SIMD(vectorExtract_1(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  861. F32x8( -2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f, 9.0f));
  862. ASSERT_EQUAL_SIMD(vectorExtract_2(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  863. F32x8( 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f));
  864. ASSERT_EQUAL_SIMD(vectorExtract_3(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  865. F32x8( 4.0f,-5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f));
  866. ASSERT_EQUAL_SIMD(vectorExtract_4(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  867. F32x8(-5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f,-12.0f));
  868. ASSERT_EQUAL_SIMD(vectorExtract_5(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  869. F32x8( 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f,-12.0f, 13.0f));
  870. ASSERT_EQUAL_SIMD(vectorExtract_6(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  871. F32x8( 7.0f, 8.0f, 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f));
  872. ASSERT_EQUAL_SIMD(vectorExtract_7(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  873. F32x8( 8.0f, 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f));
  874. ASSERT_EQUAL_SIMD(vectorExtract_8(F32x8( 1.1f,-2.2f, 3.0f, 4.0f,-5.0f, 6.0f, 7.0f, 8.0f), F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f)),
  875. F32x8( 9.0f, 10.0f, 11.0f,-12.0f, 13.0f, 14.0f, 15.0f,-16.0f));
  876. ASSERT_EQUAL_SIMD(vectorExtract_0 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  877. U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16));
  878. ASSERT_EQUAL_SIMD(vectorExtract_1 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  879. U16x16( 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16, 17));
  880. ASSERT_EQUAL_SIMD(vectorExtract_2 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  881. U16x16( 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16, 17,18));
  882. ASSERT_EQUAL_SIMD(vectorExtract_3 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  883. U16x16( 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16, 17,18,19));
  884. ASSERT_EQUAL_SIMD(vectorExtract_4 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  885. U16x16( 5, 6, 7, 8, 9,10,11,12,13,14,15,16, 17,18,19,20));
  886. ASSERT_EQUAL_SIMD(vectorExtract_5 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  887. U16x16( 6, 7, 8, 9,10,11,12,13,14,15,16, 17,18,19,20,21));
  888. ASSERT_EQUAL_SIMD(vectorExtract_6 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  889. U16x16( 7, 8, 9,10,11,12,13,14,15,16, 17,18,19,20,21,22));
  890. ASSERT_EQUAL_SIMD(vectorExtract_7 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  891. U16x16( 8, 9,10,11,12,13,14,15,16, 17,18,19,20,21,22,23));
  892. ASSERT_EQUAL_SIMD(vectorExtract_8 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  893. U16x16( 9,10,11,12,13,14,15,16, 17,18,19,20,21,22,23,24));
  894. ASSERT_EQUAL_SIMD(vectorExtract_9 (U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  895. U16x16(10,11,12,13,14,15,16, 17,18,19,20,21,22,23,24,25));
  896. ASSERT_EQUAL_SIMD(vectorExtract_10(U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  897. U16x16(11,12,13,14,15,16, 17,18,19,20,21,22,23,24,25,26));
  898. ASSERT_EQUAL_SIMD(vectorExtract_11(U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  899. U16x16(12,13,14,15,16, 17,18,19,20,21,22,23,24,25,26,27));
  900. ASSERT_EQUAL_SIMD(vectorExtract_12(U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  901. U16x16(13,14,15,16, 17,18,19,20,21,22,23,24,25,26,27,28));
  902. ASSERT_EQUAL_SIMD(vectorExtract_13(U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  903. U16x16(14,15,16, 17,18,19,20,21,22,23,24,25,26,27,28,29));
  904. ASSERT_EQUAL_SIMD(vectorExtract_14(U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  905. U16x16(15,16, 17,18,19,20,21,22,23,24,25,26,27,28,29,30));
  906. ASSERT_EQUAL_SIMD(vectorExtract_15(U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  907. U16x16(16, 17,18,19,20,21,22,23,24,25,26,27,28,29,30,31));
  908. ASSERT_EQUAL_SIMD(vectorExtract_16(U16x16( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32)),
  909. U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32));
  910. ASSERT_EQUAL_SIMD(vectorExtract_0 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  911. U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32));
  912. ASSERT_EQUAL_SIMD(vectorExtract_1 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  913. U8x32( 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33));
  914. ASSERT_EQUAL_SIMD(vectorExtract_2 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  915. U8x32( 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34));
  916. ASSERT_EQUAL_SIMD(vectorExtract_3 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  917. U8x32( 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35));
  918. ASSERT_EQUAL_SIMD(vectorExtract_4 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  919. U8x32( 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36));
  920. ASSERT_EQUAL_SIMD(vectorExtract_5 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  921. U8x32( 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37));
  922. ASSERT_EQUAL_SIMD(vectorExtract_6 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  923. U8x32( 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38));
  924. ASSERT_EQUAL_SIMD(vectorExtract_7 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  925. U8x32( 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39));
  926. ASSERT_EQUAL_SIMD(vectorExtract_8 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  927. U8x32( 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40));
  928. ASSERT_EQUAL_SIMD(vectorExtract_9 (U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  929. U8x32(10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41));
  930. ASSERT_EQUAL_SIMD(vectorExtract_10(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  931. U8x32(11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42));
  932. ASSERT_EQUAL_SIMD(vectorExtract_11(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  933. U8x32(12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43));
  934. ASSERT_EQUAL_SIMD(vectorExtract_12(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  935. U8x32(13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44));
  936. ASSERT_EQUAL_SIMD(vectorExtract_13(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  937. U8x32(14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45));
  938. ASSERT_EQUAL_SIMD(vectorExtract_14(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  939. U8x32(15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46));
  940. ASSERT_EQUAL_SIMD(vectorExtract_15(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  941. U8x32(16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47));
  942. ASSERT_EQUAL_SIMD(vectorExtract_16(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  943. U8x32(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48));
  944. ASSERT_EQUAL_SIMD(vectorExtract_17(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  945. U8x32(18,19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49));
  946. ASSERT_EQUAL_SIMD(vectorExtract_18(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  947. U8x32(19,20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50));
  948. ASSERT_EQUAL_SIMD(vectorExtract_19(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  949. U8x32(20,21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51));
  950. ASSERT_EQUAL_SIMD(vectorExtract_20(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  951. U8x32(21,22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52));
  952. ASSERT_EQUAL_SIMD(vectorExtract_21(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
  953. U8x32(22,23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53));
  954. ASSERT_EQUAL_SIMD(vectorExtract_22(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(23,24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54));
	ASSERT_EQUAL_SIMD(vectorExtract_23(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(24,25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55));
	ASSERT_EQUAL_SIMD(vectorExtract_24(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(25,26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56));
	ASSERT_EQUAL_SIMD(vectorExtract_25(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(26,27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57));
	ASSERT_EQUAL_SIMD(vectorExtract_26(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(27,28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58));
	ASSERT_EQUAL_SIMD(vectorExtract_27(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(28,29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59));
	ASSERT_EQUAL_SIMD(vectorExtract_28(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(29,30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60));
	ASSERT_EQUAL_SIMD(vectorExtract_29(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(30,31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61));
	ASSERT_EQUAL_SIMD(vectorExtract_30(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(31,32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62));
	ASSERT_EQUAL_SIMD(vectorExtract_31(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(32, 33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63));
	ASSERT_EQUAL_SIMD(vectorExtract_32(U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32), U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64)),
		U8x32(33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64));
}
static void testGather() {
	// The Buffer must be kept alive for the whole lifetime of any pointer taken from it, so that reference counting does not free the memory too early.
	// SafePointer is only meant to be faster than Buffer and safer than a raw pointer; it does not keep the allocation alive by itself.
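	// A minimal sketch of the pitfall described above (hypothetical code, deliberately kept inside a comment):
	// taking a SafePointer from a temporary Buffer would leave the pointer dangling as soon as the
	// temporary's reference count reaches zero.
	//   SafePointer<float> dangling = buffer_getSafeData<float>(buffer_create(16), "temporary buffer"); // do not do this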
	Buffer gatherTestBuffer = buffer_create(sizeof(int32_t) * 32);
	{
		// 32-bit floating-point gather
		SafePointer<float> pointerF = buffer_getSafeData<float>(gatherTestBuffer, "float gather test data");
		for (int i = 0; i < 32; i++) { // -32.0f, -30.0f, -28.0f, -26.0f ... 24.0f, 26.0f, 28.0f, 30.0f
			pointerF[i] = i * 2.0f - 32.0f;
		}
		ASSERT_EQUAL_SIMD(gather_F32(pointerF     , U32x4(2, 1, 30, 31)), F32x4(-28.0f, -30.0f, 28.0f, 30.0f));
		ASSERT_EQUAL_SIMD(gather_F32(pointerF + 10, U32x4(0, 1, 2, 3)), F32x4(-12.0f, -10.0f, -8.0f, -6.0f));
		ASSERT_EQUAL_SIMD(gather_F32(pointerF     , U32x8(2, 1, 28, 29, 3, 0, 30, 31)), F32x8(-28.0f, -30.0f, 24.0f, 26.0f, -26.0f, -32.0f, 28.0f, 30.0f));
		ASSERT_EQUAL_SIMD(gather_F32(pointerF + 10, U32x8(0, 1, 2, 3, 4, 5, 6, 7)), F32x8(-12.0f, -10.0f, -8.0f, -6.0f, -4.0f, -2.0f, 0.0f, 2.0f));
	}
	{
		// Signed 32-bit integer gather
		SafePointer<int32_t> pointerI = buffer_getSafeData<int32_t>(gatherTestBuffer, "int32_t gather test data");
		for (int i = 0; i < 32; i++) { // -32, -30, -28, -26 ... 24, 26, 28, 30
			pointerI[i] = i * 2 - 32;
		}
		ASSERT_EQUAL_SIMD(gather_I32(pointerI     , U32x4(2, 1, 30, 31)), I32x4(-28, -30, 28, 30));
		ASSERT_EQUAL_SIMD(gather_I32(pointerI + 10, U32x4(0, 1, 2, 3)), I32x4(-12, -10, -8, -6));
		ASSERT_EQUAL_SIMD(gather_I32(pointerI     , U32x8(2, 1, 28, 29, 3, 0, 30, 31)), I32x8(-28, -30, 24, 26, -26, -32, 28, 30));
		ASSERT_EQUAL_SIMD(gather_I32(pointerI + 10, U32x8(0, 1, 2, 3, 4, 5, 6, 7)), I32x8(-12, -10, -8, -6, -4, -2, 0, 2));
	}
	{
		// Unsigned 32-bit integer gather
		SafePointer<uint32_t> pointerU = buffer_getSafeData<uint32_t>(gatherTestBuffer, "uint32_t gather test data");
		for (int i = 0; i < 32; i++) { // 100, 102, 104, 106 ... 156, 158, 160, 162
			pointerU[i] = 100 + i * 2;
		}
		ASSERT_EQUAL_SIMD(gather_U32(pointerU     , U32x4(2, 1, 30, 31)), U32x4(104, 102, 160, 162));
		ASSERT_EQUAL_SIMD(gather_U32(pointerU + 10, U32x4(0, 1, 2, 3)), U32x4(120, 122, 124, 126));
		ASSERT_EQUAL_SIMD(gather_U32(pointerU     , U32x8(2, 1, 28, 29, 3, 0, 30, 31)), U32x8(104, 102, 156, 158, 106, 100, 160, 162));
		ASSERT_EQUAL_SIMD(gather_U32(pointerU + 10, U32x8(0, 1, 2, 3, 4, 5, 6, 7)), U32x8(120, 122, 124, 126, 128, 130, 132, 134));
	}
}
START_TEST(Simd)
	printText("\nSIMD test is compiled using:\n");
	#ifdef USE_SSE2
		printText(" * SSE2\n");
	#endif
	#ifdef USE_SSSE3
		printText(" * SSSE3\n");
	#endif
	#ifdef USE_AVX
		printText(" * AVX\n");
	#endif
	#ifdef USE_AVX2
		printText(" * AVX2\n");
	#endif
	#ifdef USE_NEON
		printText(" * NEON\n");
	#endif
	testComparisons();
	// Reciprocal: 1 / x
	ASSERT_EQUAL_SIMD(reciprocal(F32x4(0.5f, 1.0f, 2.0f, 4.0f)), F32x4(2.0f, 1.0f, 0.5f, 0.25f));
	ASSERT_EQUAL_SIMD(reciprocal(F32x8(0.5f, 1.0f, 2.0f, 4.0f, 8.0f, 10.0f, 100.0f, 1000.0f)), F32x8(2.0f, 1.0f, 0.5f, 0.25f, 0.125f, 0.1f, 0.01f, 0.001f));
	// Reciprocal square root: 1 / sqrt(x)
	ASSERT_EQUAL_SIMD(reciprocalSquareRoot(F32x4(1.0f, 4.0f, 16.0f, 100.0f)), F32x4(1.0f, 0.5f, 0.25f, 0.1f));
	ASSERT_EQUAL_SIMD(reciprocalSquareRoot(F32x8(1.0f, 4.0f, 16.0f, 100.0f, 400.0f, 64.0f, 25.0f, 100.0f)), F32x8(1.0f, 0.5f, 0.25f, 0.1f, 0.05f, 0.125f, 0.2f, 0.1f));
	// Square root: sqrt(x)
	ASSERT_EQUAL_SIMD(squareRoot(F32x4(1.0f, 4.0f, 9.0f, 100.0f)), F32x4(1.0f, 2.0f, 3.0f, 10.0f));
	ASSERT_EQUAL_SIMD(squareRoot(F32x8(1.0f, 4.0f, 9.0f, 100.0f, 64.0f, 256.0f, 1024.0f, 4096.0f)), F32x8(1.0f, 2.0f, 3.0f, 10.0f, 8.0f, 16.0f, 32.0f, 64.0f));
	// Minimum
	ASSERT_EQUAL_SIMD(min(F32x4(1.1f, 2.2f, 3.3f, 4.4f), F32x4(5.0f, 3.0f, 1.0f, -1.0f)), F32x4(1.1f, 2.2f, 1.0f, -1.0f));
	ASSERT_EQUAL_SIMD(min(F32x8(1.1f, 2.2f, 3.3f, 4.4f, 5.5f, 6.6f, 7.7f, 8.8f), F32x8(5.0f, 3.0f, 1.0f, -1.0f, 4.0f, 5.0f, -2.5f, 10.0f)), F32x8(1.1f, 2.2f, 1.0f, -1.0f, 4.0f, 5.0f, -2.5f, 8.8f));
	// Maximum
	ASSERT_EQUAL_SIMD(max(F32x4(1.1f, 2.2f, 3.3f, 4.4f), F32x4(5.0f, 3.0f, 1.0f, -1.0f)), F32x4(5.0f, 3.0f, 3.3f, 4.4f));
	ASSERT_EQUAL_SIMD(max(F32x8(1.1f, 2.2f, 3.3f, 4.4f, 5.5f, 6.6f, 7.7f, 8.8f), F32x8(5.0f, 3.0f, 1.0f, -1.0f, 4.0f, 5.0f, -2.5f, 10.0f)), F32x8(5.0f, 3.0f, 3.3f, 4.4f, 5.5f, 6.6f, 7.7f, 10.0f));
	// Clamp
	ASSERT_EQUAL_SIMD(clamp(F32x4(-1.5f), F32x4(-35.1f, 1.0f, 2.0f, 45.7f), F32x4(1.5f)), F32x4(-1.5f, 1.0f, 1.5f, 1.5f));
	ASSERT_EQUAL_SIMD(clampUpper(F32x4(-35.1f, 1.0f, 2.0f, 45.7f), F32x4(1.5f)), F32x4(-35.1f, 1.0f, 1.5f, 1.5f));
	ASSERT_EQUAL_SIMD(clampLower(F32x4(-1.5f), F32x4(-35.1f, 1.0f, 2.0f, 45.7f)), F32x4(-1.5f, 1.0f, 2.0f, 45.7f));
	ASSERT_EQUAL_SIMD(clamp(F32x8(-1.5f), F32x8(-35.1f, 1.0f, 2.0f, 45.7f, 0.0f, -1.0f, 2.1f, -1.9f), F32x8(1.5f)), F32x8(-1.5f, 1.0f, 1.5f, 1.5f, 0.0f, -1.0f, 1.5f, -1.5f));
	ASSERT_EQUAL_SIMD(clampUpper(F32x8(-35.1f, 1.0f, 2.0f, 45.7f, 0.0f, -1.0f, 2.1f, -1.9f), F32x8(1.5f)), F32x8(-35.1f, 1.0f, 1.5f, 1.5f, 0.0f, -1.0f, 1.5f, -1.9f));
	ASSERT_EQUAL_SIMD(clampLower(F32x8(-1.5f), F32x8(-35.1f, 1.0f, 2.0f, 45.7f, 0.0f, -1.0f, 2.1f, -1.9f)), F32x8(-1.5f, 1.0f, 2.0f, 45.7f, 0.0f, -1.0f, 2.1f, -1.5f));
	// Float to integer conversions
	// Underflow and overflow are undefined behavior, because NEON clamps out-of-bound values while SSE truncates away the higher bits.
	ASSERT_EQUAL_SIMD(truncateToU32(F32x4(0.01f, 0.99f, 1.01f, 1.99f)), U32x4(0, 0, 1, 1));
	ASSERT_EQUAL_SIMD(truncateToI32(F32x4(0.01f, 0.99f, 1.01f, 1.99f)), I32x4(0, 0, 1, 1));
	ASSERT_EQUAL_SIMD(truncateToI32(F32x4(-0.01f, -0.99f, -1.01f, -1.99f)), I32x4(0, 0, -1, -1));
	ASSERT_EQUAL_SIMD(truncateToU32(F32x4(0.1f, 5.4f, 2.6f, 4.9f)), U32x4(0, 5, 2, 4));
	ASSERT_EQUAL_SIMD(truncateToI32(F32x4(0.1f, 5.4f, 2.6f, 4.9f)), I32x4(0, 5, 2, 4));
	ASSERT_EQUAL_SIMD(truncateToI32(F32x4(-1.1f, -0.9f, -0.1f, 0.1f)), I32x4(-1, 0, 0, 0));
	ASSERT_EQUAL_SIMD(truncateToI32(F32x4(-1000.9f, -23.4f, 123456.7f, 846.999f)), I32x4(-1000, -23, 123456, 846));
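	// A hedged sketch of how to stay inside defined behavior: clamping to an in-range interval before
	// truncating avoids the NEON/SSE difference mentioned above. The bounds below are illustrative
	// assumptions chosen for this sketch, not requirements of the library.
	ASSERT_EQUAL_SIMD(truncateToI32(clamp(F32x4(-100.0f), F32x4(-200.5f, -1.5f, 1.5f, 200.5f), F32x4(100.0f))), I32x4(-100, -1, 1, 100));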
	// F32x4 operations
	ASSERT_EQUAL_SIMD(F32x4(1.1f, -2.2f, 3.3f, 4.0f) + F32x4(2.2f, -4.4f, 6.6f, 8.0f), F32x4(3.3f, -6.6f, 9.9f, 12.0f));
	ASSERT_EQUAL_SIMD(F32x4(-1.5f, -0.5f, 0.5f, 1.5f) + 1.0f, F32x4(-0.5f, 0.5f, 1.5f, 2.5f));
	ASSERT_EQUAL_SIMD(1.0f + F32x4(-1.5f, -0.5f, 0.5f, 1.5f), F32x4(-0.5f, 0.5f, 1.5f, 2.5f));
	ASSERT_EQUAL_SIMD(F32x4(1.1f, 2.2f, 3.3f, 4.4f) - F32x4(0.1f, 0.2f, 0.3f, 0.4f), F32x4(1.0f, 2.0f, 3.0f, 4.0f));
	ASSERT_EQUAL_SIMD(F32x4(1.0f, 2.0f, 3.0f, 4.0f) - 0.5f, F32x4(0.5f, 1.5f, 2.5f, 3.5f));
	ASSERT_EQUAL_SIMD(0.5f - F32x4(1.0f, 2.0f, 3.0f, 4.0f), F32x4(-0.5f, -1.5f, -2.5f, -3.5f));
	ASSERT_EQUAL_SIMD(2.0f * F32x4(1.0f, 2.0f, 3.0f, 4.0f), F32x4(2.0f, 4.0f, 6.0f, 8.0f));
	ASSERT_EQUAL_SIMD(F32x4(1.0f, -2.0f, 3.0f, -4.0f) * -2.0f, F32x4(-2.0f, 4.0f, -6.0f, 8.0f));
	ASSERT_EQUAL_SIMD(F32x4(1.0f, -2.0f, 3.0f, -4.0f) * F32x4(1.0f, -2.0f, 3.0f, -4.0f), F32x4(1.0f, 4.0f, 9.0f, 16.0f));
	ASSERT_EQUAL_SIMD(-F32x4(1.0f, -2.0f, 3.0f, -4.0f), F32x4(-1.0f, 2.0f, -3.0f, 4.0f));
	// F32x8 operations
	ASSERT_EQUAL_SIMD(F32x8(1.1f, -2.2f, 3.3f, 4.0f, 1.4f, 2.3f, 3.2f, 4.1f) + F32x8(2.2f, -4.4f, 6.6f, 8.0f, 4.11f, 3.22f, 2.33f, 1.44f), F32x8(3.3f, -6.6f, 9.9f, 12.0f, 5.51f, 5.52f, 5.53f, 5.54f));
	ASSERT_EQUAL_SIMD(F32x8(-1.5f, -0.5f, 0.5f, 1.5f, 1000.0f, 2000.0f, -4000.0f, -1500.0f) + 1.0f, F32x8(-0.5f, 0.5f, 1.5f, 2.5f, 1001.0f, 2001.0f, -3999.0f, -1499.0f));
	ASSERT_EQUAL_SIMD(1.0f + F32x8(-1.5f, -0.5f, 0.5f, 1.5f, 1000.0f, 2000.0f, -4000.0f, -1500.0f), F32x8(-0.5f, 0.5f, 1.5f, 2.5f, 1001.0f, 2001.0f, -3999.0f, -1499.0f));
	ASSERT_EQUAL_SIMD(F32x8(1.1f, 2.2f, 3.3f, 4.4f, 5.5f, 6.6f, 7.7f, 8.8f) - F32x8(0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f), F32x8(1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f));
	ASSERT_EQUAL_SIMD(F32x8(1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f) - 0.5f, F32x8(0.5f, 1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f, 7.5f));
	ASSERT_EQUAL_SIMD(0.5f - F32x8(1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f), F32x8(-0.5f, -1.5f, -2.5f, -3.5f, -4.5f, -5.5f, -6.5f, -7.5f));
	ASSERT_EQUAL_SIMD(2.0f * F32x8(1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f), F32x8(2.0f, 4.0f, 6.0f, 8.0f, 10.0f, 12.0f, 14.0f, 16.0f));
	ASSERT_EQUAL_SIMD(F32x8(1.0f, -2.0f, 3.0f, -4.0f, 5.0f, -6.0f, 7.0f, -8.0f) * -2.0f, F32x8(-2.0f, 4.0f, -6.0f, 8.0f, -10.0f, 12.0f, -14.0f, 16.0f));
	ASSERT_EQUAL_SIMD(F32x8(1.0f, -2.0f, 3.0f, -4.0f, 5.0f, -6.0f, 7.0f, -8.0f) * F32x8(1.0f, -2.0f, 3.0f, -4.0f, 5.0f, -6.0f, 7.0f, -8.0f), F32x8(1.0f, 4.0f, 9.0f, 16.0f, 25.0f, 36.0f, 49.0f, 64.0f));
	ASSERT_EQUAL_SIMD(-F32x8(1.0f, -2.0f, 3.0f, -4.0f, 5.0f, -6.0f, 7.0f, -8.0f), F32x8(-1.0f, 2.0f, -3.0f, 4.0f, -5.0f, 6.0f, -7.0f, 8.0f));
	// I32x4 operations
	ASSERT_EQUAL_SIMD(I32x4(1, 2, -3, 4) + I32x4(-2, 4, 6, 8), I32x4(-1, 6, 3, 12));
	ASSERT_EQUAL_SIMD(I32x4(1, -2, 3, 4) - 4, I32x4(-3, -6, -1, 0));
	ASSERT_EQUAL_SIMD(10 + I32x4(1, 2, 3, 4), I32x4(11, 12, 13, 14));
	ASSERT_EQUAL_SIMD(I32x4(1, 2, 3, 4) + I32x4(4), I32x4(5, 6, 7, 8));
	ASSERT_EQUAL_SIMD(I32x4(10) + I32x4(1, 2, 3, 4), I32x4(11, 12, 13, 14));
	ASSERT_EQUAL_SIMD(I32x4(-3, 6, -9, 12) * I32x4(1, 2, -3, -4), I32x4(-3, 12, 27, -48));
	ASSERT_EQUAL_SIMD(-I32x4(1, -2, 3, -4), I32x4(-1, 2, -3, 4));
	// I32x8 operations
	ASSERT_EQUAL_SIMD(I32x8(1, 2, 3, 4, 5, 6, 7, 8) - 1, I32x8(0, 1, 2, 3, 4, 5, 6, 7));
	ASSERT_EQUAL_SIMD(1 - I32x8(1, 2, 3, 4, 5, 6, 7, 8), I32x8(0, -1, -2, -3, -4, -5, -6, -7));
	ASSERT_EQUAL_SIMD(2 * I32x8(1, 2, 3, 4, 5, 6, 7, 8), I32x8(2, 4, 6, 8, 10, 12, 14, 16));
	ASSERT_EQUAL_SIMD(I32x8(1, -2, 3, -4, 5, -6, 7, -8) * -2, I32x8(-2, 4, -6, 8, -10, 12, -14, 16));
	ASSERT_EQUAL_SIMD(I32x8(1, -2, 3, -4, 5, -6, 7, -8) * I32x8(1, -2, 3, -4, 5, -6, 7, -8), I32x8(1, 4, 9, 16, 25, 36, 49, 64));
	ASSERT_EQUAL_SIMD(-I32x8(1, -2, 3, -4, 5, -6, 7, -8), I32x8(-1, 2, -3, 4, -5, 6, -7, 8));
	// U32x4 operations
	ASSERT_EQUAL_SIMD(U32x4(1, 2, 3, 4) + U32x4(2, 4, 6, 8), U32x4(3, 6, 9, 12));
	ASSERT_EQUAL_SIMD(U32x4(1, 2, 3, 4) + 4, U32x4(5, 6, 7, 8));
	ASSERT_EQUAL_SIMD(10 + U32x4(1, 2, 3, 4), U32x4(11, 12, 13, 14));
	ASSERT_EQUAL_SIMD(U32x4(1, 2, 3, 4) + U32x4(4), U32x4(5, 6, 7, 8));
	ASSERT_EQUAL_SIMD(U32x4(10) + U32x4(1, 2, 3, 4), U32x4(11, 12, 13, 14));
	ASSERT_EQUAL_SIMD(U32x4(3, 6, 9, 12) - U32x4(1, 2, 3, 4), U32x4(2, 4, 6, 8));
	ASSERT_EQUAL_SIMD(U32x4(3, 6, 9, 12) * U32x4(1, 2, 3, 4), U32x4(3, 12, 27, 48));
	// U32x8 operations
	ASSERT_EQUAL_SIMD(U32x8(1, 2, 3, 4, 5, 6, 7, 8) - 1, U32x8(0, 1, 2, 3, 4, 5, 6, 7));
	ASSERT_EQUAL_SIMD(10 - U32x8(1, 2, 3, 4, 5, 6, 7, 8), U32x8(9, 8, 7, 6, 5, 4, 3, 2));
	ASSERT_EQUAL_SIMD(2 * U32x8(1, 2, 3, 4, 5, 6, 7, 8), U32x8(2, 4, 6, 8, 10, 12, 14, 16));
	ASSERT_EQUAL_SIMD(U32x8(1, 2, 3, 4, 5, 6, 7, 8) * 2, U32x8(2, 4, 6, 8, 10, 12, 14, 16));
	ASSERT_EQUAL_SIMD(U32x8(1, 2, 3, 4, 5, 6, 7, 8) * U32x8(1, 2, 3, 4, 5, 6, 7, 8), U32x8(1, 4, 9, 16, 25, 36, 49, 64));
	// U16x8 operations
	ASSERT_EQUAL_SIMD(U16x8(1, 2, 3, 4, 5, 6, 7, 8) + U16x8(2, 4, 6, 8, 10, 12, 14, 16), U16x8(3, 6, 9, 12, 15, 18, 21, 24));
	ASSERT_EQUAL_SIMD(U16x8(1, 2, 3, 4, 5, 6, 7, 8) + 8, U16x8(9, 10, 11, 12, 13, 14, 15, 16));
	ASSERT_EQUAL_SIMD(10 + U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(11, 12, 13, 14, 15, 16, 17, 18));
	ASSERT_EQUAL_SIMD(U16x8(1, 2, 3, 4, 5, 6, 7, 8) + U16x8((uint16_t)8), U16x8(9, 10, 11, 12, 13, 14, 15, 16));
	ASSERT_EQUAL_SIMD(U16x8((uint16_t)10) + U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(11, 12, 13, 14, 15, 16, 17, 18));
	ASSERT_EQUAL_SIMD(U16x8(3, 6, 9, 12, 15, 18, 21, 24) - U16x8(1, 2, 3, 4, 5, 6, 7, 8), U16x8(2, 4, 6, 8, 10, 12, 14, 16));
	// U16x16 operations
	ASSERT_EQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) + U16x16(2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32), U16x16(3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48));
	ASSERT_EQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) + 8, U16x16(9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24));
	ASSERT_EQUAL_SIMD(8 + U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24));
	ASSERT_EQUAL_SIMD(U16x16(3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48) - U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32));
	ASSERT_EQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) - 1, U16x16(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15));
	ASSERT_EQUAL_SIMD(16 - U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0));
	ASSERT_EQUAL_SIMD(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) * 2, U16x16(2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32));
	ASSERT_EQUAL_SIMD(2 * U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), U16x16(2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32));
	// U8x16 operations
	ASSERT_EQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) + 2, U8x16(3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18));
	ASSERT_EQUAL_SIMD(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) - 1, U8x16(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15));
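	// Saturated arithmetic clamps to the representable range of uint8_t instead of wrapping around:
	// additions stop at 255 and subtractions stop at 0, as the expected vectors below show.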
	ASSERT_EQUAL_SIMD(
		saturatedAddition(U8x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 255), U8x16((uint8_t)250)),
		U8x16(251, 252, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255)
	);
	ASSERT_EQUAL_SIMD(
		saturatedSubtraction(
			U8x16(128, 128, 128, 0, 255, 255, 0, 200, 123, 80, 46, 46, 46, 255, 255, 255),
			U8x16( 0, 128, 255, 0, 255, 0, 255, 100, 23, 81, 45, 46, 47, 128, 127, 200)),
		U8x16(128, 0, 0, 0, 0, 255, 0, 100, 100, 0, 1, 0, 0, 127, 128, 55)
	);
	// U8x32 operations
	ASSERT_EQUAL_SIMD(
		  U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)
		+ U8x32( 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64),
		  U8x32( 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51, 54, 57, 60, 63, 66, 69, 72, 75, 78, 81, 84, 87, 90, 93, 96));
	ASSERT_EQUAL_SIMD(
		U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32) + 5,
		U8x32( 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37));
	ASSERT_EQUAL_SIMD(
		5 + U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32),
		U8x32( 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37));
	ASSERT_EQUAL_SIMD(
		  U8x32( 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51, 54, 57, 60, 63, 66, 69, 72, 75, 78, 81, 84, 87, 90, 93, 96)
		- U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32),
		  U8x32( 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64));
	ASSERT_EQUAL_SIMD(
		U8x32( 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37) - 5,
		U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32));
	ASSERT_EQUAL_SIMD(
		33 - U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32),
		U8x32(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1));
	ASSERT_EQUAL_SIMD(
		saturatedAddition(
			U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,255),
			U8x32((uint8_t)240)),
		U8x32(241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255)
	);
	ASSERT_EQUAL_SIMD(
		saturatedSubtraction(
			U8x32( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,255),
			U8x32((uint8_t)16)),
		U8x32( 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,239)
	);
	// Unsigned integer unpacking
	ASSERT_EQUAL_SIMD(lowerToU32(U16x8(1,2,3,4,5,6,7,8)), U32x4(1, 2, 3, 4));
	ASSERT_EQUAL_SIMD(higherToU32(U16x8(1,2,3,4,5,6,7,8)), U32x4(5, 6, 7, 8));
	ASSERT_EQUAL_SIMD(lowerToU16(U8x16(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)), U16x8(1,2,3,4,5,6,7,8));
	ASSERT_EQUAL_SIMD(higherToU16(U8x16(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)), U16x8(9,10,11,12,13,14,15,16));
	ASSERT_EQUAL_SIMD(lowerToU32(U16x16(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)), U32x8(1,2,3,4,5,6,7,8));
	ASSERT_EQUAL_SIMD(higherToU32(U16x16(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)), U32x8(9,10,11,12,13,14,15,16));
	ASSERT_EQUAL_SIMD(lowerToU32(U16x16(1,2,3,4,5,6,65535,8,9,10,11,12,13,1000,15,16)), U32x8(1,2,3,4,5,6,65535,8));
	ASSERT_EQUAL_SIMD(higherToU32(U16x16(1,2,3,4,5,6,65535,8,9,10,11,12,13,1000,15,16)), U32x8(9,10,11,12,13,1000,15,16));
	ASSERT_EQUAL_SIMD(lowerToU16(U8x32(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,255,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,255)), U16x16(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,255));
	ASSERT_EQUAL_SIMD(higherToU16(U8x32(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,255,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,255)), U16x16(17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,255));
	testBitMasks();
	testBitShift();
	// Bitwise negation.
	ASSERT_EQUAL_SIMD(
		~U32x4(0b11000000111000000111111100001100, 0b00111000000000110000001110001100, 0b00001110000000100011000000011001, 0b00001110001000000111001100001000),
		U32x4(0b00111111000111111000000011110011, 0b11000111111111001111110001110011, 0b11110001111111011100111111100110, 0b11110001110111111000110011110111)
	);
	ASSERT_EQUAL_SIMD(
		~U16x8(0b1100000011100000, 0b0111111100001100, 0b0011100000000011, 0b0000001110001100, 0b0000111000000010, 0b0011000000011000, 0b0000111000100000, 0b0111001100001000),
		U16x8(0b0011111100011111, 0b1000000011110011, 0b1100011111111100, 0b1111110001110011, 0b1111000111111101, 0b1100111111100111, 0b1111000111011111, 0b1000110011110111)
	);
	ASSERT_EQUAL_SIMD(
		~U32x8(0b11000000111000000111111100001100, 0b00111000000000110000001110001100, 0b00001110000000100011000000011000, 0b00001110001000000111001100001000, 0b11000000111000100111101100101100, 0b00111010000000110010001110101101, 0b01001110001000100011001000010010, 0b01001110001001000111100110000100),
		U32x8(0b00111111000111111000000011110011, 0b11000111111111001111110001110011, 0b11110001111111011100111111100111, 0b11110001110111111000110011110111, 0b00111111000111011000010011010011, 0b11000101111111001101110001010010, 0b10110001110111011100110111101101, 0b10110001110110111000011001111011)
	);
	ASSERT_EQUAL_SIMD(
		~U16x16(0b1100000011100000, 0b0111111100001100, 0b0011100000000011, 0b0000001110001100, 0b0000111000000010, 0b0011000000011000, 0b0000111000100000, 0b0111001100001000, 0b1100100011100100, 0b0110011100001110, 0b0010100001001011, 0b0001001110001110, 0b0000111011000110, 0b0011000111011000, 0b0000111000100100, 0b0101001100011000),
		U16x16(0b0011111100011111, 0b1000000011110011, 0b1100011111111100, 0b1111110001110011, 0b1111000111111101, 0b1100111111100111, 0b1111000111011111, 0b1000110011110111, 0b0011011100011011, 0b1001100011110001, 0b1101011110110100, 0b1110110001110001, 0b1111000100111001, 0b1100111000100111, 0b1111000111011011, 0b1010110011100111)
	);
	// Reinterpret cast.
	ASSERT_EQUAL_SIMD(
		reinterpret_U8FromU32(U32x4(ENDIAN32_BYTE_0, ENDIAN32_BYTE_1, ENDIAN32_BYTE_2, ENDIAN32_BYTE_3)),
		U8x16(
			255, 0, 0, 0,
			0, 255, 0, 0,
			0, 0, 255, 0,
			0, 0, 0, 255
		)
	);
	ASSERT_EQUAL_SIMD(
		reinterpret_U8FromU32(U32x4(
			ENDIAN32_BYTE_0 | ENDIAN32_BYTE_2,
			ENDIAN32_BYTE_0 | ENDIAN32_BYTE_3,
			ENDIAN32_BYTE_1,
			ENDIAN32_BYTE_1 | ENDIAN32_BYTE_3
		)),
		U8x16(
			255, 0, 255, 0,
			255, 0, 0, 255,
			0, 255, 0, 0,
			0, 255, 0, 255
		)
	);
	ASSERT_EQUAL_SIMD(
		reinterpret_U32FromU8(U8x16(
			255, 0, 255, 0,
			255, 0, 0, 255,
			0, 255, 0, 0,
			0, 255, 0, 255
		)),
		U32x4(
			ENDIAN32_BYTE_0 | ENDIAN32_BYTE_2,
			ENDIAN32_BYTE_0 | ENDIAN32_BYTE_3,
			ENDIAN32_BYTE_1,
			ENDIAN32_BYTE_1 | ENDIAN32_BYTE_3
		)
	);
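	// The expected packing of U16 pairs into U32 depends on byte order: on a big-endian target the first
	// U16 lands in the high half of each U32, while on a little-endian target it lands in the low half,
	// which is why the two branches below swap the 65536 factor.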
	#ifdef DSR_BIG_ENDIAN
		ASSERT_EQUAL_SIMD(
			reinterpret_U32FromU16(U16x8(1, 2, 3, 4, 5, 6, 7, 8)),
			U32x4(1 * 65536 + 2, 3 * 65536 + 4, 5 * 65536 + 6, 7 * 65536 + 8)
		);
		ASSERT_EQUAL_SIMD(
			reinterpret_U32FromU16(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)),
			U32x8(1 * 65536 + 2, 3 * 65536 + 4, 5 * 65536 + 6, 7 * 65536 + 8, 9 * 65536 + 10, 11 * 65536 + 12, 13 * 65536 + 14, 15 * 65536 + 16)
		);
		ASSERT_EQUAL_SIMD(
			reinterpret_U16FromU32(U32x4(1 * 65536 + 2, 3 * 65536 + 4, 5 * 65536 + 6, 7 * 65536 + 8)),
			U16x8(1, 2, 3, 4, 5, 6, 7, 8)
		);
		ASSERT_EQUAL_SIMD(
			reinterpret_U16FromU32(U32x8(1 * 65536 + 2, 3 * 65536 + 4, 5 * 65536 + 6, 7 * 65536 + 8, 9 * 65536 + 10, 11 * 65536 + 12, 13 * 65536 + 14, 15 * 65536 + 16)),
			U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)
		);
	#else
		ASSERT_EQUAL_SIMD(
			reinterpret_U32FromU16(U16x8(1, 2, 3, 4, 5, 6, 7, 8)),
			U32x4(1 + 2 * 65536, 3 + 4 * 65536, 5 + 6 * 65536, 7 + 8 * 65536)
		);
		ASSERT_EQUAL_SIMD(
			reinterpret_U32FromU16(U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)),
			U32x8(1 + 2 * 65536, 3 + 4 * 65536, 5 + 6 * 65536, 7 + 8 * 65536, 9 + 10 * 65536, 11 + 12 * 65536, 13 + 14 * 65536, 15 + 16 * 65536)
		);
		ASSERT_EQUAL_SIMD(
			reinterpret_U16FromU32(U32x4(1 + 2 * 65536, 3 + 4 * 65536, 5 + 6 * 65536, 7 + 8 * 65536)),
			U16x8(1, 2, 3, 4, 5, 6, 7, 8)
		);
		ASSERT_EQUAL_SIMD(
			reinterpret_U16FromU32(U32x8(1 + 2 * 65536, 3 + 4 * 65536, 5 + 6 * 65536, 7 + 8 * 65536, 9 + 10 * 65536, 11 + 12 * 65536, 13 + 14 * 65536, 15 + 16 * 65536)),
			U16x16(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)
		);
	#endif
	testVectorExtract();
	testGather();
END_TEST