; msan_basic.ll — MemorySanitizer instrumentation regression test (FileCheck).
  1. ; RUN: opt < %s -msan -msan-check-access-address=0 -S | FileCheck %s
  2. ; RUN: opt < %s -msan -msan-check-access-address=0 -msan-track-origins=1 -S | FileCheck -check-prefix=CHECK -check-prefix=CHECK-ORIGINS %s
  3. target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
  4. target triple = "x86_64-unknown-linux-gnu"
  5. ; CHECK: @llvm.global_ctors {{.*}} @msan.module_ctor
  6. ; Check the presence and the linkage type of __msan_track_origins and
  7. ; other interface symbols.
  8. ; CHECK-NOT: @__msan_track_origins
  9. ; CHECK-ORIGINS: @__msan_track_origins = weak_odr constant i32 1
  10. ; CHECK-NOT: @__msan_keep_going = weak_odr constant i32 0
  11. ; CHECK: @__msan_retval_tls = external thread_local(initialexec) global [{{.*}}]
  12. ; CHECK: @__msan_retval_origin_tls = external thread_local(initialexec) global i32
  13. ; CHECK: @__msan_param_tls = external thread_local(initialexec) global [{{.*}}]
  14. ; CHECK: @__msan_param_origin_tls = external thread_local(initialexec) global [{{.*}}]
  15. ; CHECK: @__msan_va_arg_tls = external thread_local(initialexec) global [{{.*}}]
  16. ; CHECK: @__msan_va_arg_overflow_size_tls = external thread_local(initialexec) global i64
  17. ; CHECK: @__msan_origin_tls = external thread_local(initialexec) global i32
  18. ; Check instrumentation of stores
  19. define void @Store(i32* nocapture %p, i32 %x) nounwind uwtable sanitize_memory {
  20. entry:
  21. store i32 %x, i32* %p, align 4
  22. ret void
  23. }
  24. ; CHECK: @Store
  25. ; CHECK: load {{.*}} @__msan_param_tls
  26. ; CHECK-ORIGINS: load {{.*}} @__msan_param_origin_tls
  27. ; CHECK: store
  28. ; CHECK-ORIGINS: icmp
  29. ; CHECK-ORIGINS: br i1
  30. ; CHECK-ORIGINS: <label>
  31. ; CHECK-ORIGINS: store
  32. ; CHECK-ORIGINS: br label
  33. ; CHECK-ORIGINS: <label>
  34. ; CHECK: store
  35. ; CHECK: ret void
  36. ; Check instrumentation of aligned stores
  37. ; Shadow store has the same alignment as the original store; origin store
  38. ; does not specify explicit alignment.
  39. define void @AlignedStore(i32* nocapture %p, i32 %x) nounwind uwtable sanitize_memory {
  40. entry:
  41. store i32 %x, i32* %p, align 32
  42. ret void
  43. }
  44. ; CHECK: @AlignedStore
  45. ; CHECK: load {{.*}} @__msan_param_tls
  46. ; CHECK-ORIGINS: load {{.*}} @__msan_param_origin_tls
  47. ; CHECK: store {{.*}} align 32
  48. ; CHECK-ORIGINS: icmp
  49. ; CHECK-ORIGINS: br i1
  50. ; CHECK-ORIGINS: <label>
  51. ; CHECK-ORIGINS: store {{.*}} align 32
  52. ; CHECK-ORIGINS: br label
  53. ; CHECK-ORIGINS: <label>
  54. ; CHECK: store {{.*}} align 32
  55. ; CHECK: ret void
  56. ; load followed by cmp: check that we load the shadow and call __msan_warning.
  57. define void @LoadAndCmp(i32* nocapture %a) nounwind uwtable sanitize_memory {
  58. entry:
  59. %0 = load i32, i32* %a, align 4
  60. %tobool = icmp eq i32 %0, 0
  61. br i1 %tobool, label %if.end, label %if.then
  62. if.then: ; preds = %entry
  63. tail call void (...) @foo() nounwind
  64. br label %if.end
  65. if.end: ; preds = %entry, %if.then
  66. ret void
  67. }
  68. declare void @foo(...)
  69. ; CHECK: @LoadAndCmp
  70. ; CHECK: = load
  71. ; CHECK: = load
  72. ; CHECK: call void @__msan_warning_noreturn()
  73. ; CHECK-NEXT: call void asm sideeffect
  74. ; CHECK-NEXT: unreachable
  75. ; CHECK: ret void
  76. ; Check that we store the shadow for the retval.
  77. define i32 @ReturnInt() nounwind uwtable readnone sanitize_memory {
  78. entry:
  79. ret i32 123
  80. }
  81. ; CHECK: @ReturnInt
  82. ; CHECK: store i32 0,{{.*}}__msan_retval_tls
  83. ; CHECK: ret i32
  84. ; Check that we get the shadow for the retval.
  85. define void @CopyRetVal(i32* nocapture %a) nounwind uwtable sanitize_memory {
  86. entry:
  87. %call = tail call i32 @ReturnInt() nounwind
  88. store i32 %call, i32* %a, align 4
  89. ret void
  90. }
  91. ; CHECK: @CopyRetVal
  92. ; CHECK: load{{.*}}__msan_retval_tls
  93. ; CHECK: store
  94. ; CHECK: store
  95. ; CHECK: ret void
  96. ; Check that we generate PHIs for shadow.
  97. define void @FuncWithPhi(i32* nocapture %a, i32* %b, i32* nocapture %c) nounwind uwtable sanitize_memory {
  98. entry:
  99. %tobool = icmp eq i32* %b, null
  100. br i1 %tobool, label %if.else, label %if.then
  101. if.then: ; preds = %entry
  102. %0 = load i32, i32* %b, align 4
  103. br label %if.end
  104. if.else: ; preds = %entry
  105. %1 = load i32, i32* %c, align 4
  106. br label %if.end
  107. if.end: ; preds = %if.else, %if.then
  108. %t.0 = phi i32 [ %0, %if.then ], [ %1, %if.else ]
  109. store i32 %t.0, i32* %a, align 4
  110. ret void
  111. }
  112. ; CHECK: @FuncWithPhi
  113. ; CHECK: = phi
  114. ; CHECK-NEXT: = phi
  115. ; CHECK: store
  116. ; CHECK: store
  117. ; CHECK: ret void
  118. ; Compute shadow for "x << 10"
  119. define void @ShlConst(i32* nocapture %x) nounwind uwtable sanitize_memory {
  120. entry:
  121. %0 = load i32, i32* %x, align 4
  122. %1 = shl i32 %0, 10
  123. store i32 %1, i32* %x, align 4
  124. ret void
  125. }
  126. ; CHECK: @ShlConst
  127. ; CHECK: = load
  128. ; CHECK: = load
  129. ; CHECK: shl
  130. ; CHECK: shl
  131. ; CHECK: store
  132. ; CHECK: store
  133. ; CHECK: ret void
  134. ; Compute shadow for "10 << x": it should have 'sext i1'.
  135. define void @ShlNonConst(i32* nocapture %x) nounwind uwtable sanitize_memory {
  136. entry:
  137. %0 = load i32, i32* %x, align 4
  138. %1 = shl i32 10, %0
  139. store i32 %1, i32* %x, align 4
  140. ret void
  141. }
  142. ; CHECK: @ShlNonConst
  143. ; CHECK: = load
  144. ; CHECK: = load
  145. ; CHECK: = sext i1
  146. ; CHECK: store
  147. ; CHECK: store
  148. ; CHECK: ret void
  149. ; SExt
  150. define void @SExt(i32* nocapture %a, i16* nocapture %b) nounwind uwtable sanitize_memory {
  151. entry:
  152. %0 = load i16, i16* %b, align 2
  153. %1 = sext i16 %0 to i32
  154. store i32 %1, i32* %a, align 4
  155. ret void
  156. }
  157. ; CHECK: @SExt
  158. ; CHECK: = load
  159. ; CHECK: = load
  160. ; CHECK: = sext
  161. ; CHECK: = sext
  162. ; CHECK: store
  163. ; CHECK: store
  164. ; CHECK: ret void
  165. ; memset
  166. define void @MemSet(i8* nocapture %x) nounwind uwtable sanitize_memory {
  167. entry:
  168. call void @llvm.memset.p0i8.i64(i8* %x, i8 42, i64 10, i32 1, i1 false)
  169. ret void
  170. }
  171. declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i32, i1) nounwind
  172. ; CHECK: @MemSet
  173. ; CHECK: call i8* @__msan_memset
  174. ; CHECK: ret void
  175. ; memcpy
  176. define void @MemCpy(i8* nocapture %x, i8* nocapture %y) nounwind uwtable sanitize_memory {
  177. entry:
  178. call void @llvm.memcpy.p0i8.p0i8.i64(i8* %x, i8* %y, i64 10, i32 1, i1 false)
  179. ret void
  180. }
  181. declare void @llvm.memcpy.p0i8.p0i8.i64(i8* nocapture, i8* nocapture, i64, i32, i1) nounwind
  182. ; CHECK: @MemCpy
  183. ; CHECK: call i8* @__msan_memcpy
  184. ; CHECK: ret void
  185. ; memmove is lowered to a call
  186. define void @MemMove(i8* nocapture %x, i8* nocapture %y) nounwind uwtable sanitize_memory {
  187. entry:
  188. call void @llvm.memmove.p0i8.p0i8.i64(i8* %x, i8* %y, i64 10, i32 1, i1 false)
  189. ret void
  190. }
  191. declare void @llvm.memmove.p0i8.p0i8.i64(i8* nocapture, i8* nocapture, i64, i32, i1) nounwind
  192. ; CHECK: @MemMove
  193. ; CHECK: call i8* @__msan_memmove
  194. ; CHECK: ret void
  195. ; Check that we propagate shadow for "select"
  196. define i32 @Select(i32 %a, i32 %b, i1 %c) nounwind uwtable readnone sanitize_memory {
  197. entry:
  198. %cond = select i1 %c, i32 %a, i32 %b
  199. ret i32 %cond
  200. }
  201. ; CHECK: @Select
  202. ; CHECK: select i1
  203. ; CHECK-DAG: or i32
  204. ; CHECK-DAG: xor i32
  205. ; CHECK: or i32
  206. ; CHECK-DAG: select i1
  207. ; CHECK-ORIGINS-DAG: select
  208. ; CHECK-ORIGINS-DAG: select
  209. ; CHECK-DAG: select i1
  210. ; CHECK: store i32{{.*}}@__msan_retval_tls
  211. ; CHECK-ORIGINS: store i32{{.*}}@__msan_retval_origin_tls
  212. ; CHECK: ret i32
  213. ; Check that we propagate origin for "select" with vector condition.
  214. ; Select condition is flattened to i1, which is then used to select one of the
  215. ; argument origins.
  216. define <8 x i16> @SelectVector(<8 x i16> %a, <8 x i16> %b, <8 x i1> %c) nounwind uwtable readnone sanitize_memory {
  217. entry:
  218. %cond = select <8 x i1> %c, <8 x i16> %a, <8 x i16> %b
  219. ret <8 x i16> %cond
  220. }
  221. ; CHECK: @SelectVector
  222. ; CHECK: select <8 x i1>
  223. ; CHECK-DAG: or <8 x i16>
  224. ; CHECK-DAG: xor <8 x i16>
  225. ; CHECK: or <8 x i16>
  226. ; CHECK-DAG: select <8 x i1>
  227. ; CHECK-ORIGINS-DAG: select
  228. ; CHECK-ORIGINS-DAG: select
  229. ; CHECK-DAG: select <8 x i1>
  230. ; CHECK: store <8 x i16>{{.*}}@__msan_retval_tls
  231. ; CHECK-ORIGINS: store i32{{.*}}@__msan_retval_origin_tls
  232. ; CHECK: ret <8 x i16>
  233. ; Check that we propagate origin for "select" with scalar condition and vector
  234. ; arguments. Select condition shadow is sign-extended to the vector type and
  235. ; mixed into the result shadow.
  236. define <8 x i16> @SelectVector2(<8 x i16> %a, <8 x i16> %b, i1 %c) nounwind uwtable readnone sanitize_memory {
  237. entry:
  238. %cond = select i1 %c, <8 x i16> %a, <8 x i16> %b
  239. ret <8 x i16> %cond
  240. }
  241. ; CHECK: @SelectVector2
  242. ; CHECK: select i1
  243. ; CHECK-DAG: or <8 x i16>
  244. ; CHECK-DAG: xor <8 x i16>
  245. ; CHECK: or <8 x i16>
  246. ; CHECK-DAG: select i1
  247. ; CHECK-ORIGINS-DAG: select i1
  248. ; CHECK-ORIGINS-DAG: select i1
  249. ; CHECK-DAG: select i1
  250. ; CHECK: ret <8 x i16>
  251. define { i64, i64 } @SelectStruct(i1 zeroext %x, { i64, i64 } %a, { i64, i64 } %b) readnone sanitize_memory {
  252. entry:
  253. %c = select i1 %x, { i64, i64 } %a, { i64, i64 } %b
  254. ret { i64, i64 } %c
  255. }
  256. ; CHECK: @SelectStruct
  257. ; CHECK: select i1 {{.*}}, { i64, i64 }
  258. ; CHECK-NEXT: select i1 {{.*}}, { i64, i64 } { i64 -1, i64 -1 }, { i64, i64 }
  259. ; CHECK-ORIGINS: select i1
  260. ; CHECK-ORIGINS: select i1
  261. ; CHECK-NEXT: select i1 {{.*}}, { i64, i64 }
  262. ; CHECK: ret { i64, i64 }
  263. define { i64*, double } @SelectStruct2(i1 zeroext %x, { i64*, double } %a, { i64*, double } %b) readnone sanitize_memory {
  264. entry:
  265. %c = select i1 %x, { i64*, double } %a, { i64*, double } %b
  266. ret { i64*, double } %c
  267. }
  268. ; CHECK: @SelectStruct2
  269. ; CHECK: select i1 {{.*}}, { i64, i64 }
  270. ; CHECK-NEXT: select i1 {{.*}}, { i64, i64 } { i64 -1, i64 -1 }, { i64, i64 }
  271. ; CHECK-ORIGINS: select i1
  272. ; CHECK-ORIGINS: select i1
  273. ; CHECK-NEXT: select i1 {{.*}}, { i64*, double }
  274. ; CHECK: ret { i64*, double }
  275. define i8* @IntToPtr(i64 %x) nounwind uwtable readnone sanitize_memory {
  276. entry:
  277. %0 = inttoptr i64 %x to i8*
  278. ret i8* %0
  279. }
  280. ; CHECK: @IntToPtr
  281. ; CHECK: load i64, i64*{{.*}}__msan_param_tls
  282. ; CHECK-ORIGINS-NEXT: load i32, i32*{{.*}}__msan_param_origin_tls
  283. ; CHECK-NEXT: inttoptr
  284. ; CHECK-NEXT: store i64{{.*}}__msan_retval_tls
  285. ; CHECK: ret i8*
  286. define i8* @IntToPtr_ZExt(i16 %x) nounwind uwtable readnone sanitize_memory {
  287. entry:
  288. %0 = inttoptr i16 %x to i8*
  289. ret i8* %0
  290. }
  291. ; CHECK: @IntToPtr_ZExt
  292. ; CHECK: load i16, i16*{{.*}}__msan_param_tls
  293. ; CHECK: zext
  294. ; CHECK-NEXT: inttoptr
  295. ; CHECK-NEXT: store i64{{.*}}__msan_retval_tls
  296. ; CHECK: ret i8*
  297. ; Check that we insert exactly one check on udiv
  298. ; (2nd arg shadow is checked, 1st arg shadow is propagated)
  299. define i32 @Div(i32 %a, i32 %b) nounwind uwtable readnone sanitize_memory {
  300. entry:
  301. %div = udiv i32 %a, %b
  302. ret i32 %div
  303. }
  304. ; CHECK: @Div
  305. ; CHECK: icmp
  306. ; CHECK: call void @__msan_warning
  307. ; CHECK-NOT: icmp
  308. ; CHECK: udiv
  309. ; CHECK-NOT: icmp
  310. ; CHECK: ret i32
  311. ; Check that we propagate shadow for x<0, x>=0, etc (i.e. sign bit tests)
  312. define zeroext i1 @ICmpSLT(i32 %x) nounwind uwtable readnone sanitize_memory {
  313. %1 = icmp slt i32 %x, 0
  314. ret i1 %1
  315. }
  316. ; CHECK: @ICmpSLT
  317. ; CHECK: icmp slt
  318. ; CHECK-NOT: call void @__msan_warning
  319. ; CHECK: icmp slt
  320. ; CHECK-NOT: call void @__msan_warning
  321. ; CHECK: ret i1
  322. define zeroext i1 @ICmpSGE(i32 %x) nounwind uwtable readnone sanitize_memory {
  323. %1 = icmp sge i32 %x, 0
  324. ret i1 %1
  325. }
  326. ; CHECK: @ICmpSGE
  327. ; CHECK: icmp slt
  328. ; CHECK-NOT: call void @__msan_warning
  329. ; CHECK: icmp sge
  330. ; CHECK-NOT: call void @__msan_warning
  331. ; CHECK: ret i1
  332. define zeroext i1 @ICmpSGT(i32 %x) nounwind uwtable readnone sanitize_memory {
  333. %1 = icmp sgt i32 0, %x
  334. ret i1 %1
  335. }
  336. ; CHECK: @ICmpSGT
  337. ; CHECK: icmp slt
  338. ; CHECK-NOT: call void @__msan_warning
  339. ; CHECK: icmp sgt
  340. ; CHECK-NOT: call void @__msan_warning
  341. ; CHECK: ret i1
  342. define zeroext i1 @ICmpSLE(i32 %x) nounwind uwtable readnone sanitize_memory {
  343. %1 = icmp sle i32 0, %x
  344. ret i1 %1
  345. }
  346. ; CHECK: @ICmpSLE
  347. ; CHECK: icmp slt
  348. ; CHECK-NOT: call void @__msan_warning
  349. ; CHECK: icmp sle
  350. ; CHECK-NOT: call void @__msan_warning
  351. ; CHECK: ret i1
  352. ; Check that we propagate shadow for x<0, x>=0, etc (i.e. sign bit tests)
  353. ; of the vector arguments.
  354. define <2 x i1> @ICmpSLT_vector(<2 x i32*> %x) nounwind uwtable readnone sanitize_memory {
  355. %1 = icmp slt <2 x i32*> %x, zeroinitializer
  356. ret <2 x i1> %1
  357. }
  358. ; CHECK: @ICmpSLT_vector
  359. ; CHECK: icmp slt <2 x i64>
  360. ; CHECK-NOT: call void @__msan_warning
  361. ; CHECK: icmp slt <2 x i32*>
  362. ; CHECK-NOT: call void @__msan_warning
  363. ; CHECK: ret <2 x i1>
  364. ; Check that we propagate shadow for unsigned relational comparisons with
  365. ; constants
  366. define zeroext i1 @ICmpUGTConst(i32 %x) nounwind uwtable readnone sanitize_memory {
  367. entry:
  368. %cmp = icmp ugt i32 %x, 7
  369. ret i1 %cmp
  370. }
  371. ; CHECK: @ICmpUGTConst
  372. ; CHECK: icmp ugt i32
  373. ; CHECK-NOT: call void @__msan_warning
  374. ; CHECK: icmp ugt i32
  375. ; CHECK-NOT: call void @__msan_warning
  376. ; CHECK: icmp ugt i32
  377. ; CHECK-NOT: call void @__msan_warning
  378. ; CHECK: ret i1
  379. ; Check that loads of shadow have the same aligment as the original loads.
  380. ; Check that loads of origin have the aligment of max(4, original alignment).
  381. define i32 @ShadowLoadAlignmentLarge() nounwind uwtable sanitize_memory {
  382. %y = alloca i32, align 64
  383. %1 = load volatile i32, i32* %y, align 64
  384. ret i32 %1
  385. }
  386. ; CHECK: @ShadowLoadAlignmentLarge
  387. ; CHECK: load volatile i32, i32* {{.*}} align 64
  388. ; CHECK: load i32, i32* {{.*}} align 64
  389. ; CHECK: ret i32
  390. define i32 @ShadowLoadAlignmentSmall() nounwind uwtable sanitize_memory {
  391. %y = alloca i32, align 2
  392. %1 = load volatile i32, i32* %y, align 2
  393. ret i32 %1
  394. }
  395. ; CHECK: @ShadowLoadAlignmentSmall
  396. ; CHECK: load volatile i32, i32* {{.*}} align 2
  397. ; CHECK: load i32, i32* {{.*}} align 2
  398. ; CHECK-ORIGINS: load i32, i32* {{.*}} align 4
  399. ; CHECK: ret i32
  400. ; Test vector manipulation instructions.
  401. ; Check that the same bit manipulation is applied to the shadow values.
  402. ; Check that there is a zero test of the shadow of %idx argument, where present.
  403. define i32 @ExtractElement(<4 x i32> %vec, i32 %idx) sanitize_memory {
  404. %x = extractelement <4 x i32> %vec, i32 %idx
  405. ret i32 %x
  406. }
  407. ; CHECK: @ExtractElement
  408. ; CHECK: extractelement
  409. ; CHECK: call void @__msan_warning
  410. ; CHECK: extractelement
  411. ; CHECK: ret i32
  412. define <4 x i32> @InsertElement(<4 x i32> %vec, i32 %idx, i32 %x) sanitize_memory {
  413. %vec1 = insertelement <4 x i32> %vec, i32 %x, i32 %idx
  414. ret <4 x i32> %vec1
  415. }
  416. ; CHECK: @InsertElement
  417. ; CHECK: insertelement
  418. ; CHECK: call void @__msan_warning
  419. ; CHECK: insertelement
  420. ; CHECK: ret <4 x i32>
  421. define <4 x i32> @ShuffleVector(<4 x i32> %vec, <4 x i32> %vec1) sanitize_memory {
  422. %vec2 = shufflevector <4 x i32> %vec, <4 x i32> %vec1,
  423. <4 x i32> <i32 0, i32 4, i32 1, i32 5>
  424. ret <4 x i32> %vec2
  425. }
  426. ; CHECK: @ShuffleVector
  427. ; CHECK: shufflevector
  428. ; CHECK-NOT: call void @__msan_warning
  429. ; CHECK: shufflevector
  430. ; CHECK: ret <4 x i32>
  431. ; Test bswap intrinsic instrumentation
  432. define i32 @BSwap(i32 %x) nounwind uwtable readnone sanitize_memory {
  433. %y = tail call i32 @llvm.bswap.i32(i32 %x)
  434. ret i32 %y
  435. }
  436. declare i32 @llvm.bswap.i32(i32) nounwind readnone
  437. ; CHECK: @BSwap
  438. ; CHECK-NOT: call void @__msan_warning
  439. ; CHECK: @llvm.bswap.i32
  440. ; CHECK-NOT: call void @__msan_warning
  441. ; CHECK: @llvm.bswap.i32
  442. ; CHECK-NOT: call void @__msan_warning
  443. ; CHECK: ret i32
  444. ; Store intrinsic.
  445. define void @StoreIntrinsic(i8* %p, <4 x float> %x) nounwind uwtable sanitize_memory {
  446. call void @llvm.x86.sse.storeu.ps(i8* %p, <4 x float> %x)
  447. ret void
  448. }
  449. declare void @llvm.x86.sse.storeu.ps(i8*, <4 x float>) nounwind
  450. ; CHECK: @StoreIntrinsic
  451. ; CHECK-NOT: br
  452. ; CHECK-NOT: = or
  453. ; CHECK: store <4 x i32> {{.*}} align 1
  454. ; CHECK: call void @llvm.x86.sse.storeu.ps
  455. ; CHECK: ret void
  456. ; Load intrinsic.
  457. define <16 x i8> @LoadIntrinsic(i8* %p) nounwind uwtable sanitize_memory {
  458. %call = call <16 x i8> @llvm.x86.sse3.ldu.dq(i8* %p)
  459. ret <16 x i8> %call
  460. }
  461. declare <16 x i8> @llvm.x86.sse3.ldu.dq(i8* %p) nounwind
  462. ; CHECK: @LoadIntrinsic
  463. ; CHECK: load <16 x i8>, <16 x i8>* {{.*}} align 1
  464. ; CHECK-ORIGINS: [[ORIGIN:%[01-9a-z]+]] = load i32, i32* {{.*}}
  465. ; CHECK-NOT: br
  466. ; CHECK-NOT: = or
  467. ; CHECK: call <16 x i8> @llvm.x86.sse3.ldu.dq
  468. ; CHECK: store <16 x i8> {{.*}} @__msan_retval_tls
  469. ; CHECK-ORIGINS: store i32 {{.*}}[[ORIGIN]], i32* @__msan_retval_origin_tls
  470. ; CHECK: ret <16 x i8>
  471. ; Simple NoMem intrinsic
  472. ; Check that shadow is OR'ed, and origin is Select'ed
  473. ; And no shadow checks!
  474. define <8 x i16> @Paddsw128(<8 x i16> %a, <8 x i16> %b) nounwind uwtable sanitize_memory {
  475. %call = call <8 x i16> @llvm.x86.sse2.padds.w(<8 x i16> %a, <8 x i16> %b)
  476. ret <8 x i16> %call
  477. }
  478. declare <8 x i16> @llvm.x86.sse2.padds.w(<8 x i16> %a, <8 x i16> %b) nounwind
  479. ; CHECK: @Paddsw128
  480. ; CHECK-NEXT: load <8 x i16>, <8 x i16>* {{.*}} @__msan_param_tls
  481. ; CHECK-ORIGINS: load i32, i32* {{.*}} @__msan_param_origin_tls
  482. ; CHECK-NEXT: load <8 x i16>, <8 x i16>* {{.*}} @__msan_param_tls
  483. ; CHECK-ORIGINS: load i32, i32* {{.*}} @__msan_param_origin_tls
  484. ; CHECK-NEXT: = or <8 x i16>
  485. ; CHECK-ORIGINS: = bitcast <8 x i16> {{.*}} to i128
  486. ; CHECK-ORIGINS-NEXT: = icmp ne i128 {{.*}}, 0
  487. ; CHECK-ORIGINS-NEXT: = select i1 {{.*}}, i32 {{.*}}, i32
  488. ; CHECK-NEXT: call <8 x i16> @llvm.x86.sse2.padds.w
  489. ; CHECK-NEXT: store <8 x i16> {{.*}} @__msan_retval_tls
  490. ; CHECK-ORIGINS: store i32 {{.*}} @__msan_retval_origin_tls
  491. ; CHECK-NEXT: ret <8 x i16>
  492. ; Test handling of vectors of pointers.
  493. ; Check that shadow of such vector is a vector of integers.
  494. define <8 x i8*> @VectorOfPointers(<8 x i8*>* %p) nounwind uwtable sanitize_memory {
  495. %x = load <8 x i8*>, <8 x i8*>* %p
  496. ret <8 x i8*> %x
  497. }
  498. ; CHECK: @VectorOfPointers
  499. ; CHECK: load <8 x i8*>, <8 x i8*>*
  500. ; CHECK: load <8 x i64>, <8 x i64>*
  501. ; CHECK: store <8 x i64> {{.*}} @__msan_retval_tls
  502. ; CHECK: ret <8 x i8*>
  503. ; Test handling of va_copy.
  504. declare void @llvm.va_copy(i8*, i8*) nounwind
  505. define void @VACopy(i8* %p1, i8* %p2) nounwind uwtable sanitize_memory {
  506. call void @llvm.va_copy(i8* %p1, i8* %p2) nounwind
  507. ret void
  508. }
  509. ; CHECK: @VACopy
  510. ; CHECK: call void @llvm.memset.p0i8.i64({{.*}}, i8 0, i64 24, i32 8, i1 false)
  511. ; CHECK: ret void
  512. ; Test that va_start instrumentation does not use va_arg_tls*.
  513. ; It should work with a local stack copy instead.
  514. %struct.__va_list_tag = type { i32, i32, i8*, i8* }
  515. declare void @llvm.va_start(i8*) nounwind
  516. ; Function Attrs: nounwind uwtable
  517. define void @VAStart(i32 %x, ...) sanitize_memory {
  518. entry:
  519. %x.addr = alloca i32, align 4
  520. %va = alloca [1 x %struct.__va_list_tag], align 16
  521. store i32 %x, i32* %x.addr, align 4
  522. %arraydecay = getelementptr inbounds [1 x %struct.__va_list_tag], [1 x %struct.__va_list_tag]* %va, i32 0, i32 0
  523. %arraydecay1 = bitcast %struct.__va_list_tag* %arraydecay to i8*
  524. call void @llvm.va_start(i8* %arraydecay1)
  525. ret void
  526. }
  527. ; CHECK: @VAStart
  528. ; CHECK: call void @llvm.va_start
  529. ; CHECK-NOT: @__msan_va_arg_tls
  530. ; CHECK-NOT: @__msan_va_arg_overflow_size_tls
  531. ; CHECK: ret void
  532. ; Test handling of volatile stores.
  533. ; Check that MemorySanitizer does not add a check of the value being stored.
  534. define void @VolatileStore(i32* nocapture %p, i32 %x) nounwind uwtable sanitize_memory {
  535. entry:
  536. store volatile i32 %x, i32* %p, align 4
  537. ret void
  538. }
  539. ; CHECK: @VolatileStore
  540. ; CHECK-NOT: @__msan_warning
  541. ; CHECK: ret void
  542. ; Test that checks are omitted and returned value is always initialized if
  543. ; sanitize_memory attribute is missing.
  544. define i32 @NoSanitizeMemory(i32 %x) uwtable {
  545. entry:
  546. %tobool = icmp eq i32 %x, 0
  547. br i1 %tobool, label %if.end, label %if.then
  548. if.then: ; preds = %entry
  549. tail call void @bar()
  550. br label %if.end
  551. if.end: ; preds = %entry, %if.then
  552. ret i32 %x
  553. }
  554. declare void @bar()
  555. ; CHECK: @NoSanitizeMemory
  556. ; CHECK-NOT: @__msan_warning
  557. ; CHECK: store i32 0, {{.*}} @__msan_retval_tls
  558. ; CHECK-NOT: @__msan_warning
  559. ; CHECK: ret i32
  560. ; Test that stack allocations are unpoisoned in functions missing
  561. ; sanitize_memory attribute
  562. define i32 @NoSanitizeMemoryAlloca() {
  563. entry:
  564. %p = alloca i32, align 4
  565. %x = call i32 @NoSanitizeMemoryAllocaHelper(i32* %p)
  566. ret i32 %x
  567. }
  568. declare i32 @NoSanitizeMemoryAllocaHelper(i32* %p)
  569. ; CHECK: @NoSanitizeMemoryAlloca
  570. ; CHECK: call void @llvm.memset.p0i8.i64(i8* {{.*}}, i8 0, i64 4, i32 4, i1 false)
  571. ; CHECK: call i32 @NoSanitizeMemoryAllocaHelper(i32*
  572. ; CHECK: ret i32
  573. ; Test that undef is unpoisoned in functions missing
  574. ; sanitize_memory attribute
  575. define i32 @NoSanitizeMemoryUndef() {
  576. entry:
  577. %x = call i32 @NoSanitizeMemoryUndefHelper(i32 undef)
  578. ret i32 %x
  579. }
  580. declare i32 @NoSanitizeMemoryUndefHelper(i32 %x)
  581. ; CHECK: @NoSanitizeMemoryAlloca
  582. ; CHECK: store i32 0, i32* {{.*}} @__msan_param_tls
  583. ; CHECK: call i32 @NoSanitizeMemoryUndefHelper(i32 undef)
  584. ; CHECK: ret i32
  585. ; Test PHINode instrumentation in blacklisted functions
  586. define i32 @NoSanitizeMemoryPHI(i32 %x) {
  587. entry:
  588. %tobool = icmp ne i32 %x, 0
  589. br i1 %tobool, label %cond.true, label %cond.false
  590. cond.true: ; preds = %entry
  591. br label %cond.end
  592. cond.false: ; preds = %entry
  593. br label %cond.end
  594. cond.end: ; preds = %cond.false, %cond.true
  595. %cond = phi i32 [ undef, %cond.true ], [ undef, %cond.false ]
  596. ret i32 %cond
  597. }
  598. ; CHECK: [[A:%.*]] = phi i32 [ undef, %cond.true ], [ undef, %cond.false ]
  599. ; CHECK: store i32 0, i32* bitcast {{.*}} @__msan_retval_tls
  600. ; CHECK: ret i32 [[A]]
  601. ; Test that there are no __msan_param_origin_tls stores when
  602. ; argument shadow is a compile-time zero constant (which is always the case
  603. ; in functions missing sanitize_memory attribute).
  604. define i32 @NoSanitizeMemoryParamTLS(i32* nocapture readonly %x) {
  605. entry:
  606. %0 = load i32, i32* %x, align 4
  607. %call = tail call i32 @NoSanitizeMemoryParamTLSHelper(i32 %0)
  608. ret i32 %call
  609. }
  610. declare i32 @NoSanitizeMemoryParamTLSHelper(i32 %x)
  611. ; CHECK-LABEL: define i32 @NoSanitizeMemoryParamTLS(
  612. ; CHECK-NOT: __msan_param_origin_tls
  613. ; CHECK: ret i32
  614. ; Test argument shadow alignment
  615. define <2 x i64> @ArgumentShadowAlignment(i64 %a, <2 x i64> %b) sanitize_memory {
  616. entry:
  617. ret <2 x i64> %b
  618. }
  619. ; CHECK: @ArgumentShadowAlignment
  620. ; CHECK: load <2 x i64>, <2 x i64>* {{.*}} @__msan_param_tls {{.*}}, align 8
  621. ; CHECK: store <2 x i64> {{.*}} @__msan_retval_tls {{.*}}, align 8
  622. ; CHECK: ret <2 x i64>
  623. ; Test origin propagation for insertvalue
  624. define { i64, i32 } @make_pair_64_32(i64 %x, i32 %y) sanitize_memory {
  625. entry:
  626. %a = insertvalue { i64, i32 } undef, i64 %x, 0
  627. %b = insertvalue { i64, i32 } %a, i32 %y, 1
  628. ret { i64, i32 } %b
  629. }
  630. ; CHECK-ORIGINS: @make_pair_64_32
  631. ; First element shadow
  632. ; CHECK-ORIGINS: insertvalue { i64, i32 } { i64 -1, i32 -1 }, i64 {{.*}}, 0
  633. ; First element origin
  634. ; CHECK-ORIGINS: icmp ne i64
  635. ; CHECK-ORIGINS: select i1
  636. ; First element app value
  637. ; CHECK-ORIGINS: insertvalue { i64, i32 } undef, i64 {{.*}}, 0
  638. ; Second element shadow
  639. ; CHECK-ORIGINS: insertvalue { i64, i32 } {{.*}}, i32 {{.*}}, 1
  640. ; Second element origin
  641. ; CHECK-ORIGINS: icmp ne i32
  642. ; CHECK-ORIGINS: select i1
  643. ; Second element app value
  644. ; CHECK-ORIGINS: insertvalue { i64, i32 } {{.*}}, i32 {{.*}}, 1
  645. ; CHECK-ORIGINS: ret { i64, i32 }
  646. ; Test shadow propagation for aggregates passed through ellipsis.
  647. %struct.StructByVal = type { i32, i32, i32, i32 }
  648. declare void @VAArgStructFn(i32 %guard, ...)
  649. define void @VAArgStruct(%struct.StructByVal* nocapture %s) sanitize_memory {
  650. entry:
  651. %agg.tmp2 = alloca %struct.StructByVal, align 8
  652. %0 = bitcast %struct.StructByVal* %s to i8*
  653. %agg.tmp.sroa.0.0..sroa_cast = bitcast %struct.StructByVal* %s to i64*
  654. %agg.tmp.sroa.0.0.copyload = load i64, i64* %agg.tmp.sroa.0.0..sroa_cast, align 4
  655. %agg.tmp.sroa.2.0..sroa_idx = getelementptr inbounds %struct.StructByVal, %struct.StructByVal* %s, i64 0, i32 2
  656. %agg.tmp.sroa.2.0..sroa_cast = bitcast i32* %agg.tmp.sroa.2.0..sroa_idx to i64*
  657. %agg.tmp.sroa.2.0.copyload = load i64, i64* %agg.tmp.sroa.2.0..sroa_cast, align 4
  658. %1 = bitcast %struct.StructByVal* %agg.tmp2 to i8*
  659. call void @llvm.memcpy.p0i8.p0i8.i64(i8* %1, i8* %0, i64 16, i32 4, i1 false)
  660. call void (i32, ...) @VAArgStructFn(i32 undef, i64 %agg.tmp.sroa.0.0.copyload, i64 %agg.tmp.sroa.2.0.copyload, i64 %agg.tmp.sroa.0.0.copyload, i64 %agg.tmp.sroa.2.0.copyload, %struct.StructByVal* byval align 8 %agg.tmp2)
  661. ret void
  662. }
  663. ; "undef" and the first 2 structs go to general purpose registers;
  664. ; the third struct goes to the overflow area byval
  665. ; CHECK: @VAArgStruct
  666. ; undef
  667. ; CHECK: store i32 -1, i32* {{.*}}@__msan_va_arg_tls {{.*}}, align 8
  668. ; first struct through general purpose registers
  669. ; CHECK: store i64 {{.*}}, i64* {{.*}}@__msan_va_arg_tls{{.*}}, i64 8){{.*}}, align 8
  670. ; CHECK: store i64 {{.*}}, i64* {{.*}}@__msan_va_arg_tls{{.*}}, i64 16){{.*}}, align 8
  671. ; second struct through general purpose registers
  672. ; CHECK: store i64 {{.*}}, i64* {{.*}}@__msan_va_arg_tls{{.*}}, i64 24){{.*}}, align 8
  673. ; CHECK: store i64 {{.*}}, i64* {{.*}}@__msan_va_arg_tls{{.*}}, i64 32){{.*}}, align 8
  674. ; third struct through the overflow area byval
  675. ; CHECK: ptrtoint %struct.StructByVal* {{.*}} to i64
  676. ; CHECK: bitcast { i32, i32, i32, i32 }* {{.*}}@__msan_va_arg_tls {{.*}}, i64 176
  677. ; CHECK: call void @llvm.memcpy.p0i8.p0i8.i64
  678. ; CHECK: store i64 16, i64* @__msan_va_arg_overflow_size_tls
  679. ; CHECK: call void (i32, ...) @VAArgStructFn
  680. ; CHECK: ret void
  681. declare i32 @InnerTailCall(i32 %a)
  682. define void @MismatchedReturnTypeTailCall(i32 %a) sanitize_memory {
  683. %b = tail call i32 @InnerTailCall(i32 %a)
  684. ret void
  685. }
  686. ; We used to strip off the 'tail' modifier, but now that we unpoison return slot
  687. ; shadow before the call, we don't need to anymore.
  688. ; CHECK-LABEL: define void @MismatchedReturnTypeTailCall
  689. ; CHECK: tail call i32 @InnerTailCall
  690. ; CHECK: ret void
  691. ; CHECK-LABEL: define internal void @msan.module_ctor
  692. ; CHECK: call void @__msan_init()