{
    Copyright (c) 1998-2002 by Florian Klaempfl

    This unit implements the code generator for the i386

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

 ****************************************************************************
}

unit cgcpu;

{$i fpcdefs.inc}

interface

  uses
    globtype,
    cgbase,cgobj,cg64f32,cgx86,
    aasmbase,aasmtai,aasmdata,aasmcpu,
    cpubase,parabase,cgutils,
    symconst,symdef,symsym
    ;

  type
    tcg386 = class(tcgx86)
        procedure init_register_allocators;override;
        procedure do_register_allocation(list:TAsmList;headertai:tai);override;
        { passing parameter using push instead of mov }
        procedure a_load_reg_cgpara(list : TAsmList;size : tcgsize;r : tregister;const cgpara : tcgpara);override;
        procedure a_load_const_cgpara(list : TAsmList;size : tcgsize;a : tcgint;const cgpara : tcgpara);override;
        procedure a_load_ref_cgpara(list : TAsmList;size : tcgsize;const r : treference;const cgpara : tcgpara);override;
        procedure a_loadaddr_ref_cgpara(list : TAsmList;const r : treference;const cgpara : tcgpara);override;
        procedure g_proc_exit(list : TAsmList;parasize:longint;nostackframe:boolean);override;
        procedure g_copyvaluepara_openarray(list : TAsmList;const ref:treference;const lenloc:tlocation;elesize:tcgint;destreg:tregister);
        procedure g_releasevaluepara_openarray(list : TAsmList;const l:tlocation);
        procedure g_exception_reason_save(list : TAsmList; const href : treference);override;
        procedure g_exception_reason_save_const(list : TAsmList; const href : treference; a: tcgint);override;
        procedure g_exception_reason_load(list : TAsmList; const href : treference);override;
        procedure g_intf_wrapper(list: TAsmList; procdef: tprocdef; const labelname: string; ioffset: longint);override;
        procedure g_maybe_got_init(list: TAsmList); override;
      end;

    tcg64f386 = class(tcg64f32)
        procedure a_op64_ref_reg(list : TAsmList;op:TOpCG;size : tcgsize;const ref : treference;reg : tregister64);override;
        procedure a_op64_reg_reg(list : TAsmList;op:TOpCG;size : tcgsize;regsrc,regdst : tregister64);override;
        procedure a_op64_const_reg(list : TAsmList;op:TOpCG;size : tcgsize;value : int64;reg : tregister64);override;
        procedure a_op64_const_ref(list : TAsmList;op:TOpCG;size : tcgsize;value : int64;const ref : treference);override;
      private
        procedure get_64bit_ops(op:TOpCG;var op1,op2:TAsmOp);
      end;

  procedure create_codegen;

implementation

  uses
    globals,verbose,systems,cutils,
    paramgr,procinfo,fmodule,
    rgcpu,rgx86,cpuinfo;
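
  { Helper used by the a_load*_cgpara overrides below: a parameter can be
    passed with a direct PUSH (instead of a MOV into a precomputed stack
    slot) when no fixed stack layout is required and its location is a
    stack reference addressed through ESP. }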

  function use_push(const cgpara:tcgpara):boolean;
    begin
      result:=(not paramanager.use_fixed_stack) and
              assigned(cgpara.location) and
              (cgpara.location^.loc=LOC_REFERENCE) and
              (cgpara.location^.reference.index=NR_STACK_POINTER_REG);
    end;
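
  { A note on the register sets below: when generating PIC code on targets
    other than darwin/iphonesim, EBX is reserved as the GOT pointer (see
    g_maybe_got_init), so it is excluded from the allocatable set. EBP only
    becomes allocatable when cs_useebp is active and the current procedure
    does not use it as its frame pointer. }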

  procedure tcg386.init_register_allocators;
    begin
      inherited init_register_allocators;
      if not(target_info.system in [system_i386_darwin,system_i386_iphonesim]) and
         (cs_create_pic in current_settings.moduleswitches) then
        rg[R_INTREGISTER]:=trgcpu.create(R_INTREGISTER,R_SUBWHOLE,[RS_EAX,RS_EDX,RS_ECX,RS_ESI,RS_EDI],first_int_imreg,[RS_EBP])
      else
        if (cs_useebp in current_settings.optimizerswitches) and assigned(current_procinfo) and (current_procinfo.framepointer<>NR_EBP) then
          rg[R_INTREGISTER]:=trgcpu.create(R_INTREGISTER,R_SUBWHOLE,[RS_EAX,RS_EDX,RS_ECX,RS_EBX,RS_ESI,RS_EDI,RS_EBP],first_int_imreg,[])
        else
          rg[R_INTREGISTER]:=trgcpu.create(R_INTREGISTER,R_SUBWHOLE,[RS_EAX,RS_EDX,RS_ECX,RS_EBX,RS_ESI,RS_EDI],first_int_imreg,[RS_EBP]);
      rg[R_MMXREGISTER]:=trgcpu.create(R_MMXREGISTER,R_SUBNONE,[RS_XMM0,RS_XMM1,RS_XMM2,RS_XMM3,RS_XMM4,RS_XMM5,RS_XMM6,RS_XMM7],first_mm_imreg,[]);
      rg[R_MMREGISTER]:=trgcpu.create(R_MMREGISTER,R_SUBWHOLE,[RS_XMM0,RS_XMM1,RS_XMM2,RS_XMM3,RS_XMM4,RS_XMM5,RS_XMM6,RS_XMM7],first_mm_imreg,[]);
      rgfpu:=Trgx86fpu.create;
    end;

  procedure tcg386.do_register_allocation(list:TAsmList;headertai:tai);
    begin
      if (pi_needs_got in current_procinfo.flags) then
        begin
          if getsupreg(current_procinfo.got) < first_int_imreg then
            include(rg[R_INTREGISTER].used_in_proc,getsupreg(current_procinfo.got));
        end;
      inherited do_register_allocation(list,headertai);
    end;
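
  { The a_load*_cgpara overrides below pass parameters with PUSH instructions
    whenever use_push() allows it. When the value is smaller than the
    parameter alignment, the pushed size is widened to the alignment, so that
    (roughly) an OS_8 value with 4 byte parameter alignment still ends up as
    a single dword push and the outgoing parameter area stays aligned. }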

  procedure tcg386.a_load_reg_cgpara(list : TAsmList;size : tcgsize;r : tregister;const cgpara : tcgpara);
    var
      pushsize : tcgsize;
    begin
      check_register_size(size,r);
      if use_push(cgpara) then
        begin
          cgpara.check_simple_location;
          if tcgsize2size[cgpara.location^.size]>cgpara.alignment then
            pushsize:=cgpara.location^.size
          else
            pushsize:=int_cgsize(cgpara.alignment);
          list.concat(taicpu.op_reg(A_PUSH,tcgsize2opsize[pushsize],makeregsize(list,r,pushsize)));
        end
      else
        inherited a_load_reg_cgpara(list,size,r,cgpara);
    end;

  procedure tcg386.a_load_const_cgpara(list : TAsmList;size : tcgsize;a : tcgint;const cgpara : tcgpara);
    var
      pushsize : tcgsize;
    begin
      if use_push(cgpara) then
        begin
          cgpara.check_simple_location;
          if tcgsize2size[cgpara.location^.size]>cgpara.alignment then
            pushsize:=cgpara.location^.size
          else
            pushsize:=int_cgsize(cgpara.alignment);
          list.concat(taicpu.op_const(A_PUSH,tcgsize2opsize[pushsize],a));
        end
      else
        inherited a_load_const_cgpara(list,size,a,cgpara);
    end;

  procedure tcg386.a_load_ref_cgpara(list : TAsmList;size : tcgsize;const r : treference;const cgpara : tcgpara);

    procedure pushdata(paraloc:pcgparalocation;ofs:tcgint);
      var
        pushsize : tcgsize;
        opsize : topsize;
        tmpreg : tregister;
        href : treference;
      begin
        if not assigned(paraloc) then
          exit;
        if (paraloc^.loc<>LOC_REFERENCE) or
           (paraloc^.reference.index<>NR_STACK_POINTER_REG) or
           (tcgsize2size[paraloc^.size]>sizeof(aint)) then
          internalerror(200501162);
        { Pushes are needed in reverse order, so add the size of the
          current location to the offset to load from. This prevents
          wrong calculations for the last location when the size is
          not a power of 2 }
        if assigned(paraloc^.next) then
          pushdata(paraloc^.next,ofs+tcgsize2size[paraloc^.size]);
        { Push the data starting at ofs }
        href:=r;
        inc(href.offset,ofs);
        if tcgsize2size[paraloc^.size]>cgpara.alignment then
          pushsize:=paraloc^.size
        else
          pushsize:=int_cgsize(cgpara.alignment);
        opsize:=TCgsize2opsize[pushsize];
        { for go32v2 we obtain OS_F32,
          but pushs is not valid, we need pushl }
        if opsize=S_FS then
          opsize:=S_L;
        if tcgsize2size[paraloc^.size]<cgpara.alignment then
          begin
            tmpreg:=getintregister(list,pushsize);
            a_load_ref_reg(list,paraloc^.size,pushsize,href,tmpreg);
            list.concat(taicpu.op_reg(A_PUSH,opsize,tmpreg));
          end
        else
          begin
            make_simple_ref(list,href);
            list.concat(taicpu.op_ref(A_PUSH,opsize,href));
          end;
      end;

    var
      len : tcgint;
      href : treference;
    begin
      { cgpara.size=OS_NO requires a copy on the stack }
      if use_push(cgpara) then
        begin
          { Record copy? }
          if (cgpara.size=OS_NO) or (size=OS_NO) then
            begin
              cgpara.check_simple_location;
              len:=align(cgpara.intsize,cgpara.alignment);
              g_stackpointer_alloc(list,len);
              reference_reset_base(href,NR_STACK_POINTER_REG,0,4);
              g_concatcopy(list,r,href,len);
            end
          else
            begin
              if tcgsize2size[cgpara.size]<>tcgsize2size[size] then
                internalerror(200501161);
              if (cgpara.size=OS_F64) then
                begin
                  href:=r;
                  make_simple_ref(list,href);
                  inc(href.offset,4);
                  list.concat(taicpu.op_ref(A_PUSH,S_L,href));
                  dec(href.offset,4);
                  list.concat(taicpu.op_ref(A_PUSH,S_L,href));
                end
              else
                { We need to push the data in reverse order,
                  therefore we use a recursive algorithm }
                pushdata(cgpara.location,0);
            end
        end
      else
        inherited a_load_ref_cgpara(list,size,r,cgpara);
    end;

  procedure tcg386.a_loadaddr_ref_cgpara(list : TAsmList;const r : treference;const cgpara : tcgpara);
    var
      tmpreg : tregister;
      opsize : topsize;
      tmpref : treference;
    begin
      with r do
        begin
          if use_push(cgpara) then
            begin
              cgpara.check_simple_location;
              opsize:=tcgsize2opsize[OS_ADDR];
              if (segment=NR_NO) and (base=NR_NO) and (index=NR_NO) then
                begin
                  if assigned(symbol) then
                    begin
                      if (target_info.system in [system_i386_darwin,system_i386_iphonesim]) and
                         ((r.symbol.bind in [AB_EXTERNAL,AB_WEAK_EXTERNAL]) or
                          (cs_create_pic in current_settings.moduleswitches)) then
                        begin
                          tmpreg:=getaddressregister(list);
                          a_loadaddr_ref_reg(list,r,tmpreg);
                          list.concat(taicpu.op_reg(A_PUSH,opsize,tmpreg));
                        end
                      else if cs_create_pic in current_settings.moduleswitches then
                        begin
                          if offset<>0 then
                            begin
                              tmpreg:=getaddressregister(list);
                              a_loadaddr_ref_reg(list,r,tmpreg);
                              list.concat(taicpu.op_reg(A_PUSH,opsize,tmpreg));
                            end
                          else
                            begin
                              reference_reset_symbol(tmpref,r.symbol,0,r.alignment);
                              tmpref.refaddr:=addr_pic;
                              tmpref.base:=current_procinfo.got;
{$ifdef EXTDEBUG}
                              if not (pi_needs_got in current_procinfo.flags) then
                                Comment(V_warning,'pi_needs_got not included');
{$endif EXTDEBUG}
                              include(current_procinfo.flags,pi_needs_got);
                              list.concat(taicpu.op_ref(A_PUSH,S_L,tmpref));
                            end
                        end
                      else
                        list.concat(Taicpu.Op_sym_ofs(A_PUSH,opsize,symbol,offset));
                    end
                  else
                    list.concat(Taicpu.Op_const(A_PUSH,opsize,offset));
                end
              else if (segment=NR_NO) and (base=NR_NO) and (index<>NR_NO) and
                      (offset=0) and (scalefactor=0) and (symbol=nil) then
                list.concat(Taicpu.Op_reg(A_PUSH,opsize,index))
              else if (segment=NR_NO) and (base<>NR_NO) and (index=NR_NO) and
                      (offset=0) and (symbol=nil) then
                list.concat(Taicpu.Op_reg(A_PUSH,opsize,base))
              else
                begin
                  tmpreg:=getaddressregister(list);
                  a_loadaddr_ref_reg(list,r,tmpreg);
                  list.concat(taicpu.op_reg(A_PUSH,opsize,tmpreg));
                end;
            end
          else
            inherited a_loadaddr_ref_cgpara(list,r,cgpara);
        end;
    end;
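
  { g_proc_exit emits the epilogue: release the local stack space (via LEA or
    LEAVE), restore the callee-saved registers and return. Interrupt handlers
    additionally pop the general purpose and segment registers and finish
    with IRET; routines using a clearstack (cdecl-style) convention normally
    return with a plain RET, everything else uses RET imm16 to remove its
    parameters. }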

  procedure tcg386.g_proc_exit(list : TAsmList;parasize:longint;nostackframe:boolean);

    procedure increase_sp(a : tcgint);
      var
        href : treference;
      begin
        reference_reset_base(href,NR_STACK_POINTER_REG,a,0);
        { normally, lea is a better choice than an add }
        list.concat(Taicpu.op_ref_reg(A_LEA,TCGSize2OpSize[OS_ADDR],href,NR_STACK_POINTER_REG));
      end;

    begin
      { MMX needs to call EMMS }
      if assigned(rg[R_MMXREGISTER]) and
         (rg[R_MMXREGISTER].uses_registers) then
        list.concat(Taicpu.op_none(A_EMMS,S_NO));
      { remove stackframe }
      if not nostackframe then
        begin
          if (current_procinfo.framepointer=NR_STACK_POINTER_REG) or
             (current_procinfo.procdef.proctypeoption=potype_exceptfilter) then
            begin
              if current_procinfo.final_localsize<>0 then
                increase_sp(current_procinfo.final_localsize);
              if (not paramanager.use_fixed_stack) then
                internal_restore_regs(list,true);
              if (current_procinfo.procdef.proctypeoption=potype_exceptfilter) then
                list.concat(Taicpu.op_reg(A_POP,tcgsize2opsize[OS_ADDR],NR_FRAME_POINTER_REG));
            end
          else
            begin
              if (not paramanager.use_fixed_stack) then
                internal_restore_regs(list,not (pi_has_stack_allocs in current_procinfo.flags));
              list.concat(Taicpu.op_none(A_LEAVE,S_NO));
            end;
          list.concat(tai_regalloc.dealloc(current_procinfo.framepointer,nil));
        end;
      { return from proc }
      if (po_interrupt in current_procinfo.procdef.procoptions) and
         { this messes up stack alignment }
         (target_info.stackalign=4) then
        begin
          if assigned(current_procinfo.procdef.funcretloc[calleeside].location) and
             (current_procinfo.procdef.funcretloc[calleeside].location^.loc=LOC_REGISTER) then
            begin
              if (getsupreg(current_procinfo.procdef.funcretloc[calleeside].location^.register)=RS_EAX) then
                list.concat(Taicpu.Op_const_reg(A_ADD,S_L,4,NR_ESP))
              else
                internalerror(2010053001);
            end
          else
            list.concat(Taicpu.Op_reg(A_POP,S_L,NR_EAX));
          list.concat(Taicpu.Op_reg(A_POP,S_L,NR_EBX));
          list.concat(Taicpu.Op_reg(A_POP,S_L,NR_ECX));
          if (current_procinfo.procdef.funcretloc[calleeside].size in [OS_64,OS_S64]) and
             assigned(current_procinfo.procdef.funcretloc[calleeside].location) and
             assigned(current_procinfo.procdef.funcretloc[calleeside].location^.next) and
             (current_procinfo.procdef.funcretloc[calleeside].location^.next^.loc=LOC_REGISTER) then
            begin
              if (getsupreg(current_procinfo.procdef.funcretloc[calleeside].location^.next^.register)=RS_EDX) then
                list.concat(Taicpu.Op_const_reg(A_ADD,S_L,4,NR_ESP))
              else
                internalerror(2010053002);
            end
          else
            list.concat(Taicpu.Op_reg(A_POP,S_L,NR_EDX));
          list.concat(Taicpu.Op_reg(A_POP,S_L,NR_ESI));
          list.concat(Taicpu.Op_reg(A_POP,S_L,NR_EDI));
          { .... also the segment registers }
          list.concat(Taicpu.Op_reg(A_POP,S_W,NR_DS));
          list.concat(Taicpu.Op_reg(A_POP,S_W,NR_ES));
          list.concat(Taicpu.Op_reg(A_POP,S_W,NR_FS));
          list.concat(Taicpu.Op_reg(A_POP,S_W,NR_GS));
          { this restores the flags }
          list.concat(Taicpu.Op_none(A_IRET,S_NO));
        end
      { Routines with the poclearstack flag set use only a ret }
      else if (current_procinfo.procdef.proccalloption in clearstack_pocalls) and
              (not paramanager.use_fixed_stack) then
        begin
          { complex return values are removed from stack in C code PM }
          { but not on win32 }
          { and not for safecall with hidden exceptions, because the result }
          { which contains the exception is passed in EAX }
          if ((target_info.system <> system_i386_win32) or
              (target_info.abi=abi_old_win32_gnu)) and
             not ((current_procinfo.procdef.proccalloption = pocall_safecall) and
                  (tf_safecall_exceptions in target_info.flags)) and
             paramanager.ret_in_param(current_procinfo.procdef.returndef,
                                      current_procinfo.procdef) then
            list.concat(Taicpu.Op_const(A_RET,S_W,sizeof(aint)))
          else
            list.concat(Taicpu.Op_none(A_RET,S_NO));
        end
      { ... also routines with parasize=0 }
      else if (parasize=0) then
        list.concat(Taicpu.Op_none(A_RET,S_NO))
      else
        begin
          { parameters are limited to 65535 bytes because ret allows only imm16 }
          if (parasize>65535) then
            CGMessage(cg_e_parasize_too_big);
          list.concat(Taicpu.Op_const(A_RET,S_W,parasize));
        end;
    end;
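
  { g_copyvaluepara_openarray makes the local copy of an open array value
    parameter: it computes (high+1)*elesize, allocates that much stack space
    (touching every page on win32 so the stack guard page keeps working) and
    copies the data with REP MOVSB/MOVSW/MOVSD. }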

  procedure tcg386.g_copyvaluepara_openarray(list : TAsmList;const ref:treference;const lenloc:tlocation;elesize:tcgint;destreg:tregister);
    var
      power : longint;
      opsize : topsize;
{$ifndef __NOWINPECOFF__}
      again,ok : tasmlabel;
{$endif}
    begin
      { get stack space }
      getcpuregister(list,NR_EDI);
      a_load_loc_reg(list,OS_INT,lenloc,NR_EDI);
      list.concat(Taicpu.op_reg(A_INC,S_L,NR_EDI));
      { Now EDI contains (high+1). }
      { special case handling for elesize=8, 4 and 2:
        set ECX = (high+1) instead of ECX = (high+1)*elesize.
        In the case of elesize=4 and 2, this allows us to avoid the SHR later.
        In the case of elesize=8, we can later use a SHL ECX, 1 instead of
        SHR ECX, 2 which is one byte shorter. }
      if (elesize=8) or (elesize=4) or (elesize=2) then
        begin
          { Now EDI contains (high+1). Copy it to ECX for later use. }
          getcpuregister(list,NR_ECX);
          list.concat(Taicpu.op_reg_reg(A_MOV,S_L,NR_EDI,NR_ECX));
        end;
      { EDI := EDI * elesize }
      if (elesize<>1) then
        begin
          if ispowerof2(elesize, power) then
            list.concat(Taicpu.op_const_reg(A_SHL,S_L,power,NR_EDI))
          else
            list.concat(Taicpu.op_const_reg(A_IMUL,S_L,elesize,NR_EDI));
        end;
      if (elesize<>8) and (elesize<>4) and (elesize<>2) then
        begin
          { Now EDI contains (high+1)*elesize. Copy it to ECX for later use. }
          getcpuregister(list,NR_ECX);
          list.concat(Taicpu.op_reg_reg(A_MOV,S_L,NR_EDI,NR_ECX));
        end;
{$ifndef __NOWINPECOFF__}
      { windows guards only a few pages for stack growing, }
      { so we have to access every page first }
      if target_info.system=system_i386_win32 then
        begin
          current_asmdata.getjumplabel(again);
          current_asmdata.getjumplabel(ok);
          a_label(list,again);
          list.concat(Taicpu.op_const_reg(A_CMP,S_L,winstackpagesize,NR_EDI));
          a_jmp_cond(list,OC_B,ok);
          list.concat(Taicpu.op_const_reg(A_SUB,S_L,winstackpagesize-4,NR_ESP));
          list.concat(Taicpu.op_reg(A_PUSH,S_L,NR_EDI));
          list.concat(Taicpu.op_const_reg(A_SUB,S_L,winstackpagesize,NR_EDI));
          a_jmp_always(list,again);
          a_label(list,ok);
        end;
{$endif __NOWINPECOFF__}
      { If we were probing pages, EDI=(size mod pagesize) and ESP is decremented
        by (size div pagesize)*pagesize, otherwise EDI=size.
        Either way, subtracting EDI from ESP will set ESP to the desired final value. }
      list.concat(Taicpu.op_reg_reg(A_SUB,S_L,NR_EDI,NR_ESP));
      { align stack on 4 bytes }
      list.concat(Taicpu.op_const_reg(A_AND,S_L,aint($fffffffc),NR_ESP));
      { load destination, don't use a_load_reg_reg, that will add a move instruction
        that can confuse the reg allocator }
      list.concat(Taicpu.Op_reg_reg(A_MOV,S_L,NR_ESP,NR_EDI));
      { Allocate ESI and load it with source }
      getcpuregister(list,NR_ESI);
      a_loadaddr_ref_reg(list,ref,NR_ESI);
      { calculate size }
      opsize:=S_B;
      if elesize=8 then
        begin
          opsize:=S_L;
          { ECX is number of qwords, convert to dwords }
          list.concat(Taicpu.op_const_reg(A_SHL,S_L,1,NR_ECX))
        end
      else if elesize=4 then
        begin
          opsize:=S_L;
          { ECX is already number of dwords, so no need to SHL/SHR }
        end
      else if elesize=2 then
        begin
          opsize:=S_W;
          { ECX is already number of words, so no need to SHL/SHR }
        end
      else
        if (elesize and 3)=0 then
          begin
            opsize:=S_L;
            { ECX is number of bytes, convert to dwords }
            list.concat(Taicpu.op_const_reg(A_SHR,S_L,2,NR_ECX))
          end
        else
          if (elesize and 1)=0 then
            begin
              opsize:=S_W;
              { ECX is number of bytes, convert to words }
              list.concat(Taicpu.op_const_reg(A_SHR,S_L,1,NR_ECX))
            end;
      if ts_cld in current_settings.targetswitches then
        list.concat(Taicpu.op_none(A_CLD,S_NO));
      list.concat(Taicpu.op_none(A_REP,S_NO));
      case opsize of
        S_B : list.concat(Taicpu.Op_none(A_MOVSB,S_NO));
        S_W : list.concat(Taicpu.Op_none(A_MOVSW,S_NO));
        S_L : list.concat(Taicpu.Op_none(A_MOVSD,S_NO));
      end;
      ungetcpuregister(list,NR_EDI);
      ungetcpuregister(list,NR_ECX);
      ungetcpuregister(list,NR_ESI);
      { patch the new address, but don't use a_load_reg_reg, that will add a move instruction
        that can confuse the reg allocator }
      list.concat(Taicpu.Op_reg_reg(A_MOV,S_L,NR_ESP,destreg));
      include(current_procinfo.flags,pi_has_stack_allocs);
    end;
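
  { Nothing has to be freed explicitly for the copy made above: the stack
    space is part of the current frame and is reclaimed when g_proc_exit
    tears the frame down (pi_has_stack_allocs tells the epilogue that ESP
    has been moved). }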

  procedure tcg386.g_releasevaluepara_openarray(list : TAsmList;const l:tlocation);
    begin
      { Nothing to release }
    end;
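
  { The g_exception_reason_* overrides below keep the exception reason on the
    hardware stack with PUSH/POP; only when a fixed stack layout is required
    do they fall back to the inherited, memory-based implementation. }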

  procedure tcg386.g_exception_reason_save(list : TAsmList; const href : treference);
    begin
      if not paramanager.use_fixed_stack then
        list.concat(Taicpu.op_reg(A_PUSH,tcgsize2opsize[OS_INT],NR_FUNCTION_RESULT_REG))
      else
        inherited g_exception_reason_save(list,href);
    end;

  procedure tcg386.g_exception_reason_save_const(list : TAsmList;const href : treference; a: tcgint);
    begin
      if not paramanager.use_fixed_stack then
        list.concat(Taicpu.op_const(A_PUSH,tcgsize2opsize[OS_INT],a))
      else
        inherited g_exception_reason_save_const(list,href,a);
    end;

  procedure tcg386.g_exception_reason_load(list : TAsmList; const href : treference);
    begin
      if not paramanager.use_fixed_stack then
        begin
          a_reg_alloc(list,NR_FUNCTION_RESULT_REG);
          list.concat(Taicpu.op_reg(A_POP,tcgsize2opsize[OS_INT],NR_FUNCTION_RESULT_REG))
        end
      else
        inherited g_exception_reason_load(list,href);
    end;

  procedure tcg386.g_maybe_got_init(list: TAsmList);
    var
      notdarwin: boolean;
    begin
      { allocate PIC register }
      if (cs_create_pic in current_settings.moduleswitches) and
         (tf_pic_uses_got in target_info.flags) and
         (pi_needs_got in current_procinfo.flags) then
        begin
          notdarwin:=not(target_info.system in [system_i386_darwin,system_i386_iphonesim]);
          { on darwin, the got register is virtual (and allocated earlier
            already) }
          if notdarwin then
            { ecx could be used in leaf procedures that don't use ecx to pass
              a parameter }
            current_procinfo.got:=NR_EBX;
          if notdarwin { needs testing before it can be enabled for non-darwin platforms
                         and
                         (current_settings.optimizecputype in [cpu_Pentium2,cpu_Pentium3,cpu_Pentium4]) } then
            begin
              current_module.requires_ebx_pic_helper:=true;
              a_call_name_static(list,'fpc_geteipasebx');
            end
          else
            begin
              { call/pop is faster than call/ret/mov on Core Solo and later
                according to Apple's benchmarking -- and all Intel Macs
                have at least a Core Solo (furthermore, the i386 - Pentium 1
                don't have a return stack buffer) }
              a_call_name_static(list,current_procinfo.CurrGOTLabel.name);
              a_label(list,current_procinfo.CurrGotLabel);
              list.concat(taicpu.op_reg(A_POP,S_L,current_procinfo.got))
            end;
          if notdarwin then
            begin
              list.concat(taicpu.op_sym_ofs_reg(A_ADD,S_L,current_asmdata.RefAsmSymbol('_GLOBAL_OFFSET_TABLE_'),0,NR_PIC_OFFSET_REG));
              list.concat(tai_regalloc.alloc(NR_PIC_OFFSET_REG,nil));
            end;
        end;
    end;

  procedure tcg386.g_intf_wrapper(list: TAsmList; procdef: tprocdef; const labelname: string; ioffset: longint);
    {
      possible calling conventions:
                    default  stdcall  cdecl  pascal  register
      default(0):     OK       OK      OK      OK       OK
      virtual(1):     OK       OK      OK      OK       OK(2 or 1)

      (0):
        set self parameter to correct value
        jmp mangledname

      (1): The wrapper code uses %ecx to reach the virtual method address
        set self to correct value
        mov self,%eax
        mov 0(%eax),%ecx      ; load vmt
        jmp vmtoffs(%ecx)     ; method offs

      (2): Virtual methods use values pushed on the stack to reach the method
           address, so the following code is generated:
        set self to correct value
        push %ebx             ; allocate space for function address
        push %eax
        mov self,%eax
        mov 0(%eax),%eax      ; load vmt
        mov vmtoffs(%eax),%eax ; method offs
        mov %eax,4(%esp)
        pop %eax
        ret 0                 ; jmp to the address
    }

    { returns whether ECX is used (either as a parameter or is nonvolatile and shouldn't be changed) }
    function is_ecx_used: boolean;
      var
        i: Integer;
        hp: tparavarsym;
        paraloc: PCGParaLocation;
      begin
        if not (RS_ECX in paramanager.get_volatile_registers_int(procdef.proccalloption)) then
          exit(true);
        for i:=0 to procdef.paras.count-1 do
          begin
            hp:=tparavarsym(procdef.paras[i]);
            procdef.init_paraloc_info(calleeside);
            paraloc:=hp.paraloc[calleeside].Location;
            while paraloc<>nil do
              begin
                if (paraloc^.Loc=LOC_REGISTER) and (getsupreg(paraloc^.register)=RS_ECX) then
                  exit(true);
                paraloc:=paraloc^.Next;
              end;
          end;
        Result:=false;
      end;

    procedure getselftoeax(offs: longint);
      var
        href : treference;
        selfoffsetfromsp : longint;
      begin
        { mov offset(%esp),%eax }
        if (procdef.proccalloption<>pocall_register) then
          begin
            { framepointer is pushed for nested procs }
            if procdef.parast.symtablelevel>normal_function_level then
              selfoffsetfromsp:=2*sizeof(aint)
            else
              selfoffsetfromsp:=sizeof(aint);
            reference_reset_base(href,NR_ESP,selfoffsetfromsp+offs,4);
            a_load_ref_reg(list,OS_ADDR,OS_ADDR,href,NR_EAX);
          end;
      end;

    procedure loadvmtto(reg: tregister);
      var
        href : treference;
      begin
        { mov 0(%eax),%reg ; load vmt }
        reference_reset_base(href,NR_EAX,0,4);
        a_load_ref_reg(list,OS_ADDR,OS_ADDR,href,reg);
      end;

    procedure op_onregmethodaddr(op: TAsmOp; reg: tregister);
      var
        href : treference;
      begin
        if (procdef.extnumber=$ffff) then
          Internalerror(200006139);
        { call/jmp vmtoffs(%reg) ; method offs }
        reference_reset_base(href,reg,tobjectdef(procdef.struct).vmtmethodoffset(procdef.extnumber),4);
        list.concat(taicpu.op_ref(op,S_L,href));
      end;

    procedure loadmethodoffstoeax;
      var
        href : treference;
      begin
        if (procdef.extnumber=$ffff) then
          Internalerror(200006139);
        { mov vmtoffs(%eax),%eax ; method offs }
        reference_reset_base(href,NR_EAX,tobjectdef(procdef.struct).vmtmethodoffset(procdef.extnumber),4);
        a_load_ref_reg(list,OS_ADDR,OS_ADDR,href,NR_EAX);
      end;

    var
      lab : tasmsymbol;
      make_global : boolean;
      href : treference;
    begin
      if not(procdef.proctypeoption in [potype_function,potype_procedure]) then
        Internalerror(200006137);
      if not assigned(procdef.struct) or
         (procdef.procoptions*[po_classmethod, po_staticmethod,
           po_methodpointer, po_interrupt, po_iocheck]<>[]) then
        Internalerror(200006138);
      if procdef.owner.symtabletype<>ObjectSymtable then
        Internalerror(200109191);
      make_global:=false;
      if (not current_module.is_unit) or
         create_smartlink or
         (procdef.owner.defowner.owner.symtabletype=globalsymtable) then
        make_global:=true;
      if make_global then
        List.concat(Tai_symbol.Createname_global(labelname,AT_FUNCTION,0))
      else
        List.concat(Tai_symbol.Createname(labelname,AT_FUNCTION,0));
      { set param1 interface to self }
      g_adjust_self_value(list,procdef,ioffset);
      if (po_virtualmethod in procdef.procoptions) and
         not is_objectpascal_helper(procdef.struct) then
        begin
          if (procdef.proccalloption=pocall_register) and is_ecx_used then
            begin
              { case 2 }
              list.concat(taicpu.op_reg(A_PUSH,S_L,NR_EBX)); { allocate space for address }
              list.concat(taicpu.op_reg(A_PUSH,S_L,NR_EAX));
              getselftoeax(8);
              loadvmtto(NR_EAX);
              loadmethodoffstoeax;
              { mov %eax,4(%esp) }
              reference_reset_base(href,NR_ESP,4,4);
              list.concat(taicpu.op_reg_ref(A_MOV,S_L,NR_EAX,href));
              { pop %eax }
              list.concat(taicpu.op_reg(A_POP,S_L,NR_EAX));
              { ret ; jump to the address }
              list.concat(taicpu.op_none(A_RET,S_L));
            end
          else
            begin
              { case 1 }
              getselftoeax(0);
              loadvmtto(NR_ECX);
              op_onregmethodaddr(A_JMP,NR_ECX);
            end;
        end
      { case 0 }
      else
        begin
          if (target_info.system <> system_i386_darwin) then
            begin
              lab:=current_asmdata.RefAsmSymbol(procdef.mangledname);
              list.concat(taicpu.op_sym(A_JMP,S_NO,lab))
            end
          else
            list.concat(taicpu.op_sym(A_JMP,S_NO,get_darwin_call_stub(procdef.mangledname,false)))
        end;
      List.concat(Tai_symbol_end.Createname(labelname));
    end;

{ ************* 64bit operations ************ }

  procedure tcg64f386.get_64bit_ops(op:TOpCG;var op1,op2:TAsmOp);
    begin
      case op of
        OP_ADD :
          begin
            op1:=A_ADD;
            op2:=A_ADC;
          end;
        OP_SUB :
          begin
            op1:=A_SUB;
            op2:=A_SBB;
          end;
        OP_XOR :
          begin
            op1:=A_XOR;
            op2:=A_XOR;
          end;
        OP_OR :
          begin
            op1:=A_OR;
            op2:=A_OR;
          end;
        OP_AND :
          begin
            op1:=A_AND;
            op2:=A_AND;
          end;
        else
          internalerror(200203241);
      end;
    end;
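
  { The a_op64_* methods below apply op1 to the low dword and op2 to the high
    dword, with op2 chosen so that the carry/borrow of the low half is
    propagated. For example, a 64-bit OP_ADD from a memory operand comes out
    roughly as
        addl  lo(ref),%reg.lo
        adcl  hi(ref),%reg.hi
    (illustrative AT&T syntax, not literal output). }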

  procedure tcg64f386.a_op64_ref_reg(list : TAsmList;op:TOpCG;size : tcgsize;const ref : treference;reg : tregister64);
    var
      op1,op2 : TAsmOp;
      tempref : treference;
    begin
      if not(op in [OP_NEG,OP_NOT]) then
        begin
          get_64bit_ops(op,op1,op2);
          tempref:=ref;
          tcgx86(cg).make_simple_ref(list,tempref);
          list.concat(taicpu.op_ref_reg(op1,S_L,tempref,reg.reglo));
          inc(tempref.offset,4);
          list.concat(taicpu.op_ref_reg(op2,S_L,tempref,reg.reghi));
        end
      else
        begin
          a_load64_ref_reg(list,ref,reg);
          a_op64_reg_reg(list,op,size,reg,reg);
        end;
    end;

  procedure tcg64f386.a_op64_reg_reg(list : TAsmList;op:TOpCG;size : tcgsize;regsrc,regdst : tregister64);
    var
      op1,op2 : TAsmOp;
    begin
      case op of
        OP_NEG :
          begin
            if (regsrc.reglo<>regdst.reglo) then
              a_load64_reg_reg(list,regsrc,regdst);
            list.concat(taicpu.op_reg(A_NOT,S_L,regdst.reghi));
            list.concat(taicpu.op_reg(A_NEG,S_L,regdst.reglo));
            list.concat(taicpu.op_const_reg(A_SBB,S_L,-1,regdst.reghi));
            exit;
          end;
        OP_NOT :
          begin
            if (regsrc.reglo<>regdst.reglo) then
              a_load64_reg_reg(list,regsrc,regdst);
            list.concat(taicpu.op_reg(A_NOT,S_L,regdst.reghi));
            list.concat(taicpu.op_reg(A_NOT,S_L,regdst.reglo));
            exit;
          end;
      end;
      get_64bit_ops(op,op1,op2);
      list.concat(taicpu.op_reg_reg(op1,S_L,regsrc.reglo,regdst.reglo));
      list.concat(taicpu.op_reg_reg(op2,S_L,regsrc.reghi,regdst.reghi));
    end;
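
  { A note on the OP_NEG sequence above: the two's complement of hi:lo is
    NOT(hi:lo)+1. NEG on the low dword already yields NOT(lo)+1 and leaves
    the carry flag clear exactly when lo was 0, i.e. exactly when the +1
    carries into the high half; SBB -1 on the already inverted high dword
    then computes NOT(hi)+1-CF, which is the correct high half in both
    cases. }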

  procedure tcg64f386.a_op64_const_reg(list : TAsmList;op:TOpCG;size : tcgsize;value : int64;reg : tregister64);
    var
      op1,op2 : TAsmOp;
    begin
      case op of
        OP_AND,OP_OR,OP_XOR:
          begin
            cg.a_op_const_reg(list,op,OS_32,tcgint(lo(value)),reg.reglo);
            cg.a_op_const_reg(list,op,OS_32,tcgint(hi(value)),reg.reghi);
          end;
        OP_ADD, OP_SUB:
          begin
            // can't use a_op_const_ref because this may use dec/inc
            get_64bit_ops(op,op1,op2);
            list.concat(taicpu.op_const_reg(op1,S_L,aint(lo(value)),reg.reglo));
            list.concat(taicpu.op_const_reg(op2,S_L,aint(hi(value)),reg.reghi));
          end;
        else
          internalerror(200204021);
      end;
    end;

  procedure tcg64f386.a_op64_const_ref(list : TAsmList;op:TOpCG;size : tcgsize;value : int64;const ref : treference);
    var
      op1,op2 : TAsmOp;
      tempref : treference;
    begin
      tempref:=ref;
      tcgx86(cg).make_simple_ref(list,tempref);
      case op of
        OP_AND,OP_OR,OP_XOR:
          begin
            cg.a_op_const_ref(list,op,OS_32,tcgint(lo(value)),tempref);
            inc(tempref.offset,4);
            cg.a_op_const_ref(list,op,OS_32,tcgint(hi(value)),tempref);
          end;
        OP_ADD, OP_SUB:
          begin
            get_64bit_ops(op,op1,op2);
            // can't use a_op_const_ref because this may use dec/inc
            list.concat(taicpu.op_const_ref(op1,S_L,aint(lo(value)),tempref));
            inc(tempref.offset,4);
            list.concat(taicpu.op_const_ref(op2,S_L,aint(hi(value)),tempref));
          end;
        else
          internalerror(200204022);
      end;
    end;

  procedure create_codegen;
    begin
      cg := tcg386.create;
      cg64 := tcg64f386.create;
    end;

end.