{
    Copyright (c) 1998-2002 by Florian Klaempfl

    This unit implements the code generator for the i386

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

 ****************************************************************************
}
unit cgcpu;

{$i fpcdefs.inc}

interface

uses
  globtype,
  cgbase,cgobj,cg64f32,cgx86,
  aasmbase,aasmtai,aasmdata,aasmcpu,
  cpubase,parabase,cgutils,
  symconst,symdef,symsym
  ;

type
  tcg386 = class(tcgx86)
    procedure init_register_allocators;override;
    procedure do_register_allocation(list:TAsmList;headertai:tai);override;

    { passing parameter using push instead of mov }
    procedure a_load_reg_cgpara(list : TAsmList;size : tcgsize;r : tregister;const cgpara : tcgpara);override;
    procedure a_load_const_cgpara(list : TAsmList;size : tcgsize;a : tcgint;const cgpara : tcgpara);override;
    procedure a_load_ref_cgpara(list : TAsmList;size : tcgsize;const r : treference;const cgpara : tcgpara);override;
    procedure a_loadaddr_ref_cgpara(list : TAsmList;const r : treference;const cgpara : tcgpara);override;

    procedure g_proc_exit(list : TAsmList;parasize:longint;nostackframe:boolean);override;

    procedure g_copyvaluepara_openarray(list : TAsmList;const ref:treference;const lenloc:tlocation;elesize:tcgint;destreg:tregister);
    procedure g_releasevaluepara_openarray(list : TAsmList;const l:tlocation);

    procedure g_exception_reason_save(list : TAsmList; const href : treference);override;
    procedure g_exception_reason_save_const(list : TAsmList; const href : treference; a: tcgint);override;
    procedure g_exception_reason_load(list : TAsmList; const href : treference);override;

    procedure g_intf_wrapper(list: TAsmList; procdef: tprocdef; const labelname: string; ioffset: longint);override;
    procedure g_maybe_got_init(list: TAsmList); override;
  end;

  tcg64f386 = class(tcg64f32)
    procedure a_op64_ref_reg(list : TAsmList;op:TOpCG;size : tcgsize;const ref : treference;reg : tregister64);override;
    procedure a_op64_reg_reg(list : TAsmList;op:TOpCG;size : tcgsize;regsrc,regdst : tregister64);override;
    procedure a_op64_const_reg(list : TAsmList;op:TOpCG;size : tcgsize;value : int64;reg : tregister64);override;
    procedure a_op64_const_ref(list : TAsmList;op:TOpCG;size : tcgsize;value : int64;const ref : treference);override;
  private
    procedure get_64bit_ops(op:TOpCG;var op1,op2:TAsmOp);
  end;

procedure create_codegen;

implementation

uses
  globals,verbose,systems,cutils,
  paramgr,procinfo,fmodule,
  rgcpu,rgx86,cpuinfo;

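{ Returns true if the parameter can be passed with PUSH instructions:
  the target does not use a fixed stack for parameter passing and the
  parameter location is a stack-pointer relative reference. }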
function use_push(const cgpara:tcgpara):boolean;
begin
  result:=(not paramanager.use_fixed_stack) and
    assigned(cgpara.location) and
    (cgpara.location^.loc=LOC_REFERENCE) and
    (cgpara.location^.reference.index=NR_STACK_POINTER_REG);
end;

procedure tcg386.init_register_allocators;
begin
  inherited init_register_allocators;
  if not(target_info.system in [system_i386_darwin,system_i386_iphonesim]) and
     (cs_create_pic in current_settings.moduleswitches) then
    rg[R_INTREGISTER]:=trgcpu.create(R_INTREGISTER,R_SUBWHOLE,[RS_EAX,RS_EDX,RS_ECX,RS_ESI,RS_EDI],first_int_imreg,[RS_EBP])
  else
    if (cs_useebp in current_settings.optimizerswitches) and assigned(current_procinfo) and (current_procinfo.framepointer<>NR_EBP) then
      rg[R_INTREGISTER]:=trgcpu.create(R_INTREGISTER,R_SUBWHOLE,[RS_EAX,RS_EDX,RS_ECX,RS_EBX,RS_ESI,RS_EDI,RS_EBP],first_int_imreg,[])
    else
      rg[R_INTREGISTER]:=trgcpu.create(R_INTREGISTER,R_SUBWHOLE,[RS_EAX,RS_EDX,RS_ECX,RS_EBX,RS_ESI,RS_EDI],first_int_imreg,[RS_EBP]);
  rg[R_MMXREGISTER]:=trgcpu.create(R_MMXREGISTER,R_SUBNONE,[RS_XMM0,RS_XMM1,RS_XMM2,RS_XMM3,RS_XMM4,RS_XMM5,RS_XMM6,RS_XMM7],first_mm_imreg,[]);
  rg[R_MMREGISTER]:=trgcpu.create(R_MMREGISTER,R_SUBWHOLE,[RS_XMM0,RS_XMM1,RS_XMM2,RS_XMM3,RS_XMM4,RS_XMM5,RS_XMM6,RS_XMM7],first_mm_imreg,[]);
  rgfpu:=Trgx86fpu.create;
end;

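{ If the procedure needs the GOT pointer and the GOT register is a real
  (non-imaginary) register, mark it as used in the procedure so the
  register allocator takes it into account. }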
procedure tcg386.do_register_allocation(list:TAsmList;headertai:tai);
begin
  if (pi_needs_got in current_procinfo.flags) then
    begin
      if getsupreg(current_procinfo.got) < first_int_imreg then
        include(rg[R_INTREGISTER].used_in_proc,getsupreg(current_procinfo.got));
    end;
  inherited do_register_allocation(list,headertai);
end;

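{ Parameters that live on top of the stack are passed with a PUSH; the
  pushed size is rounded up to the parameter alignment so the stack
  stays correctly aligned. }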
procedure tcg386.a_load_reg_cgpara(list : TAsmList;size : tcgsize;r : tregister;const cgpara : tcgpara);
var
  pushsize : tcgsize;
begin
  check_register_size(size,r);
  if use_push(cgpara) then
    begin
      cgpara.check_simple_location;
      if tcgsize2size[cgpara.location^.size]>cgpara.alignment then
        pushsize:=cgpara.location^.size
      else
        pushsize:=int_cgsize(cgpara.alignment);
      list.concat(taicpu.op_reg(A_PUSH,tcgsize2opsize[pushsize],makeregsize(list,r,pushsize)));
    end
  else
    inherited a_load_reg_cgpara(list,size,r,cgpara);
end;

procedure tcg386.a_load_const_cgpara(list : TAsmList;size : tcgsize;a : tcgint;const cgpara : tcgpara);
var
  pushsize : tcgsize;
begin
  if use_push(cgpara) then
    begin
      cgpara.check_simple_location;
      if tcgsize2size[cgpara.location^.size]>cgpara.alignment then
        pushsize:=cgpara.location^.size
      else
        pushsize:=int_cgsize(cgpara.alignment);
      list.concat(taicpu.op_const(A_PUSH,tcgsize2opsize[pushsize],a));
    end
  else
    inherited a_load_const_cgpara(list,size,a,cgpara);
end;

procedure tcg386.a_load_ref_cgpara(list : TAsmList;size : tcgsize;const r : treference;const cgpara : tcgpara);

  procedure pushdata(paraloc:pcgparalocation;ofs:tcgint);
  var
    pushsize : tcgsize;
    opsize : topsize;
    tmpreg : tregister;
    href : treference;
  begin
    if not assigned(paraloc) then
      exit;
    if (paraloc^.loc<>LOC_REFERENCE) or
       (paraloc^.reference.index<>NR_STACK_POINTER_REG) or
       (tcgsize2size[paraloc^.size]>sizeof(aint)) then
      internalerror(200501162);
    { Pushes are needed in reverse order, so add the size of the
      current location to the offset to load from. This prevents
      wrong calculations for the last location when the size is
      not a power of 2 }
    if assigned(paraloc^.next) then
      pushdata(paraloc^.next,ofs+tcgsize2size[paraloc^.size]);
    { Push the data starting at ofs }
    href:=r;
    inc(href.offset,ofs);
    if tcgsize2size[paraloc^.size]>cgpara.alignment then
      pushsize:=paraloc^.size
    else
      pushsize:=int_cgsize(cgpara.alignment);
    opsize:=TCgsize2opsize[pushsize];
    { for go32v2 we obtain OS_F32,
      but pushs is not valid, we need pushl }
    if opsize=S_FS then
      opsize:=S_L;
    if tcgsize2size[paraloc^.size]<cgpara.alignment then
      begin
        tmpreg:=getintregister(list,pushsize);
        a_load_ref_reg(list,paraloc^.size,pushsize,href,tmpreg);
        list.concat(taicpu.op_reg(A_PUSH,opsize,tmpreg));
      end
    else
      begin
        make_simple_ref(list,href);
        list.concat(taicpu.op_ref(A_PUSH,opsize,href));
      end;
  end;

var
  len : tcgint;
  href : treference;
begin
  { cgpara.size=OS_NO requires a copy on the stack }
  if use_push(cgpara) then
    begin
      { Record copy? }
      if (cgpara.size in [OS_NO,OS_F64]) or (size=OS_NO) then
        begin
          cgpara.check_simple_location;
          len:=align(cgpara.intsize,cgpara.alignment);
          g_stackpointer_alloc(list,len);
          reference_reset_base(href,NR_STACK_POINTER_REG,0,4);
          g_concatcopy(list,r,href,len);
        end
      else
        begin
          if tcgsize2size[cgpara.size]<>tcgsize2size[size] then
            internalerror(200501161);
          { We need to push the data in reverse order,
            therefore we use a recursive algorithm }
          pushdata(cgpara.location,0);
        end
    end
  else
    inherited a_load_ref_cgpara(list,size,r,cgpara);
end;

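{ Pushes the address of a reference as a parameter. A plain symbol or a
  single register can be pushed directly; other addresses are first
  loaded into an address register and that register is pushed. }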
procedure tcg386.a_loadaddr_ref_cgpara(list : TAsmList;const r : treference;const cgpara : tcgpara);
var
  tmpreg : tregister;
  opsize : topsize;
  tmpref : treference;
begin
  with r do
    begin
      if use_push(cgpara) then
        begin
          cgpara.check_simple_location;
          opsize:=tcgsize2opsize[OS_ADDR];
          if (segment=NR_NO) and (base=NR_NO) and (index=NR_NO) then
            begin
              if assigned(symbol) then
                begin
                  if (target_info.system in [system_i386_darwin,system_i386_iphonesim]) and
                     ((r.symbol.bind in [AB_EXTERNAL,AB_WEAK_EXTERNAL]) or
                      (cs_create_pic in current_settings.moduleswitches)) then
                    begin
                      tmpreg:=getaddressregister(list);
                      a_loadaddr_ref_reg(list,r,tmpreg);
                      list.concat(taicpu.op_reg(A_PUSH,opsize,tmpreg));
                    end
                  else if cs_create_pic in current_settings.moduleswitches then
                    begin
                      if offset<>0 then
                        begin
                          tmpreg:=getaddressregister(list);
                          a_loadaddr_ref_reg(list,r,tmpreg);
                          list.concat(taicpu.op_reg(A_PUSH,opsize,tmpreg));
                        end
                      else
                        begin
                          reference_reset_symbol(tmpref,r.symbol,0,r.alignment);
                          tmpref.refaddr:=addr_pic;
                          tmpref.base:=current_procinfo.got;
{$ifdef EXTDEBUG}
                          if not (pi_needs_got in current_procinfo.flags) then
                            Comment(V_warning,'pi_needs_got not included');
{$endif EXTDEBUG}
                          include(current_procinfo.flags,pi_needs_got);
                          list.concat(taicpu.op_ref(A_PUSH,S_L,tmpref));
                        end
                    end
                  else
                    list.concat(Taicpu.Op_sym_ofs(A_PUSH,opsize,symbol,offset));
                end
              else
                list.concat(Taicpu.Op_const(A_PUSH,opsize,offset));
            end
          else if (segment=NR_NO) and (base=NR_NO) and (index<>NR_NO) and
                  (offset=0) and (scalefactor=0) and (symbol=nil) then
            list.concat(Taicpu.Op_reg(A_PUSH,opsize,index))
          else if (segment=NR_NO) and (base<>NR_NO) and (index=NR_NO) and
                  (offset=0) and (symbol=nil) then
            list.concat(Taicpu.Op_reg(A_PUSH,opsize,base))
          else
            begin
              tmpreg:=getaddressregister(list);
              a_loadaddr_ref_reg(list,r,tmpreg);
              list.concat(taicpu.op_reg(A_PUSH,opsize,tmpreg));
            end;
        end
      else
        inherited a_loadaddr_ref_cgpara(list,r,cgpara);
    end;
end;

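{ Generates the procedure epilogue: releases the stack frame, then returns
  with IRET for interrupt handlers, with RET imm16 for calling conventions
  where the callee clears the stack, or with a plain RET otherwise. }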
procedure tcg386.g_proc_exit(list : TAsmList;parasize:longint;nostackframe:boolean);

  procedure increase_fp(a : tcgint);
  var
    href : treference;
  begin
    reference_reset_base(href,current_procinfo.framepointer,a,0);
    { normally, lea is a better choice than an add }
    list.concat(Taicpu.op_ref_reg(A_LEA,TCGSize2OpSize[OS_ADDR],href,current_procinfo.framepointer));
  end;

var
  stacksize : longint;
begin
  { MMX needs to call EMMS }
  if assigned(rg[R_MMXREGISTER]) and
     (rg[R_MMXREGISTER].uses_registers) then
    list.concat(Taicpu.op_none(A_EMMS,S_NO));

  { remove stackframe }
  if not nostackframe then
    begin
      if current_procinfo.framepointer=NR_STACK_POINTER_REG then
        begin
          stacksize:=current_procinfo.calc_stackframe_size;
          if (target_info.stackalign>4) and
             ((stacksize <> 0) or
              (pi_do_call in current_procinfo.flags) or
              { we can't detect whether a call is made in this case ->
                use nostackframe only if you (think you) know what you
                are doing }
              (po_assembler in current_procinfo.procdef.procoptions)) then
            stacksize := align(stacksize+sizeof(aint),target_info.stackalign) - sizeof(aint);
          if stacksize<>0 then
            increase_fp(stacksize);
          if (not paramanager.use_fixed_stack) then
            internal_restore_regs(list,true);
        end
      else
        begin
          if (not paramanager.use_fixed_stack) then
            internal_restore_regs(list,not (pi_has_stack_allocs in current_procinfo.flags));
          list.concat(Taicpu.op_none(A_LEAVE,S_NO));
        end;
      list.concat(tai_regalloc.dealloc(current_procinfo.framepointer,nil));
    end;

  { return from proc }
  if (po_interrupt in current_procinfo.procdef.procoptions) and
     { this messes up stack alignment }
     (target_info.stackalign=4) then
    begin
      if assigned(current_procinfo.procdef.funcretloc[calleeside].location) and
         (current_procinfo.procdef.funcretloc[calleeside].location^.loc=LOC_REGISTER) then
        begin
          if (getsupreg(current_procinfo.procdef.funcretloc[calleeside].location^.register)=RS_EAX) then
            list.concat(Taicpu.Op_const_reg(A_ADD,S_L,4,NR_ESP))
          else
            internalerror(2010053001);
        end
      else
        list.concat(Taicpu.Op_reg(A_POP,S_L,NR_EAX));
      list.concat(Taicpu.Op_reg(A_POP,S_L,NR_EBX));
      list.concat(Taicpu.Op_reg(A_POP,S_L,NR_ECX));
      if (current_procinfo.procdef.funcretloc[calleeside].size in [OS_64,OS_S64]) and
         assigned(current_procinfo.procdef.funcretloc[calleeside].location) and
         assigned(current_procinfo.procdef.funcretloc[calleeside].location^.next) and
         (current_procinfo.procdef.funcretloc[calleeside].location^.next^.loc=LOC_REGISTER) then
        begin
          if (getsupreg(current_procinfo.procdef.funcretloc[calleeside].location^.next^.register)=RS_EDX) then
            list.concat(Taicpu.Op_const_reg(A_ADD,S_L,4,NR_ESP))
          else
            internalerror(2010053002);
        end
      else
        list.concat(Taicpu.Op_reg(A_POP,S_L,NR_EDX));
      list.concat(Taicpu.Op_reg(A_POP,S_L,NR_ESI));
      list.concat(Taicpu.Op_reg(A_POP,S_L,NR_EDI));
      { .... also the segment registers }
      list.concat(Taicpu.Op_reg(A_POP,S_W,NR_DS));
      list.concat(Taicpu.Op_reg(A_POP,S_W,NR_ES));
      list.concat(Taicpu.Op_reg(A_POP,S_W,NR_FS));
      list.concat(Taicpu.Op_reg(A_POP,S_W,NR_GS));
      { this restores the flags }
      list.concat(Taicpu.Op_none(A_IRET,S_NO));
    end
  { Routines with the poclearstack flag set use only a ret }
  else if (current_procinfo.procdef.proccalloption in clearstack_pocalls) and
          (not paramanager.use_fixed_stack) then
    begin
      { complex return values are removed from the stack in C code (PM),
        but not on win32, and not for safecall with hidden exceptions,
        because the result which contains the exception is passed in EAX }
      if (target_info.system <> system_i386_win32) and
         not ((current_procinfo.procdef.proccalloption = pocall_safecall) and
              (tf_safecall_exceptions in target_info.flags)) and
         paramanager.ret_in_param(current_procinfo.procdef.returndef,
                                  current_procinfo.procdef) then
        list.concat(Taicpu.Op_const(A_RET,S_W,sizeof(aint)))
      else
        list.concat(Taicpu.Op_none(A_RET,S_NO));
    end
  { ... also routines with parasize=0 }
  else if (parasize=0) then
    list.concat(Taicpu.Op_none(A_RET,S_NO))
  else
    begin
      { parameters are limited to 65535 bytes because ret allows only imm16 }
      if (parasize>65535) then
        CGMessage(cg_e_parasize_too_big);
      list.concat(Taicpu.Op_const(A_RET,S_W,parasize));
    end;
end;

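{ Makes a local copy of an open-array value parameter on the stack:
  EDI is loaded with (high+1)*elesize, the stack pointer is lowered by
  that amount (probing every page on win32), and the data is copied with
  REP MOVSB/MOVSW/MOVSD. destreg receives the address of the copy. }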
procedure tcg386.g_copyvaluepara_openarray(list : TAsmList;const ref:treference;const lenloc:tlocation;elesize:tcgint;destreg:tregister);
var
  power : longint;
  opsize : topsize;
{$ifndef __NOWINPECOFF__}
  again,ok : tasmlabel;
{$endif}
begin
  { get stack space }
  getcpuregister(list,NR_EDI);
  a_load_loc_reg(list,OS_INT,lenloc,NR_EDI);
  list.concat(Taicpu.op_reg(A_INC,S_L,NR_EDI));
  { Now EDI contains (high+1). }

  { special case handling for elesize=8, 4 and 2:
    set ECX = (high+1) instead of ECX = (high+1)*elesize.
    In the case of elesize=4 and 2, this allows us to avoid the SHR later.
    In the case of elesize=8, we can later use a SHL ECX, 1 instead of
    SHR ECX, 2 which is one byte shorter. }
  if (elesize=8) or (elesize=4) or (elesize=2) then
    begin
      { Now EDI contains (high+1). Copy it to ECX for later use. }
      getcpuregister(list,NR_ECX);
      list.concat(Taicpu.op_reg_reg(A_MOV,S_L,NR_EDI,NR_ECX));
    end;
  { EDI := EDI * elesize }
  if (elesize<>1) then
    begin
      if ispowerof2(elesize, power) then
        list.concat(Taicpu.op_const_reg(A_SHL,S_L,power,NR_EDI))
      else
        list.concat(Taicpu.op_const_reg(A_IMUL,S_L,elesize,NR_EDI));
    end;
  if (elesize<>8) and (elesize<>4) and (elesize<>2) then
    begin
      { Now EDI contains (high+1)*elesize. Copy it to ECX for later use. }
      getcpuregister(list,NR_ECX);
      list.concat(Taicpu.op_reg_reg(A_MOV,S_L,NR_EDI,NR_ECX));
    end;
{$ifndef __NOWINPECOFF__}
  { windows guards only a few pages for stack growing,
    so we have to access every page first }
  if target_info.system=system_i386_win32 then
    begin
      current_asmdata.getjumplabel(again);
      current_asmdata.getjumplabel(ok);
      a_label(list,again);
      list.concat(Taicpu.op_const_reg(A_CMP,S_L,winstackpagesize,NR_EDI));
      a_jmp_cond(list,OC_B,ok);
      list.concat(Taicpu.op_const_reg(A_SUB,S_L,winstackpagesize-4,NR_ESP));
      list.concat(Taicpu.op_reg(A_PUSH,S_L,NR_EDI));
      list.concat(Taicpu.op_const_reg(A_SUB,S_L,winstackpagesize,NR_EDI));
      a_jmp_always(list,again);
      a_label(list,ok);
    end;
{$endif __NOWINPECOFF__}
  { If we were probing pages, EDI=(size mod pagesize) and ESP is decremented
    by (size div pagesize)*pagesize, otherwise EDI=size.
    Either way, subtracting EDI from ESP will set ESP to the desired final value. }
  list.concat(Taicpu.op_reg_reg(A_SUB,S_L,NR_EDI,NR_ESP));
  { align stack on 4 bytes }
  list.concat(Taicpu.op_const_reg(A_AND,S_L,aint($fffffff4),NR_ESP));
  { load destination, don't use a_load_reg_reg, that will add a move instruction
    that can confuse the reg allocator }
  list.concat(Taicpu.Op_reg_reg(A_MOV,S_L,NR_ESP,NR_EDI));

  { Allocate ESI and load it with source }
  getcpuregister(list,NR_ESI);
  a_loadaddr_ref_reg(list,ref,NR_ESI);

  { calculate size }
  opsize:=S_B;
  if elesize=8 then
    begin
      opsize:=S_L;
      { ECX is number of qwords, convert to dwords }
      list.concat(Taicpu.op_const_reg(A_SHL,S_L,1,NR_ECX))
    end
  else if elesize=4 then
    begin
      opsize:=S_L;
      { ECX is already number of dwords, so no need to SHL/SHR }
    end
  else if elesize=2 then
    begin
      opsize:=S_W;
      { ECX is already number of words, so no need to SHL/SHR }
    end
  else
    if (elesize and 3)=0 then
      begin
        opsize:=S_L;
        { ECX is number of bytes, convert to dwords }
        list.concat(Taicpu.op_const_reg(A_SHR,S_L,2,NR_ECX))
      end
    else
      if (elesize and 1)=0 then
        begin
          opsize:=S_W;
          { ECX is number of bytes, convert to words }
          list.concat(Taicpu.op_const_reg(A_SHR,S_L,1,NR_ECX))
        end;
  if ts_cld in current_settings.targetswitches then
    list.concat(Taicpu.op_none(A_CLD,S_NO));
  list.concat(Taicpu.op_none(A_REP,S_NO));
  case opsize of
    S_B : list.concat(Taicpu.Op_none(A_MOVSB,S_NO));
    S_W : list.concat(Taicpu.Op_none(A_MOVSW,S_NO));
    S_L : list.concat(Taicpu.Op_none(A_MOVSD,S_NO));
  end;
  ungetcpuregister(list,NR_EDI);
  ungetcpuregister(list,NR_ECX);
  ungetcpuregister(list,NR_ESI);

  { patch the new address, but don't use a_load_reg_reg, that will add a move
    instruction that can confuse the reg allocator }
  list.concat(Taicpu.Op_reg_reg(A_MOV,S_L,NR_ESP,destreg));
  include(current_procinfo.flags,pi_has_stack_allocs);
end;

procedure tcg386.g_releasevaluepara_openarray(list : TAsmList;const l:tlocation);
begin
  { Nothing to release }
end;

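{ The exception reason value is kept on the stack: it is pushed from and
  popped into the function result register when no fixed stack is used. }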
procedure tcg386.g_exception_reason_save(list : TAsmList; const href : treference);
begin
  if not paramanager.use_fixed_stack then
    list.concat(Taicpu.op_reg(A_PUSH,tcgsize2opsize[OS_INT],NR_FUNCTION_RESULT_REG))
  else
    inherited g_exception_reason_save(list,href);
end;

procedure tcg386.g_exception_reason_save_const(list : TAsmList;const href : treference; a: tcgint);
begin
  if not paramanager.use_fixed_stack then
    list.concat(Taicpu.op_const(A_PUSH,tcgsize2opsize[OS_INT],a))
  else
    inherited g_exception_reason_save_const(list,href,a);
end;

procedure tcg386.g_exception_reason_load(list : TAsmList; const href : treference);
begin
  if not paramanager.use_fixed_stack then
    begin
      cg.a_reg_alloc(list,NR_FUNCTION_RESULT_REG);
      list.concat(Taicpu.op_reg(A_POP,tcgsize2opsize[OS_INT],NR_FUNCTION_RESULT_REG))
    end
  else
    inherited g_exception_reason_load(list,href);
end;

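{ Loads the GOT/PIC register at procedure entry when position-independent
  code is generated and the procedure actually references the GOT. }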
procedure tcg386.g_maybe_got_init(list: TAsmList);
var
  notdarwin: boolean;
begin
  { allocate PIC register }
  if (cs_create_pic in current_settings.moduleswitches) and
     (tf_pic_uses_got in target_info.flags) and
     (pi_needs_got in current_procinfo.flags) then
    begin
      notdarwin:=not(target_info.system in [system_i386_darwin,system_i386_iphonesim]);
      { on darwin, the got register is virtual (and allocated earlier already) }
      if notdarwin then
        { ecx could be used in leaf procedures that don't use ecx to pass
          a parameter }
        current_procinfo.got:=NR_EBX;
      if notdarwin { needs testing before it can be enabled for non-darwin platforms
                     and
                     (current_settings.optimizecputype in [cpu_Pentium2,cpu_Pentium3,cpu_Pentium4]) } then
        begin
          current_module.requires_ebx_pic_helper:=true;
          cg.a_call_name_static(list,'fpc_geteipasebx');
        end
      else
        begin
          { call/pop is faster than call/ret/mov on Core Solo and later
            according to Apple's benchmarking -- and all Intel Macs
            have at least a Core Solo (furthermore, the i386 - Pentium 1
            don't have a return stack buffer) }
          a_call_name_static(list,current_procinfo.CurrGOTLabel.name);
          a_label(list,current_procinfo.CurrGotLabel);
          list.concat(taicpu.op_reg(A_POP,S_L,current_procinfo.got))
        end;
      if notdarwin then
        begin
          list.concat(taicpu.op_sym_ofs_reg(A_ADD,S_L,current_asmdata.RefAsmSymbol('_GLOBAL_OFFSET_TABLE_'),0,NR_PIC_OFFSET_REG));
          list.concat(tai_regalloc.alloc(NR_PIC_OFFSET_REG,nil));
        end;
    end;
end;

procedure tcg386.g_intf_wrapper(list: TAsmList; procdef: tprocdef; const labelname: string; ioffset: longint);
{
  possible calling conventions:
                default stdcall cdecl pascal register
  default(0):      OK     OK     OK     OK      OK
  virtual(1):      OK     OK     OK     OK      OK (2 or 1)

  (0):
    set self parameter to correct value
    jmp mangledname

  (1): The wrapper code uses %ecx to reach the virtual method address
    set self to correct value
    move self,%eax
    mov 0(%eax),%ecx      ; load vmt
    jmp vmtoffs(%ecx)     ; method offs

  (2): Virtual methods use values pushed on the stack to reach the method
       address, so the following code is generated:
    set self to correct value
    push %ebx             ; allocate space for function address
    push %eax
    mov self,%eax
    mov 0(%eax),%eax      ; load vmt
    mov vmtoffs(%eax),eax ; method offs
    mov %eax,4(%esp)
    pop %eax
    ret 0                 ; jmp to the address
}

  { returns whether ECX is used (either as a parameter or is nonvolatile and shouldn't be changed) }
  function is_ecx_used: boolean;
  var
    i: Integer;
    hp: tparavarsym;
    paraloc: PCGParaLocation;
  begin
    if not (RS_ECX in paramanager.get_volatile_registers_int(procdef.proccalloption)) then
      exit(true);
    for i:=0 to procdef.paras.count-1 do
      begin
        hp:=tparavarsym(procdef.paras[i]);
        paraloc:=hp.paraloc[calleeside].Location;
        while paraloc<>nil do
          begin
            if (paraloc^.Loc=LOC_REGISTER) and (getsupreg(paraloc^.register)=RS_ECX) then
              exit(true);
            paraloc:=paraloc^.Next;
          end;
      end;
    Result:=false;
  end;

  procedure getselftoeax(offs: longint);
  var
    href : treference;
    selfoffsetfromsp : longint;
  begin
    { mov offset(%esp),%eax }
    if (procdef.proccalloption<>pocall_register) then
      begin
        { framepointer is pushed for nested procs }
        if procdef.parast.symtablelevel>normal_function_level then
          selfoffsetfromsp:=2*sizeof(aint)
        else
          selfoffsetfromsp:=sizeof(aint);
        reference_reset_base(href,NR_ESP,selfoffsetfromsp+offs,4);
        cg.a_load_ref_reg(list,OS_ADDR,OS_ADDR,href,NR_EAX);
      end;
  end;

  procedure loadvmtto(reg: tregister);
  var
    href : treference;
  begin
    { mov 0(%eax),%reg ; load vmt }
    reference_reset_base(href,NR_EAX,0,4);
    cg.a_load_ref_reg(list,OS_ADDR,OS_ADDR,href,reg);
  end;

  procedure op_onregmethodaddr(op: TAsmOp; reg: tregister);
  var
    href : treference;
  begin
    if (procdef.extnumber=$ffff) then
      Internalerror(200006139);
    { call/jmp vmtoffs(%reg) ; method offs }
    reference_reset_base(href,reg,tobjectdef(procdef.struct).vmtmethodoffset(procdef.extnumber),4);
    list.concat(taicpu.op_ref(op,S_L,href));
  end;

  procedure loadmethodoffstoeax;
  var
    href : treference;
  begin
    if (procdef.extnumber=$ffff) then
      Internalerror(200006139);
    { mov vmtoffs(%eax),%eax ; method offs }
    reference_reset_base(href,NR_EAX,tobjectdef(procdef.struct).vmtmethodoffset(procdef.extnumber),4);
    cg.a_load_ref_reg(list,OS_ADDR,OS_ADDR,href,NR_EAX);
  end;

var
  lab : tasmsymbol;
  make_global : boolean;
  href : treference;
begin
  if not(procdef.proctypeoption in [potype_function,potype_procedure]) then
    Internalerror(200006137);
  if not assigned(procdef.struct) or
     (procdef.procoptions*[po_classmethod, po_staticmethod,
       po_methodpointer, po_interrupt, po_iocheck]<>[]) then
    Internalerror(200006138);
  if procdef.owner.symtabletype<>ObjectSymtable then
    Internalerror(200109191);

  make_global:=false;
  if (not current_module.is_unit) or
     create_smartlink or
     (procdef.owner.defowner.owner.symtabletype=globalsymtable) then
    make_global:=true;
  if make_global then
    List.concat(Tai_symbol.Createname_global(labelname,AT_FUNCTION,0))
  else
    List.concat(Tai_symbol.Createname(labelname,AT_FUNCTION,0));

  { set param1 interface to self }
  g_adjust_self_value(list,procdef,ioffset);

  if (po_virtualmethod in procdef.procoptions) and
     not is_objectpascal_helper(procdef.struct) then
    begin
      if (procdef.proccalloption=pocall_register) and is_ecx_used then
        begin
          { case 2 }
          list.concat(taicpu.op_reg(A_PUSH,S_L,NR_EBX));  { allocate space for address }
          list.concat(taicpu.op_reg(A_PUSH,S_L,NR_EAX));
          getselftoeax(8);
          loadvmtto(NR_EAX);
          loadmethodoffstoeax;
          { mov %eax,4(%esp) }
          reference_reset_base(href,NR_ESP,4,4);
          list.concat(taicpu.op_reg_ref(A_MOV,S_L,NR_EAX,href));
          { pop %eax }
          list.concat(taicpu.op_reg(A_POP,S_L,NR_EAX));
          { ret ; jump to the address }
          list.concat(taicpu.op_none(A_RET,S_L));
        end
      else
        begin
          { case 1 }
          getselftoeax(0);
          loadvmtto(NR_ECX);
          op_onregmethodaddr(A_JMP,NR_ECX);
        end;
    end
  { case 0 }
  else
    begin
      if (target_info.system <> system_i386_darwin) then
        begin
          lab:=current_asmdata.RefAsmSymbol(procdef.mangledname);
          list.concat(taicpu.op_sym(A_JMP,S_NO,lab))
        end
      else
        list.concat(taicpu.op_sym(A_JMP,S_NO,get_darwin_call_stub(procdef.mangledname,false)))
    end;
  List.concat(Tai_symbol_end.Createname(labelname));
end;

{ ************* 64bit operations ************ }

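{ Maps a generic 64-bit operation to the instruction pair used for the
  low and the high dword: ADD/ADC, SUB/SBB, and the plain logical
  instruction twice for AND, OR and XOR. }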
procedure tcg64f386.get_64bit_ops(op:TOpCG;var op1,op2:TAsmOp);
begin
  case op of
    OP_ADD :
      begin
        op1:=A_ADD;
        op2:=A_ADC;
      end;
    OP_SUB :
      begin
        op1:=A_SUB;
        op2:=A_SBB;
      end;
    OP_XOR :
      begin
        op1:=A_XOR;
        op2:=A_XOR;
      end;
    OP_OR :
      begin
        op1:=A_OR;
        op2:=A_OR;
      end;
    OP_AND :
      begin
        op1:=A_AND;
        op2:=A_AND;
      end;
    else
      internalerror(200203241);
  end;
end;

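{ Applies a 64-bit operation between a memory operand and a register pair:
  the low dwords are combined first, then the high dwords with the carrying
  variant of the instruction. NEG/NOT are handled by loading the value into
  the register pair first. }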
procedure tcg64f386.a_op64_ref_reg(list : TAsmList;op:TOpCG;size : tcgsize;const ref : treference;reg : tregister64);
var
  op1,op2 : TAsmOp;
  tempref : treference;
begin
  if not(op in [OP_NEG,OP_NOT]) then
    begin
      get_64bit_ops(op,op1,op2);
      tempref:=ref;
      tcgx86(cg).make_simple_ref(list,tempref);
      list.concat(taicpu.op_ref_reg(op1,S_L,tempref,reg.reglo));
      inc(tempref.offset,4);
      list.concat(taicpu.op_ref_reg(op2,S_L,tempref,reg.reghi));
    end
  else
    begin
      a_load64_ref_reg(list,ref,reg);
      a_op64_reg_reg(list,op,size,reg,reg);
    end;
end;

procedure tcg64f386.a_op64_reg_reg(list : TAsmList;op:TOpCG;size : tcgsize;regsrc,regdst : tregister64);
var
  op1,op2 : TAsmOp;
begin
  case op of
    OP_NEG :
      begin
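        { 64-bit negation: invert the high dword, negate the low dword
          (which sets the carry flag unless the low dword is zero) and
          use SBB with -1 so the high dword is incremented only when no
          borrow was produced }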
        if (regsrc.reglo<>regdst.reglo) then
          a_load64_reg_reg(list,regsrc,regdst);
        list.concat(taicpu.op_reg(A_NOT,S_L,regdst.reghi));
        list.concat(taicpu.op_reg(A_NEG,S_L,regdst.reglo));
        list.concat(taicpu.op_const_reg(A_SBB,S_L,-1,regdst.reghi));
        exit;
      end;
    OP_NOT :
      begin
        if (regsrc.reglo<>regdst.reglo) then
          a_load64_reg_reg(list,regsrc,regdst);
        list.concat(taicpu.op_reg(A_NOT,S_L,regdst.reghi));
        list.concat(taicpu.op_reg(A_NOT,S_L,regdst.reglo));
        exit;
      end;
  end;
  get_64bit_ops(op,op1,op2);
  list.concat(taicpu.op_reg_reg(op1,S_L,regsrc.reglo,regdst.reglo));
  list.concat(taicpu.op_reg_reg(op2,S_L,regsrc.reghi,regdst.reghi));
end;

procedure tcg64f386.a_op64_const_reg(list : TAsmList;op:TOpCG;size : tcgsize;value : int64;reg : tregister64);
var
  op1,op2 : TAsmOp;
begin
  case op of
    OP_AND,OP_OR,OP_XOR:
      begin
        cg.a_op_const_reg(list,op,OS_32,tcgint(lo(value)),reg.reglo);
        cg.a_op_const_reg(list,op,OS_32,tcgint(hi(value)),reg.reghi);
      end;
    OP_ADD, OP_SUB:
      begin
        // can't use a_op_const_ref because this may use dec/inc
        get_64bit_ops(op,op1,op2);
        list.concat(taicpu.op_const_reg(op1,S_L,aint(lo(value)),reg.reglo));
        list.concat(taicpu.op_const_reg(op2,S_L,aint(hi(value)),reg.reghi));
      end;
    else
      internalerror(200204021);
  end;
end;

procedure tcg64f386.a_op64_const_ref(list : TAsmList;op:TOpCG;size : tcgsize;value : int64;const ref : treference);
var
  op1,op2 : TAsmOp;
  tempref : treference;
begin
  tempref:=ref;
  tcgx86(cg).make_simple_ref(list,tempref);
  case op of
    OP_AND,OP_OR,OP_XOR:
      begin
        cg.a_op_const_ref(list,op,OS_32,tcgint(lo(value)),tempref);
        inc(tempref.offset,4);
        cg.a_op_const_ref(list,op,OS_32,tcgint(hi(value)),tempref);
      end;
    OP_ADD, OP_SUB:
      begin
        get_64bit_ops(op,op1,op2);
        // can't use a_op_const_ref because this may use dec/inc
        list.concat(taicpu.op_const_ref(op1,S_L,aint(lo(value)),tempref));
        inc(tempref.offset,4);
        list.concat(taicpu.op_const_ref(op2,S_L,aint(hi(value)),tempref));
      end;
    else
      internalerror(200204022);
  end;
end;

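{ Creates the i386 code generator objects used by the rest of the compiler. }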
procedure create_codegen;
begin
  cg := tcg386.create;
  cg64 := tcg64f386.create;
end;

end.