as_callfunc_x86.cpp

  1. /*
  2. AngelCode Scripting Library
  3. Copyright (c) 2003-2018 Andreas Jonsson
  4. This software is provided 'as-is', without any express or implied
  5. warranty. In no event will the authors be held liable for any
  6. damages arising from the use of this software.
  7. Permission is granted to anyone to use this software for any
  8. purpose, including commercial applications, and to alter it and
  9. redistribute it freely, subject to the following restrictions:
  10. 1. The origin of this software must not be misrepresented; you
  11. must not claim that you wrote the original software. If you use
  12. this software in a product, an acknowledgment in the product
  13. documentation would be appreciated but is not required.
  14. 2. Altered source versions must be plainly marked as such, and
  15. must not be misrepresented as being the original software.
  16. 3. This notice may not be removed or altered from any source
  17. distribution.
  18. The original version of this library can be located at:
  19. http://www.angelcode.com/angelscript/
  20. Andreas Jonsson
  21. [email protected]
  22. */
  23. // Modified by Lasse Oorni for Urho3D
  24. //
  25. // as_callfunc_x86.cpp
  26. //
  27. // These functions handle the actual calling of system functions
  28. //
  29. // Added support for functor methods by Jordi Oliveras Rovira in April, 2014.
  30. //
  31. #include "as_config.h"
  32. #ifndef AS_MAX_PORTABILITY
  33. #ifdef AS_X86
  34. #include "as_callfunc.h"
  35. #include "as_scriptengine.h"
  36. #include "as_texts.h"
  37. #include "as_tokendef.h"
  38. #include "as_context.h"
  39. // Urho3D: work around Clang crash and assembler error on GCC
  40. // Because Urho3D's AngelScript API convention forbids C++ exceptions leaking to syscalls,
  41. // the lack of prologues / epilogues should not be critical
  42. #if defined(__clang__) || defined(__GNUC__)
  43. #undef __OPTIMIZE__
  44. #endif
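// (Undefining __OPTIMIZE__ compiles out the '#ifdef __OPTIMIZE__' prologue/epilogue
// blocks in the inline assembly below; that is the missing prologue/epilogue the note
// above refers to.)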
  45. BEGIN_AS_NAMESPACE
  46. //
  47. // At some compiler optimization levels the functions don't clear the FPU
  48. // stack themselves, so we have to do it as part of calling the native functions;
  49. // the compiler cannot predict when it would need to do so itself due to the
  50. // dynamic nature of scripts.
  51. //
  52. // - fninit clears the FPU stack and the FPU control word
  53. // - emms only clears the FPU stack, while preserving the FPU control word
  54. //
  55. // By default I use fninit as it seems to be what works for most people,
  56. // but some may find it necessary to define this as emms instead.
  57. //
  58. // TODO: Figure out when one or the other must be used, and a way to
  59. // configure this automatically in as_config.h
  60. //
  61. #ifndef CLEAR_FPU_STACK
  62. #define CLEAR_FPU_STACK fninit
  63. #endif
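// For example, a build that needs to preserve the FPU control word could define this
// before this point (a hypothetical configuration, not something this file sets up):
//   #define CLEAR_FPU_STACK emms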
  64. // These macros are just to allow me to use the above macro in the GNUC style inline assembly
  65. #define _S(x) _TOSTRING(x)
  66. #define _TOSTRING(x) #x
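// Example: with the default above, _S(CLEAR_FPU_STACK) expands to the string literal
// "fninit", which is what gets pasted into the AT&T inline assembly blocks below.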
  67. // Prototypes
  68. asQWORD CallCDeclFunction(const asDWORD *args, int paramSize, asFUNCTION_t func);
  69. asQWORD CallCDeclFunctionObjLast(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func);
  70. asQWORD CallCDeclFunctionObjFirst(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func);
  71. asQWORD CallCDeclFunctionRetByRef(const asDWORD *args, int paramSize, asFUNCTION_t func, void *retPtr);
  72. asQWORD CallCDeclFunctionRetByRefObjLast(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func, void *retPtr);
  73. asQWORD CallCDeclFunctionRetByRefObjFirst(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func, void *retPtr);
  74. asQWORD CallSTDCallFunction(const asDWORD *args, int paramSize, asFUNCTION_t func);
  75. asQWORD CallThisCallFunction(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func);
  76. asQWORD CallThisCallFunctionRetByRef(const void *, const asDWORD *, int, asFUNCTION_t, void *retPtr);
  77. asDWORD GetReturnedFloat();
  78. asQWORD GetReturnedDouble();
  79. asQWORD CallSystemFunctionNative(asCContext *context, asCScriptFunction *descr, void *obj, asDWORD *args, void *retPointer, asQWORD &/*retQW2*/, void *secondObject)
  80. {
  81. asCScriptEngine *engine = context->m_engine;
  82. asSSystemFunctionInterface *sysFunc = descr->sysFuncIntf;
  83. asQWORD retQW = 0;
  84. // Prepare the parameters
  85. asDWORD paramBuffer[64];
  86. int callConv = sysFunc->callConv;
  87. // Changed to also check for ICC_THISCALL_OBJFIRST or
  88. // ICC_THISCALL_OBJLAST when sysFunc->takesObjByVal (avoids duplicating the copy code).
  89. // Check if this is a THISCALL_OBJ* calling convention (in that case the secondObject pointer must be added to the stack).
  90. bool isThisCallMethod = callConv >= ICC_THISCALL_OBJLAST;
  91. int paramSize = isThisCallMethod || sysFunc->takesObjByVal ? 0 : sysFunc->paramSize;
  92. int dpos = 1;
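// dpos starts at 1 so that paramBuffer[0] stays free (see 'Keep a free location at the
// beginning' further down); the ICC_STDCALL_RETURNINMEM case can then step args back one
// slot to prepend the hidden return pointer.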
  93. if( isThisCallMethod &&
  94. (callConv >= ICC_THISCALL_OBJFIRST &&
  95. callConv <= ICC_VIRTUAL_THISCALL_OBJFIRST_RETURNINMEM) )
  96. {
  97. // Add the object pointer as the first parameter
  98. paramBuffer[dpos++] = (asDWORD)secondObject;
  99. paramSize++;
  100. }
  101. if( sysFunc->takesObjByVal || isThisCallMethod )
  102. {
  103. int spos = 0;
  104. for( asUINT n = 0; n < descr->parameterTypes.GetLength(); n++ )
  105. {
  106. if( descr->parameterTypes[n].IsObject() && !descr->parameterTypes[n].IsObjectHandle() && !descr->parameterTypes[n].IsReference() )
  107. {
  108. #ifdef COMPLEX_OBJS_PASSED_BY_REF
  109. if( descr->parameterTypes[n].GetTypeInfo()->flags & COMPLEX_MASK )
  110. {
  111. paramBuffer[dpos++] = args[spos++];
  112. paramSize++;
  113. }
  114. else
  115. #endif
  116. {
  117. // Copy the object's memory to the buffer
  118. // TODO: bug: Must call the object's copy constructor instead of doing a memcpy,
  119. // as the object may hold a pointer to itself. It's not enough to
  120. // change only this memcpy as the assembler routine also makes a copy
  121. // of paramBuffer to the final stack location. To avoid the second
  122. // copy the C++ routine should point paramBuffer to the final stack
  123. // position and copy the values directly to that location. The assembler
  124. // routines then don't need to copy anything, and will just be
  125. // responsible for setting up the registers and the stack frame appropriately.
  126. memcpy(&paramBuffer[dpos], *(void**)(args+spos), descr->parameterTypes[n].GetSizeInMemoryBytes());
  127. // Delete the original memory
  128. engine->CallFree(*(char**)(args+spos));
  129. spos++;
  130. dpos += descr->parameterTypes[n].GetSizeInMemoryDWords();
  131. paramSize += descr->parameterTypes[n].GetSizeInMemoryDWords();
  132. }
  133. }
  134. else
  135. {
  136. // Copy the value directly
  137. paramBuffer[dpos++] = args[spos++];
  138. if( descr->parameterTypes[n].GetSizeOnStackDWords() > 1 )
  139. paramBuffer[dpos++] = args[spos++];
  140. paramSize += descr->parameterTypes[n].GetSizeOnStackDWords();
  141. }
  142. }
  143. // Keep a free location at the beginning
  144. args = &paramBuffer[1];
  145. }
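// At this point args refers to the repacked copy starting at paramBuffer[1] (the branch
// above always runs for the THISCALL_OBJ* conventions), so appending the object pointer
// below lands directly after the copied arguments.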
  146. if( isThisCallMethod &&
  147. (callConv >= ICC_THISCALL_OBJLAST &&
  148. callConv <= ICC_VIRTUAL_THISCALL_OBJLAST_RETURNINMEM) )
  149. {
  150. // Add the object pointer as the last parameter
  151. paramBuffer[dpos++] = (asDWORD)secondObject;
  152. paramSize++;
  153. }
  154. // Make the actual call
  155. asFUNCTION_t func = sysFunc->func;
  156. if( sysFunc->hostReturnInMemory )
  157. callConv++;
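// This relies on the internal calling convention enum laying out each
// ICC_*_RETURNINMEM value immediately after its base convention, so the
// increment selects the return-in-memory variant of the same convention.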
  158. switch( callConv )
  159. {
  160. case ICC_CDECL:
  161. retQW = CallCDeclFunction(args, paramSize<<2, func);
  162. break;
  163. case ICC_CDECL_RETURNINMEM:
  164. retQW = CallCDeclFunctionRetByRef(args, paramSize<<2, func, retPointer);
  165. break;
  166. case ICC_STDCALL:
  167. retQW = CallSTDCallFunction(args, paramSize<<2, func);
  168. break;
  169. case ICC_STDCALL_RETURNINMEM:
  170. // Push the return pointer on the stack
  171. paramSize++;
  172. args--;
  173. *(asPWORD*)args = (size_t)retPointer;
  174. retQW = CallSTDCallFunction(args, paramSize<<2, func);
  175. break;
  176. case ICC_THISCALL:
  177. case ICC_THISCALL_OBJFIRST:
  178. case ICC_THISCALL_OBJLAST:
  179. retQW = CallThisCallFunction(obj, args, paramSize<<2, func);
  180. break;
  181. case ICC_THISCALL_RETURNINMEM:
  182. case ICC_THISCALL_OBJFIRST_RETURNINMEM:
  183. case ICC_THISCALL_OBJLAST_RETURNINMEM:
  184. retQW = CallThisCallFunctionRetByRef(obj, args, paramSize<<2, func, retPointer);
  185. break;
  186. case ICC_VIRTUAL_THISCALL:
  187. case ICC_VIRTUAL_THISCALL_OBJFIRST:
  188. case ICC_VIRTUAL_THISCALL_OBJLAST:
  189. {
  190. // Get virtual function table from the object pointer
  191. asFUNCTION_t *vftable = *(asFUNCTION_t**)obj;
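// Here func does not hold a callable address but (with the GNU-style virtual method
// representation) the method's offset into the vtable; >>2 turns that byte offset into
// an index into the 4-byte vtable entries before the real address is fetched.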
  192. retQW = CallThisCallFunction(obj, args, paramSize<<2, vftable[FuncPtrToUInt(func)>>2]);
  193. }
  194. break;
  195. case ICC_VIRTUAL_THISCALL_RETURNINMEM:
  196. case ICC_VIRTUAL_THISCALL_OBJFIRST_RETURNINMEM:
  197. case ICC_VIRTUAL_THISCALL_OBJLAST_RETURNINMEM:
  198. {
  199. // Get virtual function table from the object pointer
  200. asFUNCTION_t *vftable = *(asFUNCTION_t**)obj;
  201. retQW = CallThisCallFunctionRetByRef(obj, args, paramSize<<2, vftable[FuncPtrToUInt(func)>>2], retPointer);
  202. }
  203. break;
  204. case ICC_CDECL_OBJLAST:
  205. retQW = CallCDeclFunctionObjLast(obj, args, paramSize<<2, func);
  206. break;
  207. case ICC_CDECL_OBJLAST_RETURNINMEM:
  208. // Call the system object method as a cdecl with the obj ref as the last parameter
  209. retQW = CallCDeclFunctionRetByRefObjLast(obj, args, paramSize<<2, func, retPointer);
  210. break;
  211. case ICC_CDECL_OBJFIRST:
  212. // Call the system object method as a cdecl with the obj ref as the first parameter
  213. retQW = CallCDeclFunctionObjFirst(obj, args, paramSize<<2, func);
  214. break;
  215. case ICC_CDECL_OBJFIRST_RETURNINMEM:
  216. // Call the system object method as a cdecl with the obj ref as the first parameter
  217. retQW = CallCDeclFunctionRetByRefObjFirst(obj, args, paramSize<<2, func, retPointer);
  218. break;
  219. default:
  220. context->SetInternalException(TXT_INVALID_CALLING_CONVENTION);
  221. }
  222. // If the return is a float value we need to get the value from the FP register
  223. if( sysFunc->hostReturnFloat )
  224. {
  225. if( sysFunc->hostReturnSize == 1 )
  226. *(asDWORD*)&retQW = GetReturnedFloat();
  227. else
  228. retQW = GetReturnedDouble();
  229. }
  230. return retQW;
  231. }
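// Illustrative only (not part of the original file): a host function registered with
// the engine as cdecl, e.g.
//
//     float AddFloats(float a, float b) { return a + b; }
//     engine->RegisterGlobalFunction("float AddFloats(float a, float b)",
//                                    asFUNCTION(AddFloats), asCALL_CDECL);
//
// is dispatched through CallSystemFunctionNative above with callConv == ICC_CDECL, and
// its float result is read back from ST(0) by GetReturnedFloat() near the end of this file.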
  232. // On GCC we need to prevent the compiler from inlining these assembler routines when
  233. // optimizing for speed (-O3), as the loop labels would get duplicated, causing compile errors.
  234. #ifdef __GNUC__
  235. #define NOINLINE __attribute ((__noinline__))
  236. #else
  237. #define NOINLINE
  238. #endif
  239. asQWORD NOINLINE CallCDeclFunction(const asDWORD *args, int paramSize, asFUNCTION_t func)
  240. {
  241. volatile asQWORD retQW = 0;
  242. #if defined ASM_INTEL
  243. // Copy the data to the real stack. If we fail to do
  244. // this we may run into trouble in case of exceptions.
  245. __asm
  246. {
  247. // We must save registers that are used
  248. push ecx
  249. // Clear the FPU stack, in case the called function doesn't do it by itself
  250. CLEAR_FPU_STACK
  251. // Copy arguments from script
  252. // stack to application stack
  253. mov ecx, paramSize
  254. mov eax, args
  255. add eax, ecx
  256. cmp ecx, 0
  257. je endcopy
  258. copyloop:
  259. sub eax, 4
  260. push dword ptr [eax]
  261. sub ecx, 4
  262. jne copyloop
  263. endcopy:
  264. // Call function
  265. call [func]
  266. // Pop arguments from stack
  267. add esp, paramSize
  268. // Copy return value from EAX:EDX
  269. lea ecx, retQW
  270. mov [ecx], eax
  271. mov 4[ecx], edx
  272. // Restore registers
  273. pop ecx
  274. }
  275. #elif defined ASM_AT_N_T
  276. // It is not possible to rely on ESP or EBP to refer to variables or arguments on the stack:
  277. // depending on compiler settings EBP may not even be used, and ESP is not always at the
  278. // same offset from the local variables. Because the code adjusts the ESP register it is not
  279. // possible to refer to the arguments through symbolic names below.
  280. // It is also not possible to rely on the memory layout of the function arguments, because
  281. // on some compiler versions and settings the arguments may be copied to local variables in a
  282. // different order before they are accessed by the rest of the code.
  283. // I'm copying the arguments into this array where I know the exact memory layout. The address
  284. // of this array will then be passed to the inline asm in the EDX register.
  285. volatile asPWORD a[] = {asPWORD(args), asPWORD(paramSize), asPWORD(func)};
  286. asm __volatile__(
  287. #ifdef __OPTIMIZE__
  288. // When compiled with optimizations the stack unwind doesn't work properly,
  289. // causing exceptions to crash the application. By adding this prologue
  290. // and the epilogue below, the stack unwind works as it should.
  291. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  292. "pushl %%ebp \n"
  293. ".cfi_startproc \n"
  294. ".cfi_adjust_cfa_offset 4 \n"
  295. ".cfi_rel_offset ebp, 0 \n"
  296. "movl %%esp, %%ebp \n"
  297. ".cfi_def_cfa_register ebp \n"
  298. #endif
  299. _S(CLEAR_FPU_STACK) "\n"
  300. "pushl %%ebx \n"
  301. "movl %%edx, %%ebx \n"
  302. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  303. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  304. // to calculate how much we will put on the stack during this call.
  305. "movl 4(%%ebx), %%eax \n" // paramSize
  306. "addl $4, %%eax \n" // counting esp that we will push on the stack
  307. "movl %%esp, %%ecx \n"
  308. "subl %%eax, %%ecx \n"
  309. "andl $15, %%ecx \n"
  310. "movl %%esp, %%eax \n"
  311. "subl %%ecx, %%esp \n"
  312. "pushl %%eax \n" // Store the original stack pointer
  313. // Copy all arguments to the stack and call the function
  314. "movl 4(%%ebx), %%ecx \n" // paramSize
  315. "movl 0(%%ebx), %%eax \n" // args
  316. "addl %%ecx, %%eax \n" // push arguments on the stack
  317. "cmp $0, %%ecx \n"
  318. "je endcopy \n"
  319. "copyloop: \n"
  320. "subl $4, %%eax \n"
  321. "pushl (%%eax) \n"
  322. "subl $4, %%ecx \n"
  323. "jne copyloop \n"
  324. "endcopy: \n"
  325. "call *8(%%ebx) \n"
  326. "addl 4(%%ebx), %%esp \n" // pop arguments
  327. // Pop the alignment bytes
  328. "popl %%esp \n"
  329. "popl %%ebx \n"
  330. #ifdef __OPTIMIZE__
  331. // Epilogue
  332. "movl %%ebp, %%esp \n"
  333. ".cfi_def_cfa_register esp \n"
  334. "popl %%ebp \n"
  335. ".cfi_adjust_cfa_offset -4 \n"
  336. ".cfi_restore ebp \n"
  337. ".cfi_endproc \n"
  338. #endif
  339. // Copy EAX:EDX to retQW. As the stack pointer has been
  340. // restored it is now safe to access the local variable
  341. "leal %1, %%ecx \n"
  342. "movl %%eax, 0(%%ecx) \n"
  343. "movl %%edx, 4(%%ecx) \n"
  344. : // output
  345. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  346. : "%eax", "%ecx" // clobber
  347. );
  348. #endif
  349. return retQW;
  350. }
  351. asQWORD NOINLINE CallCDeclFunctionObjLast(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func)
  352. {
  353. volatile asQWORD retQW = 0;
  354. #if defined ASM_INTEL
  355. // Copy the data to the real stack. If we fail to do
  356. // this we may run into trouble in case of exceptions.
  357. __asm
  358. {
  359. // We must save registers that are used
  360. push ecx
  361. // Clear the FPU stack, in case the called function doesn't do it by itself
  362. CLEAR_FPU_STACK
  363. // Push the object pointer as the last argument to the function
  364. push obj
  365. // Copy arguments from script
  366. // stack to application stack
  367. mov ecx, paramSize
  368. mov eax, args
  369. add eax, ecx
  370. cmp ecx, 0
  371. je endcopy
  372. copyloop:
  373. sub eax, 4
  374. push dword ptr [eax]
  375. sub ecx, 4
  376. jne copyloop
  377. endcopy:
  378. // Call function
  379. call [func]
  380. // Pop arguments from stack
  381. add esp, paramSize
  382. add esp, 4
  383. // Copy return value from EAX:EDX
  384. lea ecx, retQW
  385. mov [ecx], eax
  386. mov 4[ecx], edx
  387. // Restore registers
  388. pop ecx
  389. }
  390. #elif defined ASM_AT_N_T
  391. volatile asPWORD a[] = {asPWORD(obj), asPWORD(args), asPWORD(paramSize), asPWORD(func)};
  392. asm __volatile__ (
  393. #ifdef __OPTIMIZE__
  394. // When compiled with optimizations the stack unwind doesn't work properly,
  395. // causing exceptions to crash the application. By adding this prologue
  396. // and the epilogue below, the stack unwind works as it should.
  397. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  398. "pushl %%ebp \n"
  399. ".cfi_startproc \n"
  400. ".cfi_adjust_cfa_offset 4 \n"
  401. ".cfi_rel_offset ebp, 0 \n"
  402. "movl %%esp, %%ebp \n"
  403. ".cfi_def_cfa_register ebp \n"
  404. #endif
  405. _S(CLEAR_FPU_STACK) "\n"
  406. "pushl %%ebx \n"
  407. "movl %%edx, %%ebx \n"
  408. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  409. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  410. // to calculate how much we will put on the stack during this call.
  411. "movl 8(%%ebx), %%eax \n" // paramSize
  412. "addl $8, %%eax \n" // counting esp that we will push on the stack
  413. "movl %%esp, %%ecx \n"
  414. "subl %%eax, %%ecx \n"
  415. "andl $15, %%ecx \n"
  416. "movl %%esp, %%eax \n"
  417. "subl %%ecx, %%esp \n"
  418. "pushl %%eax \n" // Store the original stack pointer
  419. "pushl 0(%%ebx) \n" // obj
  420. "movl 8(%%ebx), %%ecx \n" // paramSize
  421. "movl 4(%%ebx), %%eax \n" // args
  422. "addl %%ecx, %%eax \n" // push arguments on the stack
  423. "cmp $0, %%ecx \n"
  424. "je endcopy8 \n"
  425. "copyloop8: \n"
  426. "subl $4, %%eax \n"
  427. "pushl (%%eax) \n"
  428. "subl $4, %%ecx \n"
  429. "jne copyloop8 \n"
  430. "endcopy8: \n"
  431. "call *12(%%ebx) \n"
  432. "addl 8(%%ebx), %%esp \n" // pop arguments
  433. "addl $4, %%esp \n" // pop obj
  434. // Pop the alignment bytes
  435. "popl %%esp \n"
  436. "popl %%ebx \n"
  437. #ifdef __OPTIMIZE__
  438. // Epilogue
  439. "movl %%ebp, %%esp \n"
  440. ".cfi_def_cfa_register esp \n"
  441. "popl %%ebp \n"
  442. ".cfi_adjust_cfa_offset -4 \n"
  443. ".cfi_restore ebp \n"
  444. ".cfi_endproc \n"
  445. #endif
  446. // Copy EAX:EDX to retQW. As the stack pointer has been
  447. // restored it is now safe to access the local variable
  448. "leal %1, %%ecx \n"
  449. "movl %%eax, 0(%%ecx) \n"
  450. "movl %%edx, 4(%%ecx) \n"
  451. : // output
  452. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  453. : "%eax", "%ecx" // clobber
  454. );
  455. #endif
  456. return retQW;
  457. }
  458. asQWORD NOINLINE CallCDeclFunctionObjFirst(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func)
  459. {
  460. volatile asQWORD retQW = 0;
  461. #if defined ASM_INTEL
  462. // Copy the data to the real stack. If we fail to do
  463. // this we may run into trouble in case of exceptions.
  464. __asm
  465. {
  466. // We must save registers that are used
  467. push ecx
  468. // Clear the FPU stack, in case the called function doesn't do it by itself
  469. CLEAR_FPU_STACK
  470. // Copy arguments from script
  471. // stack to application stack
  472. mov ecx, paramSize
  473. mov eax, args
  474. add eax, ecx
  475. cmp ecx, 0
  476. je endcopy
  477. copyloop:
  478. sub eax, 4
  479. push dword ptr [eax]
  480. sub ecx, 4
  481. jne copyloop
  482. endcopy:
  483. // push object as first parameter
  484. push obj
  485. // Call function
  486. call [func]
  487. // Pop arguments from stack
  488. add esp, paramSize
  489. add esp, 4
  490. // Copy return value from EAX:EDX
  491. lea ecx, retQW
  492. mov [ecx], eax
  493. mov 4[ecx], edx
  494. // Restore registers
  495. pop ecx
  496. }
  497. #elif defined ASM_AT_N_T
  498. volatile asPWORD a[] = {asPWORD(obj), asPWORD(args), asPWORD(paramSize), asPWORD(func)};
  499. asm __volatile__ (
  500. #ifdef __OPTIMIZE__
  501. // When compiled with optimizations the stack unwind doesn't work properly,
  502. // causing exceptions to crash the application. By adding this prologue
  503. // and the epilogue below, the stack unwind works as it should.
  504. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  505. "pushl %%ebp \n"
  506. ".cfi_startproc \n"
  507. ".cfi_adjust_cfa_offset 4 \n"
  508. ".cfi_rel_offset ebp, 0 \n"
  509. "movl %%esp, %%ebp \n"
  510. ".cfi_def_cfa_register ebp \n"
  511. #endif
  512. _S(CLEAR_FPU_STACK) "\n"
  513. "pushl %%ebx \n"
  514. "movl %%edx, %%ebx \n"
  515. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  516. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  517. // to calculate how much we will put on the stack during this call.
  518. "movl 8(%%ebx), %%eax \n" // paramSize
  519. "addl $8, %%eax \n" // counting esp that we will push on the stack
  520. "movl %%esp, %%ecx \n"
  521. "subl %%eax, %%ecx \n"
  522. "andl $15, %%ecx \n"
  523. "movl %%esp, %%eax \n"
  524. "subl %%ecx, %%esp \n"
  525. "pushl %%eax \n" // Store the original stack pointer
  526. "movl 8(%%ebx), %%ecx \n" // paramSize
  527. "movl 4(%%ebx), %%eax \n" // args
  528. "addl %%ecx, %%eax \n" // push arguments on the stack
  529. "cmp $0, %%ecx \n"
  530. "je endcopy6 \n"
  531. "copyloop6: \n"
  532. "subl $4, %%eax \n"
  533. "pushl (%%eax) \n"
  534. "subl $4, %%ecx \n"
  535. "jne copyloop6 \n"
  536. "endcopy6: \n"
  537. "pushl 0(%%ebx) \n" // push obj
  538. "call *12(%%ebx) \n"
  539. "addl 8(%%ebx), %%esp \n" // pop arguments
  540. "addl $4, %%esp \n" // pop obj
  541. // Pop the alignment bytes
  542. "popl %%esp \n"
  543. "popl %%ebx \n"
  544. #ifdef __OPTIMIZE__
  545. // Epilogue
  546. "movl %%ebp, %%esp \n"
  547. ".cfi_def_cfa_register esp \n"
  548. "popl %%ebp \n"
  549. ".cfi_adjust_cfa_offset -4 \n"
  550. ".cfi_restore ebp \n"
  551. ".cfi_endproc \n"
  552. #endif
  553. // Copy EAX:EDX to retQW. As the stack pointer has been
  554. // restored it is now safe to access the local variable
  555. "leal %1, %%ecx \n"
  556. "movl %%eax, 0(%%ecx) \n"
  557. "movl %%edx, 4(%%ecx) \n"
  558. : // output
  559. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  560. : "%eax", "%ecx" // clobber
  561. );
  562. #endif
  563. return retQW;
  564. }
  565. asQWORD NOINLINE CallCDeclFunctionRetByRefObjFirst(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func, void *retPtr)
  566. {
  567. volatile asQWORD retQW = 0;
  568. #if defined ASM_INTEL
  569. // Copy the data to the real stack. If we fail to do
  570. // this we may run into trouble in case of exceptions.
  571. __asm
  572. {
  573. // We must save registers that are used
  574. push ecx
  575. // Clear the FPU stack, in case the called function doesn't do it by itself
  576. CLEAR_FPU_STACK
  577. // Copy arguments from script
  578. // stack to application stack
  579. mov ecx, paramSize
  580. mov eax, args
  581. add eax, ecx
  582. cmp ecx, 0
  583. je endcopy
  584. copyloop:
  585. sub eax, 4
  586. push dword ptr [eax]
  587. sub ecx, 4
  588. jne copyloop
  589. endcopy:
  590. // Push the object pointer
  591. push obj
  592. // Push the return pointer
  593. push retPtr;
  594. // Call function
  595. call [func]
  596. // Pop arguments from stack
  597. add esp, paramSize
  598. #ifndef CALLEE_POPS_HIDDEN_RETURN_POINTER
  599. // Pop the return pointer and the object pointer
  600. add esp, 8
  601. #else
  602. add esp, 4
  603. #endif
  604. // Copy return value from EAX:EDX
  605. lea ecx, retQW
  606. mov [ecx], eax
  607. mov 4[ecx], edx
  608. // Restore registers
  609. pop ecx
  610. }
  611. #elif defined ASM_AT_N_T
  612. volatile asPWORD a[] = {asPWORD(obj), asPWORD(args), asPWORD(paramSize), asPWORD(func), asPWORD(retPtr)};
  613. asm __volatile__ (
  614. #ifdef __OPTIMIZE__
  615. // When compiled with optimizations the stack unwind doesn't work properly,
  616. // causing exceptions to crash the application. By adding this prologue
  617. // and the epilogue below, the stack unwind works as it should.
  618. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  619. "pushl %%ebp \n"
  620. ".cfi_startproc \n"
  621. ".cfi_adjust_cfa_offset 4 \n"
  622. ".cfi_rel_offset ebp, 0 \n"
  623. "movl %%esp, %%ebp \n"
  624. ".cfi_def_cfa_register ebp \n"
  625. #endif
  626. _S(CLEAR_FPU_STACK) "\n"
  627. "pushl %%ebx \n"
  628. "movl %%edx, %%ebx \n"
  629. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  630. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  631. // to calculate how much we will put on the stack during this call.
  632. "movl 8(%%ebx), %%eax \n" // paramSize
  633. "addl $12, %%eax \n" // counting esp that we will push on the stack
  634. "movl %%esp, %%ecx \n"
  635. "subl %%eax, %%ecx \n"
  636. "andl $15, %%ecx \n"
  637. "movl %%esp, %%eax \n"
  638. "subl %%ecx, %%esp \n"
  639. "pushl %%eax \n" // Store the original stack pointer
  640. "movl 8(%%ebx), %%ecx \n" // paramSize
  641. "movl 4(%%ebx), %%eax \n" // args
  642. "addl %%ecx, %%eax \n" // push arguments on the stack
  643. "cmp $0, %%ecx \n"
  644. "je endcopy5 \n"
  645. "copyloop5: \n"
  646. "subl $4, %%eax \n"
  647. "pushl (%%eax) \n"
  648. "subl $4, %%ecx \n"
  649. "jne copyloop5 \n"
  650. "endcopy5: \n"
  651. "pushl 0(%%ebx) \n" // push object first
  652. "pushl 16(%%ebx) \n" // retPtr
  653. "call *12(%%ebx) \n" // func
  654. "addl 8(%%ebx), %%esp \n" // pop arguments
  655. #ifndef CALLEE_POPS_HIDDEN_RETURN_POINTER
  656. "addl $8, %%esp \n" // Pop the return pointer and object pointer
  657. #else
  658. "addl $4, %%esp \n" // Pop the object pointer
  659. #endif
  660. // Pop the alignment bytes
  661. "popl %%esp \n"
  662. "popl %%ebx \n"
  663. #ifdef __OPTIMIZE__
  664. // Epilogue
  665. "movl %%ebp, %%esp \n"
  666. ".cfi_def_cfa_register esp \n"
  667. "popl %%ebp \n"
  668. ".cfi_adjust_cfa_offset -4 \n"
  669. ".cfi_restore ebp \n"
  670. ".cfi_endproc \n"
  671. #endif
  672. // Copy EAX:EDX to retQW. As the stack pointer has been
  673. // restored it is now safe to access the local variable
  674. "leal %1, %%ecx \n"
  675. "movl %%eax, 0(%%ecx) \n"
  676. "movl %%edx, 4(%%ecx) \n"
  677. : // output
  678. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  679. : "%eax", "%ecx" // clobber
  680. );
  681. #endif
  682. return retQW;
  683. }
  684. asQWORD NOINLINE CallCDeclFunctionRetByRef(const asDWORD *args, int paramSize, asFUNCTION_t func, void *retPtr)
  685. {
  686. volatile asQWORD retQW = 0;
  687. #if defined ASM_INTEL
  688. // Copy the data to the real stack. If we fail to do
  689. // this we may run into trouble in case of exceptions.
  690. __asm
  691. {
  692. // We must save registers that are used
  693. push ecx
  694. // Clear the FPU stack, in case the called function doesn't do it by itself
  695. CLEAR_FPU_STACK
  696. // Copy arguments from script
  697. // stack to application stack
  698. mov ecx, paramSize
  699. mov eax, args
  700. add eax, ecx
  701. cmp ecx, 0
  702. je endcopy
  703. copyloop:
  704. sub eax, 4
  705. push dword ptr [eax]
  706. sub ecx, 4
  707. jne copyloop
  708. endcopy:
  709. // Push the return pointer
  710. push retPtr;
  711. // Call function
  712. call [func]
  713. // Pop arguments from stack
  714. add esp, paramSize
  715. #ifndef CALLEE_POPS_HIDDEN_RETURN_POINTER
  716. // Pop the return pointer
  717. add esp, 4
  718. #endif
  719. // Copy return value from EAX:EDX
  720. lea ecx, retQW
  721. mov [ecx], eax
  722. mov 4[ecx], edx
  723. // Restore registers
  724. pop ecx
  725. // return value in EAX or EAX:EDX
  726. }
  727. #elif defined ASM_AT_N_T
  728. volatile asPWORD a[] = {asPWORD(args), asPWORD(paramSize), asPWORD(func), asPWORD(retPtr)};
  729. asm __volatile__ (
  730. #ifdef __OPTIMIZE__
  731. // When compiled with optimizations the stack unwind doesn't work properly,
  732. // causing exceptions to crash the application. By adding this prologue
  733. // and the epilogue below, the stack unwind works as it should.
  734. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  735. "pushl %%ebp \n"
  736. ".cfi_startproc \n"
  737. ".cfi_adjust_cfa_offset 4 \n"
  738. ".cfi_rel_offset ebp, 0 \n"
  739. "movl %%esp, %%ebp \n"
  740. ".cfi_def_cfa_register ebp \n"
  741. #endif
  742. _S(CLEAR_FPU_STACK) "\n"
  743. "pushl %%ebx \n"
  744. "movl %%edx, %%ebx \n"
  745. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  746. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  747. // to calculate how much we will put on the stack during this call.
  748. "movl 4(%%ebx), %%eax \n" // paramSize
  749. "addl $8, %%eax \n" // counting esp that we will push on the stack
  750. "movl %%esp, %%ecx \n"
  751. "subl %%eax, %%ecx \n"
  752. "andl $15, %%ecx \n"
  753. "movl %%esp, %%eax \n"
  754. "subl %%ecx, %%esp \n"
  755. "pushl %%eax \n" // Store the original stack pointer
  756. "movl 4(%%ebx), %%ecx \n" // paramSize
  757. "movl 0(%%ebx), %%eax \n" // args
  758. "addl %%ecx, %%eax \n" // push arguments on the stack
  759. "cmp $0, %%ecx \n"
  760. "je endcopy7 \n"
  761. "copyloop7: \n"
  762. "subl $4, %%eax \n"
  763. "pushl (%%eax) \n"
  764. "subl $4, %%ecx \n"
  765. "jne copyloop7 \n"
  766. "endcopy7: \n"
  767. "pushl 12(%%ebx) \n" // retPtr
  768. "call *8(%%ebx) \n" // func
  769. "addl 4(%%ebx), %%esp \n" // pop arguments
  770. #ifndef CALLEE_POPS_HIDDEN_RETURN_POINTER
  771. "addl $4, %%esp \n" // Pop the return pointer
  772. #endif
  773. // Pop the alignment bytes
  774. "popl %%esp \n"
  775. "popl %%ebx \n"
  776. #ifdef __OPTIMIZE__
  777. // Epilogue
  778. "movl %%ebp, %%esp \n"
  779. ".cfi_def_cfa_register esp \n"
  780. "popl %%ebp \n"
  781. ".cfi_adjust_cfa_offset -4 \n"
  782. ".cfi_restore ebp \n"
  783. ".cfi_endproc \n"
  784. #endif
  785. // Copy EAX:EDX to retQW. As the stack pointer has been
  786. // restored it is now safe to access the local variable
  787. "leal %1, %%ecx \n"
  788. "movl %%eax, 0(%%ecx) \n"
  789. "movl %%edx, 4(%%ecx) \n"
  790. : // output
  791. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  792. : "%eax", "%ecx" // clobber
  793. );
  794. #endif
  795. return retQW;
  796. }
  797. asQWORD NOINLINE CallCDeclFunctionRetByRefObjLast(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func, void *retPtr)
  798. {
  799. volatile asQWORD retQW = 0;
  800. #if defined ASM_INTEL
  801. // Copy the data to the real stack. If we fail to do
  802. // this we may run into trouble in case of exceptions.
  803. __asm
  804. {
  805. // We must save registers that are used
  806. push ecx
  807. // Clear the FPU stack, in case the called function doesn't do it by itself
  808. CLEAR_FPU_STACK
  809. push obj
  810. // Copy arguments from script
  811. // stack to application stack
  812. mov ecx, paramSize
  813. mov eax, args
  814. add eax, ecx
  815. cmp ecx, 0
  816. je endcopy
  817. copyloop:
  818. sub eax, 4
  819. push dword ptr [eax]
  820. sub ecx, 4
  821. jne copyloop
  822. endcopy:
  823. // Push the return pointer
  824. push retPtr;
  825. // Call function
  826. call [func]
  827. // Pop arguments from stack
  828. add esp, paramSize
  829. add esp, 4
  830. #ifndef CALLEE_POPS_HIDDEN_RETURN_POINTER
  831. // Pop the return pointer
  832. add esp, 4
  833. #endif
  834. // Copy return value from EAX:EDX
  835. lea ecx, retQW
  836. mov [ecx], eax
  837. mov 4[ecx], edx
  838. // Restore registers
  839. pop ecx
  840. }
  841. #elif defined ASM_AT_N_T
  842. volatile asPWORD a[] = {asPWORD(obj), asPWORD(args), asPWORD(paramSize), asPWORD(func), asPWORD(retPtr)};
  843. asm __volatile__ (
  844. #ifdef __OPTIMIZE__
  845. // When compiled with optimizations the stack unwind doesn't work properly,
  846. // causing exceptions to crash the application. By adding this prologue
  847. // and the epilogue below, the stack unwind works as it should.
  848. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  849. "pushl %%ebp \n"
  850. ".cfi_startproc \n"
  851. ".cfi_adjust_cfa_offset 4 \n"
  852. ".cfi_rel_offset ebp, 0 \n"
  853. "movl %%esp, %%ebp \n"
  854. ".cfi_def_cfa_register ebp \n"
  855. #endif
  856. _S(CLEAR_FPU_STACK) "\n"
  857. "pushl %%ebx \n"
  858. "movl %%edx, %%ebx \n"
  859. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  860. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  861. // to calculate how much we will put on the stack during this call.
  862. "movl 8(%%ebx), %%eax \n" // paramSize
  863. "addl $12, %%eax \n" // counting esp that we will push on the stack
  864. "movl %%esp, %%ecx \n"
  865. "subl %%eax, %%ecx \n"
  866. "andl $15, %%ecx \n"
  867. "movl %%esp, %%eax \n"
  868. "subl %%ecx, %%esp \n"
  869. "pushl %%eax \n" // Store the original stack pointer
  870. "pushl 0(%%ebx) \n" // obj
  871. "movl 8(%%ebx), %%ecx \n" // paramSize
  872. "movl 4(%%ebx), %%eax \n" // args
  873. "addl %%ecx, %%eax \n" // push arguments on the stack
  874. "cmp $0, %%ecx \n"
  875. "je endcopy4 \n"
  876. "copyloop4: \n"
  877. "subl $4, %%eax \n"
  878. "pushl (%%eax) \n"
  879. "subl $4, %%ecx \n"
  880. "jne copyloop4 \n"
  881. "endcopy4: \n"
  882. "pushl 16(%%ebx) \n" // retPtr
  883. "call *12(%%ebx) \n" // func
  884. "addl 8(%%ebx), %%esp \n" // pop arguments
  885. #ifndef CALLEE_POPS_HIDDEN_RETURN_POINTER
  886. "addl $8, %%esp \n" // Pop the return pointer and object pointer
  887. #else
  888. "addl $4, %%esp \n" // Pop the object pointer
  889. #endif
  890. // Pop the alignment bytes
  891. "popl %%esp \n"
  892. "popl %%ebx \n"
  893. #ifdef __OPTIMIZE__
  894. // Epilogue
  895. "movl %%ebp, %%esp \n"
  896. ".cfi_def_cfa_register esp \n"
  897. "popl %%ebp \n"
  898. ".cfi_adjust_cfa_offset -4 \n"
  899. ".cfi_restore ebp \n"
  900. ".cfi_endproc \n"
  901. #endif
  902. // Copy EAX:EDX to retQW. As the stack pointer has been
  903. // restored it is now safe to access the local variable
  904. "leal %1, %%ecx \n"
  905. "movl %%eax, 0(%%ecx) \n"
  906. "movl %%edx, 4(%%ecx) \n"
  907. : // output
  908. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  909. : "%eax", "%ecx" // clobber
  910. );
  911. #endif
  912. return retQW;
  913. }
  914. asQWORD NOINLINE CallSTDCallFunction(const asDWORD *args, int paramSize, asFUNCTION_t func)
  915. {
  916. volatile asQWORD retQW = 0;
  917. #if defined ASM_INTEL
  918. // Copy the data to the real stack. If we fail to do
  919. // this we may run into trouble in case of exceptions.
  920. __asm
  921. {
  922. // We must save registers that are used
  923. push ecx
  924. // Clear the FPU stack, in case the called function doesn't do it by itself
  925. CLEAR_FPU_STACK
  926. // Copy arguments from script
  927. // stack to application stack
  928. mov ecx, paramSize
  929. mov eax, args
  930. add eax, ecx
  931. cmp ecx, 0
  932. je endcopy
  933. copyloop:
  934. sub eax, 4
  935. push dword ptr [eax]
  936. sub ecx, 4
  937. jne copyloop
  938. endcopy:
  939. // Call function
  940. call [func]
  941. // The callee already removed parameters from the stack
  942. // Copy return value from EAX:EDX
  943. lea ecx, retQW
  944. mov [ecx], eax
  945. mov 4[ecx], edx
  946. // Restore registers
  947. pop ecx
  948. }
  949. #elif defined ASM_AT_N_T
  950. volatile asPWORD a[] = {asPWORD(args), asPWORD(paramSize), asPWORD(func)};
  951. asm __volatile__ (
  952. #ifdef __OPTIMIZE__
  953. // When compiled with optimizations the stack unwind doesn't work properly,
  954. // causing exceptions to crash the application. By adding this prologue
  955. // and the epilogue below, the stack unwind works as it should.
  956. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  957. "pushl %%ebp \n"
  958. ".cfi_startproc \n"
  959. ".cfi_adjust_cfa_offset 4 \n"
  960. ".cfi_rel_offset ebp, 0 \n"
  961. "movl %%esp, %%ebp \n"
  962. ".cfi_def_cfa_register ebp \n"
  963. #endif
  964. _S(CLEAR_FPU_STACK) "\n"
  965. "pushl %%ebx \n"
  966. "movl %%edx, %%ebx \n"
  967. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  968. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  969. // to calculate how much we will put on the stack during this call.
  970. "movl 4(%%ebx), %%eax \n" // paramSize
  971. "addl $4, %%eax \n" // counting esp that we will push on the stack
  972. "movl %%esp, %%ecx \n"
  973. "subl %%eax, %%ecx \n"
  974. "andl $15, %%ecx \n"
  975. "movl %%esp, %%eax \n"
  976. "subl %%ecx, %%esp \n"
  977. "pushl %%eax \n" // Store the original stack pointer
  978. "movl 4(%%ebx), %%ecx \n" // paramSize
  979. "movl 0(%%ebx), %%eax \n" // args
  980. "addl %%ecx, %%eax \n" // push arguments on the stack
  981. "cmp $0, %%ecx \n"
  982. "je endcopy2 \n"
  983. "copyloop2: \n"
  984. "subl $4, %%eax \n"
  985. "pushl (%%eax) \n"
  986. "subl $4, %%ecx \n"
  987. "jne copyloop2 \n"
  988. "endcopy2: \n"
  989. "call *8(%%ebx) \n" // callee pops the arguments
  990. // Pop the alignment bytes
  991. "popl %%esp \n"
  992. "popl %%ebx \n"
  993. #ifdef __OPTIMIZE__
  994. // Epilogue
  995. "movl %%ebp, %%esp \n"
  996. ".cfi_def_cfa_register esp \n"
  997. "popl %%ebp \n"
  998. ".cfi_adjust_cfa_offset -4 \n"
  999. ".cfi_restore ebp \n"
  1000. ".cfi_endproc \n"
  1001. #endif
  1002. // Copy EAX:EDX to retQW. As the stack pointer has been
  1003. // restored it is now safe to access the local variable
  1004. "leal %1, %%ecx \n"
  1005. "movl %%eax, 0(%%ecx) \n"
  1006. "movl %%edx, 4(%%ecx) \n"
  1007. : // output
  1008. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  1009. : "%eax", "%ecx" // clobber
  1010. );
  1011. #endif
  1012. return retQW;
  1013. }
  1014. asQWORD NOINLINE CallThisCallFunction(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func)
  1015. {
  1016. volatile asQWORD retQW = 0;
  1017. #if defined ASM_INTEL
  1018. // Copy the data to the real stack. If we fail to do
  1019. // this we may run into trouble in case of exceptions.
  1020. __asm
  1021. {
  1022. // We must save registers that are used
  1023. push ecx
  1024. // Clear the FPU stack, in case the called function doesn't do it by itself
  1025. CLEAR_FPU_STACK
  1026. // Copy arguments from script
  1027. // stack to application stack
  1028. mov ecx, paramSize
  1029. mov eax, args
  1030. add eax, ecx
  1031. cmp ecx, 0
  1032. je endcopy
  1033. copyloop:
  1034. sub eax, 4
  1035. push dword ptr [eax]
  1036. sub ecx, 4
  1037. jne copyloop
  1038. endcopy:
  1039. #ifdef THISCALL_PASS_OBJECT_POINTER_ON_THE_STACK
  1040. // Push the object pointer on the stack
  1041. push obj
  1042. #else
  1043. // Move object pointer to ECX
  1044. mov ecx, obj
  1045. #endif
  1046. // Call function
  1047. call [func]
  1048. #ifndef THISCALL_CALLEE_POPS_ARGUMENTS
  1049. // Pop arguments
  1050. add esp, paramSize
  1051. #ifdef THISCALL_PASS_OBJECT_POINTER_ON_THE_STACK
  1052. // Pop object pointer
  1053. add esp, 4
  1054. #endif
  1055. #endif
  1056. // Copy return value from EAX:EDX
  1057. lea ecx, retQW
  1058. mov [ecx], eax
  1059. mov 4[ecx], edx
  1060. // Restore registers
  1061. pop ecx
  1062. }
  1063. #elif defined ASM_AT_N_T
  1064. volatile asPWORD a[] = {asPWORD(obj), asPWORD(args), asPWORD(paramSize), asPWORD(func)};
  1065. asm __volatile__ (
  1066. #ifdef __OPTIMIZE__
  1067. // When compiled with optimizations the stack unwind doesn't work properly,
  1068. // causing exceptions to crash the application. By adding this prologue
  1069. // and the epilogue below, the stack unwind works as it should.
  1070. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  1071. "pushl %%ebp \n"
  1072. ".cfi_startproc \n"
  1073. ".cfi_adjust_cfa_offset 4 \n"
  1074. ".cfi_rel_offset ebp, 0 \n"
  1075. "movl %%esp, %%ebp \n"
  1076. ".cfi_def_cfa_register ebp \n"
  1077. #endif
  1078. _S(CLEAR_FPU_STACK) "\n"
  1079. "pushl %%ebx \n"
  1080. "movl %%edx, %%ebx \n"
  1081. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  1082. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  1083. // to calculate how much we will put on the stack during this call.
  1084. "movl 8(%%ebx), %%eax \n" // paramSize
  1085. "addl $8, %%eax \n" // counting esp that we will push on the stack
  1086. "movl %%esp, %%ecx \n"
  1087. "subl %%eax, %%ecx \n"
  1088. "andl $15, %%ecx \n"
  1089. "movl %%esp, %%eax \n"
  1090. "subl %%ecx, %%esp \n"
  1091. "pushl %%eax \n" // Store the original stack pointer
  1092. "movl 8(%%ebx), %%ecx \n" // paramSize
  1093. "movl 4(%%ebx), %%eax \n" // args
  1094. "addl %%ecx, %%eax \n" // push all arguments on the stack
  1095. "cmp $0, %%ecx \n"
  1096. "je endcopy1 \n"
  1097. "copyloop1: \n"
  1098. "subl $4, %%eax \n"
  1099. "pushl (%%eax) \n"
  1100. "subl $4, %%ecx \n"
  1101. "jne copyloop1 \n"
  1102. "endcopy1: \n"
  1103. "movl 0(%%ebx), %%ecx \n" // move obj into ECX
  1104. #ifdef THISCALL_PASS_OBJECT_POINTER_ON_THE_STACK
  1105. "pushl %%ecx \n" // push obj on the stack
  1106. #endif
  1107. "call *12(%%ebx) \n"
  1108. #ifndef THISCALL_CALLEE_POPS_ARGUMENTS
  1109. "addl 8(%%ebx), %%esp \n" // pop arguments
  1110. #ifdef THISCALL_PASS_OBJECT_POINTER_ON_THE_STACK
  1111. "addl $4, %%esp \n" // pop obj
  1112. #endif
  1113. #endif
  1114. // Pop the alignment bytes
  1115. "popl %%esp \n"
  1116. "popl %%ebx \n"
  1117. #ifdef __OPTIMIZE__
  1118. // Epilogue
  1119. "movl %%ebp, %%esp \n"
  1120. ".cfi_def_cfa_register esp \n"
  1121. "popl %%ebp \n"
  1122. ".cfi_adjust_cfa_offset -4 \n"
  1123. ".cfi_restore ebp \n"
  1124. ".cfi_endproc \n"
  1125. #endif
  1126. // Copy EAX:EDX to retQW. As the stack pointer has been
  1127. // restored it is now safe to access the local variable
  1128. "leal %1, %%ecx \n"
  1129. "movl %%eax, 0(%%ecx) \n"
  1130. "movl %%edx, 4(%%ecx) \n"
  1131. : // output
  1132. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  1133. : "%eax", "%ecx" // clobber
  1134. );
  1135. #endif
  1136. return retQW;
  1137. }
  1138. asQWORD NOINLINE CallThisCallFunctionRetByRef(const void *obj, const asDWORD *args, int paramSize, asFUNCTION_t func, void *retPtr)
  1139. {
  1140. volatile asQWORD retQW = 0;
  1141. #if defined ASM_INTEL
  1142. // Copy the data to the real stack. If we fail to do
  1143. // this we may run into trouble in case of exceptions.
  1144. __asm
  1145. {
  1146. // We must save registers that are used
  1147. push ecx
  1148. // Clear the FPU stack, in case the called function doesn't do it by itself
  1149. CLEAR_FPU_STACK
  1150. // Copy arguments from script
  1151. // stack to application stack
  1152. mov ecx, paramSize
  1153. mov eax, args
  1154. add eax, ecx
  1155. cmp ecx, 0
  1156. je endcopy
  1157. copyloop:
  1158. sub eax, 4
  1159. push dword ptr [eax]
  1160. sub ecx, 4
  1161. jne copyloop
  1162. endcopy:
  1163. #ifdef THISCALL_PASS_OBJECT_POINTER_ON_THE_STACK
  1164. // Push the object pointer on the stack
  1165. push obj
  1166. #else
  1167. // Move object pointer to ECX
  1168. mov ecx, obj
  1169. #endif
  1170. // Push the return pointer
  1171. push retPtr
  1172. // Call function
  1173. call [func]
  1174. #ifndef THISCALL_CALLEE_POPS_HIDDEN_RETURN_POINTER
  1175. // Pop the return pointer
  1176. add esp, 4
  1177. #endif
  1178. #ifndef THISCALL_CALLEE_POPS_ARGUMENTS
  1179. // Pop arguments
  1180. add esp, paramSize
  1181. #ifdef THISCALL_PASS_OBJECT_POINTER_ON_THE_STACK
  1182. // Pop object pointer
  1183. add esp, 4
  1184. #endif
  1185. #endif
  1186. // Copy return value from EAX:EDX
  1187. lea ecx, retQW
  1188. mov [ecx], eax
  1189. mov 4[ecx], edx
  1190. // Restore registers
  1191. pop ecx
  1192. }
  1193. #elif defined ASM_AT_N_T
  1194. volatile asPWORD a[] = {asPWORD(obj), asPWORD(args), asPWORD(paramSize), asPWORD(func), asPWORD(retPtr)};
  1195. asm __volatile__ (
  1196. #ifdef __OPTIMIZE__
  1197. // When compiled with optimizations the stack unwind doesn't work properly,
  1198. // causing exceptions to crash the application. By adding this prologue
  1199. // and the epilogue below, the stack unwind works as it should.
  1200. // TODO: runtime optimize: The prologue/epilogue shouldn't be needed if the correct cfi directives are used below
  1201. "pushl %%ebp \n"
  1202. ".cfi_startproc \n"
  1203. ".cfi_adjust_cfa_offset 4 \n"
  1204. ".cfi_rel_offset ebp, 0 \n"
  1205. "movl %%esp, %%ebp \n"
  1206. ".cfi_def_cfa_register ebp \n"
  1207. #endif
  1208. _S(CLEAR_FPU_STACK) "\n"
  1209. "pushl %%ebx \n"
  1210. "movl %%edx, %%ebx \n"
  1211. // Need to align the stack pointer so that it is aligned to 16 bytes when making the function call.
  1212. // It is assumed that when entering this function, the stack pointer is already aligned, so we need
  1213. // to calculate how much we will put on the stack during this call.
  1214. "movl 8(%%ebx), %%eax \n" // paramSize
  1215. "addl $12, %%eax \n" // counting esp that we will push on the stack
  1216. "movl %%esp, %%ecx \n"
  1217. "subl %%eax, %%ecx \n"
  1218. "andl $15, %%ecx \n"
  1219. "movl %%esp, %%eax \n"
  1220. "subl %%ecx, %%esp \n"
  1221. "pushl %%eax \n" // Store the original stack pointer
  1222. "movl 8(%%ebx), %%ecx \n" // paramSize
  1223. "movl 4(%%ebx), %%eax \n" // args
  1224. "addl %%ecx, %%eax \n" // push all arguments to the stack
  1225. "cmp $0, %%ecx \n"
  1226. "je endcopy3 \n"
  1227. "copyloop3: \n"
  1228. "subl $4, %%eax \n"
  1229. "pushl (%%eax) \n"
  1230. "subl $4, %%ecx \n"
  1231. "jne copyloop3 \n"
  1232. "endcopy3: \n"
  1233. #ifdef AS_MINGW47
  1234. // MinGW made some strange choices with 4.7 and the thiscall calling convention:
  1235. // returning an object in memory is handled completely differently from not
  1236. // returning in memory.
  1237. "pushl 0(%%ebx) \n" // push obj on the stack
  1238. "movl 16(%%ebx), %%ecx \n" // move the return pointer into ECX
  1239. "call *12(%%ebx) \n" // call the function
  1240. #else
  1241. "movl 0(%%ebx), %%ecx \n" // move obj into ECX
  1242. #ifdef THISCALL_PASS_OBJECT_POINTER_ON_THE_STACK
  1243. "pushl %%ecx \n" // push obj on the stack
  1244. #endif
  1245. "pushl 16(%%ebx) \n" // push retPtr on the stack
  1246. "call *12(%%ebx) \n"
  1247. #ifndef THISCALL_CALLEE_POPS_HIDDEN_RETURN_POINTER
  1248. "addl $4, %%esp \n" // pop return pointer
  1249. #endif
  1250. #ifndef THISCALL_CALLEE_POPS_ARGUMENTS
  1251. "addl 8(%%ebx), %%esp \n" // pop arguments
  1252. #ifdef THISCALL_PASS_OBJECT_POINTER_ON_THE_STACK
  1253. "addl $4, %%esp \n" // pop the object pointer
  1254. #endif
  1255. #endif
  1256. #endif // AS_MINGW47
  1257. // Pop the alignment bytes
  1258. "popl %%esp \n"
  1259. "popl %%ebx \n"
  1260. #ifdef __OPTIMIZE__
  1261. // Epilogue
  1262. "movl %%ebp, %%esp \n"
  1263. ".cfi_def_cfa_register esp \n"
  1264. "popl %%ebp \n"
  1265. ".cfi_adjust_cfa_offset -4 \n"
  1266. ".cfi_restore ebp \n"
  1267. ".cfi_endproc \n"
  1268. #endif
  1269. // Copy EAX:EDX to retQW. As the stack pointer has been
  1270. // restored it is now safe to access the local variable
  1271. "leal %1, %%ecx \n"
  1272. "movl %%eax, 0(%%ecx) \n"
  1273. "movl %%edx, 4(%%ecx) \n"
  1274. : // output
  1275. : "d"(a), "m"(retQW) // input - pass pointer of args in edx, pass pointer of retQW in memory argument
  1276. : "%eax", "%ecx" // clobber
  1277. );
  1278. #endif
  1279. return retQW;
  1280. }
  1281. asDWORD GetReturnedFloat()
  1282. {
  1283. asDWORD f;
  1284. #if defined ASM_INTEL
  1285. // Get the float value from ST0
  1286. __asm fstp dword ptr [f]
  1287. #elif defined ASM_AT_N_T
  1288. asm("fstps %0 \n" : "=m" (f));
  1289. #endif
  1290. return f;
  1291. }
  1292. asQWORD GetReturnedDouble()
  1293. {
  1294. asQWORD d;
  1295. #if defined ASM_INTEL
  1296. // Get the double value from ST0
  1297. __asm fstp qword ptr [d]
  1298. #elif defined ASM_AT_N_T
  1299. asm("fstpl %0 \n" : "=m" (d));
  1300. #endif
  1301. return d;
  1302. }
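// Note: both helpers read ST(0) with fstp, so they must run before any other
// floating-point code touches the FPU; CallSystemFunctionNative calls them right
// after the native call when sysFunc->hostReturnFloat is set.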
  1303. END_AS_NAMESPACE
  1304. #endif // AS_X86
  1305. #endif // AS_MAX_PORTABILITY