immunio 0.15.4 → 0.16.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (454) hide show
  1. checksums.yaml +4 -4
  2. data/LICENSE +0 -27
  3. data/ext/immunio/Rakefile +9 -0
  4. data/lib/immunio/plugins/active_record.rb +1 -1
  5. data/lib/immunio/plugins/active_record_relation.rb +1 -1
  6. data/lib/immunio/plugins/environment_reporter.rb +20 -0
  7. data/lib/immunio/rufus_lua_ext/ref.rb +1 -3
  8. data/lib/immunio/version.rb +1 -1
  9. data/lib/immunio/vm.rb +1 -2
  10. data/lua-hooks/Makefile +97 -0
  11. data/lua-hooks/ext/all.c +41 -52
  12. data/lua-hooks/ext/all.o +0 -0
  13. data/lua-hooks/ext/libinjection/libinjection_html5.o +0 -0
  14. data/lua-hooks/ext/libinjection/libinjection_sqli.o +0 -0
  15. data/lua-hooks/ext/libinjection/libinjection_xss.o +0 -0
  16. data/lua-hooks/ext/libinjection/lualib.c +2 -2
  17. data/lua-hooks/ext/lpeg/lpcap.c +2 -2
  18. data/lua-hooks/ext/lpeg/lpcap.o +0 -0
  19. data/lua-hooks/ext/lpeg/lpcode.c +2 -2
  20. data/lua-hooks/ext/lpeg/lpcode.h +1 -1
  21. data/lua-hooks/ext/lpeg/lpcode.o +0 -0
  22. data/lua-hooks/ext/lpeg/lpprint.o +0 -0
  23. data/lua-hooks/ext/lpeg/lptree.c +2 -2
  24. data/lua-hooks/ext/lpeg/lptypes.h +1 -1
  25. data/lua-hooks/ext/lpeg/lpvm.c +2 -2
  26. data/lua-hooks/ext/lpeg/lpvm.o +0 -0
  27. data/lua-hooks/ext/lua-cmsgpack/lua_cmsgpack.c +16 -3
  28. data/lua-hooks/ext/lua-snapshot/snapshot.c +14 -7
  29. data/lua-hooks/ext/luajit/COPYRIGHT +56 -0
  30. data/lua-hooks/ext/luajit/Makefile +159 -0
  31. data/lua-hooks/ext/luajit/README +16 -0
  32. data/lua-hooks/ext/luajit/doc/bluequad-print.css +166 -0
  33. data/lua-hooks/ext/luajit/doc/bluequad.css +325 -0
  34. data/lua-hooks/ext/luajit/doc/changes.html +804 -0
  35. data/lua-hooks/ext/luajit/doc/contact.html +104 -0
  36. data/lua-hooks/ext/luajit/doc/ext_c_api.html +189 -0
  37. data/lua-hooks/ext/luajit/doc/ext_ffi.html +332 -0
  38. data/lua-hooks/ext/luajit/doc/ext_ffi_api.html +570 -0
  39. data/lua-hooks/ext/luajit/doc/ext_ffi_semantics.html +1261 -0
  40. data/lua-hooks/ext/luajit/doc/ext_ffi_tutorial.html +603 -0
  41. data/lua-hooks/ext/luajit/doc/ext_jit.html +201 -0
  42. data/lua-hooks/ext/luajit/doc/ext_profiler.html +365 -0
  43. data/lua-hooks/ext/luajit/doc/extensions.html +448 -0
  44. data/lua-hooks/ext/luajit/doc/faq.html +186 -0
  45. data/lua-hooks/ext/luajit/doc/img/contact.png +0 -0
  46. data/lua-hooks/ext/luajit/doc/install.html +659 -0
  47. data/lua-hooks/ext/luajit/doc/luajit.html +236 -0
  48. data/lua-hooks/ext/luajit/doc/running.html +309 -0
  49. data/lua-hooks/ext/luajit/doc/status.html +118 -0
  50. data/lua-hooks/ext/luajit/dynasm/dasm_arm.h +456 -0
  51. data/lua-hooks/ext/luajit/dynasm/dasm_arm.lua +1125 -0
  52. data/lua-hooks/ext/luajit/dynasm/dasm_arm64.h +518 -0
  53. data/lua-hooks/ext/luajit/dynasm/dasm_arm64.lua +1166 -0
  54. data/lua-hooks/ext/luajit/dynasm/dasm_mips.h +416 -0
  55. data/lua-hooks/ext/luajit/dynasm/dasm_mips.lua +953 -0
  56. data/lua-hooks/ext/luajit/dynasm/dasm_ppc.h +419 -0
  57. data/lua-hooks/ext/luajit/dynasm/dasm_ppc.lua +1919 -0
  58. data/lua-hooks/ext/luajit/dynasm/dasm_proto.h +83 -0
  59. data/lua-hooks/ext/luajit/dynasm/dasm_x64.lua +12 -0
  60. data/lua-hooks/ext/luajit/dynasm/dasm_x86.h +471 -0
  61. data/lua-hooks/ext/luajit/dynasm/dasm_x86.lua +1945 -0
  62. data/lua-hooks/ext/luajit/dynasm/dynasm.lua +1094 -0
  63. data/lua-hooks/ext/luajit/etc/luajit.1 +88 -0
  64. data/lua-hooks/ext/luajit/etc/luajit.pc +25 -0
  65. data/lua-hooks/ext/luajit/src/Makefile +697 -0
  66. data/lua-hooks/ext/luajit/src/Makefile.dep +244 -0
  67. data/lua-hooks/ext/luajit/src/host/README +4 -0
  68. data/lua-hooks/ext/luajit/src/host/buildvm +0 -0
  69. data/lua-hooks/ext/luajit/src/host/buildvm.c +518 -0
  70. data/lua-hooks/ext/luajit/src/host/buildvm.h +105 -0
  71. data/lua-hooks/ext/luajit/src/host/buildvm.o +0 -0
  72. data/lua-hooks/ext/luajit/src/host/buildvm_arch.h +7449 -0
  73. data/lua-hooks/ext/luajit/src/host/buildvm_asm.c +345 -0
  74. data/lua-hooks/ext/luajit/src/host/buildvm_asm.o +0 -0
  75. data/lua-hooks/ext/luajit/src/host/buildvm_fold.c +229 -0
  76. data/lua-hooks/ext/luajit/src/host/buildvm_fold.o +0 -0
  77. data/lua-hooks/ext/luajit/src/host/buildvm_lib.c +457 -0
  78. data/lua-hooks/ext/luajit/src/host/buildvm_lib.o +0 -0
  79. data/lua-hooks/ext/luajit/src/host/buildvm_libbc.h +45 -0
  80. data/lua-hooks/ext/luajit/src/host/buildvm_peobj.c +368 -0
  81. data/lua-hooks/ext/luajit/src/host/buildvm_peobj.o +0 -0
  82. data/lua-hooks/ext/luajit/src/host/genlibbc.lua +197 -0
  83. data/lua-hooks/ext/luajit/src/host/genminilua.lua +428 -0
  84. data/lua-hooks/ext/luajit/src/host/minilua +0 -0
  85. data/lua-hooks/ext/luajit/src/host/minilua.c +7770 -0
  86. data/lua-hooks/ext/luajit/src/host/minilua.o +0 -0
  87. data/lua-hooks/ext/luajit/src/jit/bc.lua +190 -0
  88. data/lua-hooks/ext/luajit/src/jit/bcsave.lua +661 -0
  89. data/lua-hooks/ext/luajit/src/jit/dis_arm.lua +689 -0
  90. data/lua-hooks/ext/luajit/src/jit/dis_mips.lua +428 -0
  91. data/lua-hooks/ext/luajit/src/jit/dis_mipsel.lua +17 -0
  92. data/lua-hooks/ext/luajit/src/jit/dis_ppc.lua +591 -0
  93. data/lua-hooks/ext/luajit/src/jit/dis_x64.lua +17 -0
  94. data/lua-hooks/ext/luajit/src/jit/dis_x86.lua +838 -0
  95. data/lua-hooks/ext/luajit/src/jit/dump.lua +706 -0
  96. data/lua-hooks/ext/luajit/src/jit/p.lua +310 -0
  97. data/lua-hooks/ext/luajit/src/jit/v.lua +170 -0
  98. data/lua-hooks/ext/luajit/src/jit/vmdef.lua +362 -0
  99. data/lua-hooks/ext/luajit/src/jit/zone.lua +45 -0
  100. data/lua-hooks/ext/{lua → luajit/src}/lauxlib.h +10 -17
  101. data/lua-hooks/ext/luajit/src/lib_aux.c +356 -0
  102. data/lua-hooks/ext/luajit/src/lib_aux.o +0 -0
  103. data/lua-hooks/ext/luajit/src/lib_aux_dyn.o +0 -0
  104. data/lua-hooks/ext/luajit/src/lib_base.c +664 -0
  105. data/lua-hooks/ext/luajit/src/lib_base.o +0 -0
  106. data/lua-hooks/ext/luajit/src/lib_base_dyn.o +0 -0
  107. data/lua-hooks/ext/luajit/src/lib_bit.c +180 -0
  108. data/lua-hooks/ext/luajit/src/lib_bit.o +0 -0
  109. data/lua-hooks/ext/luajit/src/lib_bit_dyn.o +0 -0
  110. data/lua-hooks/ext/luajit/src/lib_debug.c +405 -0
  111. data/lua-hooks/ext/luajit/src/lib_debug.o +0 -0
  112. data/lua-hooks/ext/luajit/src/lib_debug_dyn.o +0 -0
  113. data/lua-hooks/ext/luajit/src/lib_ffi.c +872 -0
  114. data/lua-hooks/ext/luajit/src/lib_ffi.o +0 -0
  115. data/lua-hooks/ext/luajit/src/lib_ffi_dyn.o +0 -0
  116. data/lua-hooks/ext/luajit/src/lib_init.c +55 -0
  117. data/lua-hooks/ext/luajit/src/lib_init.o +0 -0
  118. data/lua-hooks/ext/luajit/src/lib_init_dyn.o +0 -0
  119. data/lua-hooks/ext/luajit/src/lib_io.c +541 -0
  120. data/lua-hooks/ext/luajit/src/lib_io.o +0 -0
  121. data/lua-hooks/ext/luajit/src/lib_io_dyn.o +0 -0
  122. data/lua-hooks/ext/luajit/src/lib_jit.c +767 -0
  123. data/lua-hooks/ext/luajit/src/lib_jit.o +0 -0
  124. data/lua-hooks/ext/luajit/src/lib_jit_dyn.o +0 -0
  125. data/lua-hooks/ext/luajit/src/lib_math.c +230 -0
  126. data/lua-hooks/ext/luajit/src/lib_math.o +0 -0
  127. data/lua-hooks/ext/luajit/src/lib_math_dyn.o +0 -0
  128. data/lua-hooks/ext/luajit/src/lib_os.c +292 -0
  129. data/lua-hooks/ext/luajit/src/lib_os.o +0 -0
  130. data/lua-hooks/ext/luajit/src/lib_os_dyn.o +0 -0
  131. data/lua-hooks/ext/luajit/src/lib_package.c +610 -0
  132. data/lua-hooks/ext/luajit/src/lib_package.o +0 -0
  133. data/lua-hooks/ext/luajit/src/lib_package_dyn.o +0 -0
  134. data/lua-hooks/ext/luajit/src/lib_string.c +752 -0
  135. data/lua-hooks/ext/luajit/src/lib_string.o +0 -0
  136. data/lua-hooks/ext/luajit/src/lib_string_dyn.o +0 -0
  137. data/lua-hooks/ext/luajit/src/lib_table.c +307 -0
  138. data/lua-hooks/ext/luajit/src/lib_table.o +0 -0
  139. data/lua-hooks/ext/luajit/src/lib_table_dyn.o +0 -0
  140. data/lua-hooks/ext/luajit/src/libluajit.a +0 -0
  141. data/lua-hooks/ext/luajit/src/libluajit.so +0 -0
  142. data/lua-hooks/ext/luajit/src/lj.supp +26 -0
  143. data/lua-hooks/ext/luajit/src/lj_alloc.c +1398 -0
  144. data/lua-hooks/ext/luajit/src/lj_alloc.h +17 -0
  145. data/lua-hooks/ext/luajit/src/lj_alloc.o +0 -0
  146. data/lua-hooks/ext/luajit/src/lj_alloc_dyn.o +0 -0
  147. data/lua-hooks/ext/luajit/src/lj_api.c +1210 -0
  148. data/lua-hooks/ext/luajit/src/lj_api.o +0 -0
  149. data/lua-hooks/ext/luajit/src/lj_api_dyn.o +0 -0
  150. data/lua-hooks/ext/luajit/src/lj_arch.h +509 -0
  151. data/lua-hooks/ext/luajit/src/lj_asm.c +2278 -0
  152. data/lua-hooks/ext/luajit/src/lj_asm.h +17 -0
  153. data/lua-hooks/ext/luajit/src/lj_asm.o +0 -0
  154. data/lua-hooks/ext/luajit/src/lj_asm_arm.h +2217 -0
  155. data/lua-hooks/ext/luajit/src/lj_asm_dyn.o +0 -0
  156. data/lua-hooks/ext/luajit/src/lj_asm_mips.h +1833 -0
  157. data/lua-hooks/ext/luajit/src/lj_asm_ppc.h +2015 -0
  158. data/lua-hooks/ext/luajit/src/lj_asm_x86.h +2634 -0
  159. data/lua-hooks/ext/luajit/src/lj_bc.c +14 -0
  160. data/lua-hooks/ext/luajit/src/lj_bc.h +265 -0
  161. data/lua-hooks/ext/luajit/src/lj_bc.o +0 -0
  162. data/lua-hooks/ext/luajit/src/lj_bc_dyn.o +0 -0
  163. data/lua-hooks/ext/luajit/src/lj_bcdef.h +220 -0
  164. data/lua-hooks/ext/luajit/src/lj_bcdump.h +68 -0
  165. data/lua-hooks/ext/luajit/src/lj_bcread.c +457 -0
  166. data/lua-hooks/ext/luajit/src/lj_bcread.o +0 -0
  167. data/lua-hooks/ext/luajit/src/lj_bcread_dyn.o +0 -0
  168. data/lua-hooks/ext/luajit/src/lj_bcwrite.c +361 -0
  169. data/lua-hooks/ext/luajit/src/lj_bcwrite.o +0 -0
  170. data/lua-hooks/ext/luajit/src/lj_bcwrite_dyn.o +0 -0
  171. data/lua-hooks/ext/luajit/src/lj_buf.c +234 -0
  172. data/lua-hooks/ext/luajit/src/lj_buf.h +105 -0
  173. data/lua-hooks/ext/luajit/src/lj_buf.o +0 -0
  174. data/lua-hooks/ext/luajit/src/lj_buf_dyn.o +0 -0
  175. data/lua-hooks/ext/luajit/src/lj_carith.c +429 -0
  176. data/lua-hooks/ext/luajit/src/lj_carith.h +37 -0
  177. data/lua-hooks/ext/luajit/src/lj_carith.o +0 -0
  178. data/lua-hooks/ext/luajit/src/lj_carith_dyn.o +0 -0
  179. data/lua-hooks/ext/luajit/src/lj_ccall.c +984 -0
  180. data/lua-hooks/ext/luajit/src/lj_ccall.h +178 -0
  181. data/lua-hooks/ext/luajit/src/lj_ccall.o +0 -0
  182. data/lua-hooks/ext/luajit/src/lj_ccall_dyn.o +0 -0
  183. data/lua-hooks/ext/luajit/src/lj_ccallback.c +712 -0
  184. data/lua-hooks/ext/luajit/src/lj_ccallback.h +25 -0
  185. data/lua-hooks/ext/luajit/src/lj_ccallback.o +0 -0
  186. data/lua-hooks/ext/luajit/src/lj_ccallback_dyn.o +0 -0
  187. data/lua-hooks/ext/luajit/src/lj_cconv.c +752 -0
  188. data/lua-hooks/ext/luajit/src/lj_cconv.h +70 -0
  189. data/lua-hooks/ext/luajit/src/lj_cconv.o +0 -0
  190. data/lua-hooks/ext/luajit/src/lj_cconv_dyn.o +0 -0
  191. data/lua-hooks/ext/luajit/src/lj_cdata.c +288 -0
  192. data/lua-hooks/ext/luajit/src/lj_cdata.h +76 -0
  193. data/lua-hooks/ext/luajit/src/lj_cdata.o +0 -0
  194. data/lua-hooks/ext/luajit/src/lj_cdata_dyn.o +0 -0
  195. data/lua-hooks/ext/luajit/src/lj_char.c +43 -0
  196. data/lua-hooks/ext/luajit/src/lj_char.h +42 -0
  197. data/lua-hooks/ext/luajit/src/lj_char.o +0 -0
  198. data/lua-hooks/ext/luajit/src/lj_char_dyn.o +0 -0
  199. data/lua-hooks/ext/luajit/src/lj_clib.c +418 -0
  200. data/lua-hooks/ext/luajit/src/lj_clib.h +29 -0
  201. data/lua-hooks/ext/luajit/src/lj_clib.o +0 -0
  202. data/lua-hooks/ext/luajit/src/lj_clib_dyn.o +0 -0
  203. data/lua-hooks/ext/luajit/src/lj_cparse.c +1862 -0
  204. data/lua-hooks/ext/luajit/src/lj_cparse.h +65 -0
  205. data/lua-hooks/ext/luajit/src/lj_cparse.o +0 -0
  206. data/lua-hooks/ext/luajit/src/lj_cparse_dyn.o +0 -0
  207. data/lua-hooks/ext/luajit/src/lj_crecord.c +1834 -0
  208. data/lua-hooks/ext/luajit/src/lj_crecord.h +38 -0
  209. data/lua-hooks/ext/luajit/src/lj_crecord.o +0 -0
  210. data/lua-hooks/ext/luajit/src/lj_crecord_dyn.o +0 -0
  211. data/lua-hooks/ext/luajit/src/lj_ctype.c +635 -0
  212. data/lua-hooks/ext/luajit/src/lj_ctype.h +461 -0
  213. data/lua-hooks/ext/luajit/src/lj_ctype.o +0 -0
  214. data/lua-hooks/ext/luajit/src/lj_ctype_dyn.o +0 -0
  215. data/lua-hooks/ext/luajit/src/lj_debug.c +699 -0
  216. data/lua-hooks/ext/luajit/src/lj_debug.h +65 -0
  217. data/lua-hooks/ext/luajit/src/lj_debug.o +0 -0
  218. data/lua-hooks/ext/luajit/src/lj_debug_dyn.o +0 -0
  219. data/lua-hooks/ext/luajit/src/lj_def.h +365 -0
  220. data/lua-hooks/ext/luajit/src/lj_dispatch.c +557 -0
  221. data/lua-hooks/ext/luajit/src/lj_dispatch.h +138 -0
  222. data/lua-hooks/ext/luajit/src/lj_dispatch.o +0 -0
  223. data/lua-hooks/ext/luajit/src/lj_dispatch_dyn.o +0 -0
  224. data/lua-hooks/ext/luajit/src/lj_emit_arm.h +356 -0
  225. data/lua-hooks/ext/luajit/src/lj_emit_mips.h +211 -0
  226. data/lua-hooks/ext/luajit/src/lj_emit_ppc.h +238 -0
  227. data/lua-hooks/ext/luajit/src/lj_emit_x86.h +462 -0
  228. data/lua-hooks/ext/luajit/src/lj_err.c +794 -0
  229. data/lua-hooks/ext/luajit/src/lj_err.h +41 -0
  230. data/lua-hooks/ext/luajit/src/lj_err.o +0 -0
  231. data/lua-hooks/ext/luajit/src/lj_err_dyn.o +0 -0
  232. data/lua-hooks/ext/luajit/src/lj_errmsg.h +190 -0
  233. data/lua-hooks/ext/luajit/src/lj_ff.h +18 -0
  234. data/lua-hooks/ext/luajit/src/lj_ffdef.h +209 -0
  235. data/lua-hooks/ext/luajit/src/lj_ffrecord.c +1247 -0
  236. data/lua-hooks/ext/luajit/src/lj_ffrecord.h +24 -0
  237. data/lua-hooks/ext/luajit/src/lj_ffrecord.o +0 -0
  238. data/lua-hooks/ext/luajit/src/lj_ffrecord_dyn.o +0 -0
  239. data/lua-hooks/ext/luajit/src/lj_folddef.h +1138 -0
  240. data/lua-hooks/ext/luajit/src/lj_frame.h +259 -0
  241. data/lua-hooks/ext/luajit/src/lj_func.c +185 -0
  242. data/lua-hooks/ext/luajit/src/lj_func.h +24 -0
  243. data/lua-hooks/ext/luajit/src/lj_func.o +0 -0
  244. data/lua-hooks/ext/luajit/src/lj_func_dyn.o +0 -0
  245. data/lua-hooks/ext/luajit/src/lj_gc.c +845 -0
  246. data/lua-hooks/ext/luajit/src/lj_gc.h +134 -0
  247. data/lua-hooks/ext/luajit/src/lj_gc.o +0 -0
  248. data/lua-hooks/ext/luajit/src/lj_gc_dyn.o +0 -0
  249. data/lua-hooks/ext/luajit/src/lj_gdbjit.c +787 -0
  250. data/lua-hooks/ext/luajit/src/lj_gdbjit.h +22 -0
  251. data/lua-hooks/ext/luajit/src/lj_gdbjit.o +0 -0
  252. data/lua-hooks/ext/luajit/src/lj_gdbjit_dyn.o +0 -0
  253. data/lua-hooks/ext/luajit/src/lj_ir.c +505 -0
  254. data/lua-hooks/ext/luajit/src/lj_ir.h +577 -0
  255. data/lua-hooks/ext/luajit/src/lj_ir.o +0 -0
  256. data/lua-hooks/ext/luajit/src/lj_ir_dyn.o +0 -0
  257. data/lua-hooks/ext/luajit/src/lj_ircall.h +321 -0
  258. data/lua-hooks/ext/luajit/src/lj_iropt.h +161 -0
  259. data/lua-hooks/ext/luajit/src/lj_jit.h +440 -0
  260. data/lua-hooks/ext/luajit/src/lj_lex.c +482 -0
  261. data/lua-hooks/ext/luajit/src/lj_lex.h +86 -0
  262. data/lua-hooks/ext/luajit/src/lj_lex.o +0 -0
  263. data/lua-hooks/ext/luajit/src/lj_lex_dyn.o +0 -0
  264. data/lua-hooks/ext/luajit/src/lj_lib.c +303 -0
  265. data/lua-hooks/ext/luajit/src/lj_lib.h +115 -0
  266. data/lua-hooks/ext/luajit/src/lj_lib.o +0 -0
  267. data/lua-hooks/ext/luajit/src/lj_lib_dyn.o +0 -0
  268. data/lua-hooks/ext/luajit/src/lj_libdef.h +414 -0
  269. data/lua-hooks/ext/luajit/src/lj_load.c +168 -0
  270. data/lua-hooks/ext/luajit/src/lj_load.o +0 -0
  271. data/lua-hooks/ext/luajit/src/lj_load_dyn.o +0 -0
  272. data/lua-hooks/ext/luajit/src/lj_mcode.c +386 -0
  273. data/lua-hooks/ext/luajit/src/lj_mcode.h +30 -0
  274. data/lua-hooks/ext/luajit/src/lj_mcode.o +0 -0
  275. data/lua-hooks/ext/luajit/src/lj_mcode_dyn.o +0 -0
  276. data/lua-hooks/ext/luajit/src/lj_meta.c +477 -0
  277. data/lua-hooks/ext/luajit/src/lj_meta.h +38 -0
  278. data/lua-hooks/ext/luajit/src/lj_meta.o +0 -0
  279. data/lua-hooks/ext/luajit/src/lj_meta_dyn.o +0 -0
  280. data/lua-hooks/ext/luajit/src/lj_obj.c +50 -0
  281. data/lua-hooks/ext/luajit/src/lj_obj.h +976 -0
  282. data/lua-hooks/ext/luajit/src/lj_obj.o +0 -0
  283. data/lua-hooks/ext/luajit/src/lj_obj_dyn.o +0 -0
  284. data/lua-hooks/ext/luajit/src/lj_opt_dce.c +78 -0
  285. data/lua-hooks/ext/luajit/src/lj_opt_dce.o +0 -0
  286. data/lua-hooks/ext/luajit/src/lj_opt_dce_dyn.o +0 -0
  287. data/lua-hooks/ext/luajit/src/lj_opt_fold.c +2488 -0
  288. data/lua-hooks/ext/luajit/src/lj_opt_fold.o +0 -0
  289. data/lua-hooks/ext/luajit/src/lj_opt_fold_dyn.o +0 -0
  290. data/lua-hooks/ext/luajit/src/lj_opt_loop.c +449 -0
  291. data/lua-hooks/ext/luajit/src/lj_opt_loop.o +0 -0
  292. data/lua-hooks/ext/luajit/src/lj_opt_loop_dyn.o +0 -0
  293. data/lua-hooks/ext/luajit/src/lj_opt_mem.c +935 -0
  294. data/lua-hooks/ext/luajit/src/lj_opt_mem.o +0 -0
  295. data/lua-hooks/ext/luajit/src/lj_opt_mem_dyn.o +0 -0
  296. data/lua-hooks/ext/luajit/src/lj_opt_narrow.c +652 -0
  297. data/lua-hooks/ext/luajit/src/lj_opt_narrow.o +0 -0
  298. data/lua-hooks/ext/luajit/src/lj_opt_narrow_dyn.o +0 -0
  299. data/lua-hooks/ext/luajit/src/lj_opt_sink.c +245 -0
  300. data/lua-hooks/ext/luajit/src/lj_opt_sink.o +0 -0
  301. data/lua-hooks/ext/luajit/src/lj_opt_sink_dyn.o +0 -0
  302. data/lua-hooks/ext/luajit/src/lj_opt_split.c +856 -0
  303. data/lua-hooks/ext/luajit/src/lj_opt_split.o +0 -0
  304. data/lua-hooks/ext/luajit/src/lj_opt_split_dyn.o +0 -0
  305. data/lua-hooks/ext/luajit/src/lj_parse.c +2725 -0
  306. data/lua-hooks/ext/luajit/src/lj_parse.h +18 -0
  307. data/lua-hooks/ext/luajit/src/lj_parse.o +0 -0
  308. data/lua-hooks/ext/luajit/src/lj_parse_dyn.o +0 -0
  309. data/lua-hooks/ext/luajit/src/lj_profile.c +368 -0
  310. data/lua-hooks/ext/luajit/src/lj_profile.h +21 -0
  311. data/lua-hooks/ext/luajit/src/lj_profile.o +0 -0
  312. data/lua-hooks/ext/luajit/src/lj_profile_dyn.o +0 -0
  313. data/lua-hooks/ext/luajit/src/lj_recdef.h +270 -0
  314. data/lua-hooks/ext/luajit/src/lj_record.c +2554 -0
  315. data/lua-hooks/ext/luajit/src/lj_record.h +45 -0
  316. data/lua-hooks/ext/luajit/src/lj_record.o +0 -0
  317. data/lua-hooks/ext/luajit/src/lj_record_dyn.o +0 -0
  318. data/lua-hooks/ext/luajit/src/lj_snap.c +870 -0
  319. data/lua-hooks/ext/luajit/src/lj_snap.h +34 -0
  320. data/lua-hooks/ext/luajit/src/lj_snap.o +0 -0
  321. data/lua-hooks/ext/luajit/src/lj_snap_dyn.o +0 -0
  322. data/lua-hooks/ext/luajit/src/lj_state.c +300 -0
  323. data/lua-hooks/ext/luajit/src/lj_state.h +35 -0
  324. data/lua-hooks/ext/luajit/src/lj_state.o +0 -0
  325. data/lua-hooks/ext/luajit/src/lj_state_dyn.o +0 -0
  326. data/lua-hooks/ext/luajit/src/lj_str.c +197 -0
  327. data/lua-hooks/ext/luajit/src/lj_str.h +27 -0
  328. data/lua-hooks/ext/luajit/src/lj_str.o +0 -0
  329. data/lua-hooks/ext/luajit/src/lj_str_dyn.o +0 -0
  330. data/lua-hooks/ext/luajit/src/lj_strfmt.c +554 -0
  331. data/lua-hooks/ext/luajit/src/lj_strfmt.h +125 -0
  332. data/lua-hooks/ext/luajit/src/lj_strfmt.o +0 -0
  333. data/lua-hooks/ext/luajit/src/lj_strfmt_dyn.o +0 -0
  334. data/lua-hooks/ext/luajit/src/lj_strscan.c +547 -0
  335. data/lua-hooks/ext/luajit/src/lj_strscan.h +39 -0
  336. data/lua-hooks/ext/luajit/src/lj_strscan.o +0 -0
  337. data/lua-hooks/ext/luajit/src/lj_strscan_dyn.o +0 -0
  338. data/lua-hooks/ext/luajit/src/lj_tab.c +666 -0
  339. data/lua-hooks/ext/luajit/src/lj_tab.h +73 -0
  340. data/lua-hooks/ext/luajit/src/lj_tab.o +0 -0
  341. data/lua-hooks/ext/luajit/src/lj_tab_dyn.o +0 -0
  342. data/lua-hooks/ext/luajit/src/lj_target.h +164 -0
  343. data/lua-hooks/ext/luajit/src/lj_target_arm.h +270 -0
  344. data/lua-hooks/ext/luajit/src/lj_target_arm64.h +97 -0
  345. data/lua-hooks/ext/luajit/src/lj_target_mips.h +260 -0
  346. data/lua-hooks/ext/luajit/src/lj_target_ppc.h +280 -0
  347. data/lua-hooks/ext/luajit/src/lj_target_x86.h +345 -0
  348. data/lua-hooks/ext/luajit/src/lj_trace.c +859 -0
  349. data/lua-hooks/ext/luajit/src/lj_trace.h +54 -0
  350. data/lua-hooks/ext/luajit/src/lj_trace.o +0 -0
  351. data/lua-hooks/ext/luajit/src/lj_trace_dyn.o +0 -0
  352. data/lua-hooks/ext/luajit/src/lj_traceerr.h +63 -0
  353. data/lua-hooks/ext/luajit/src/lj_udata.c +34 -0
  354. data/lua-hooks/ext/luajit/src/lj_udata.h +14 -0
  355. data/lua-hooks/ext/luajit/src/lj_udata.o +0 -0
  356. data/lua-hooks/ext/luajit/src/lj_udata_dyn.o +0 -0
  357. data/lua-hooks/ext/luajit/src/lj_vm.S +2730 -0
  358. data/lua-hooks/ext/luajit/src/lj_vm.h +114 -0
  359. data/lua-hooks/ext/luajit/src/lj_vm.o +0 -0
  360. data/lua-hooks/ext/luajit/src/lj_vm_dyn.o +0 -0
  361. data/lua-hooks/ext/luajit/src/lj_vmevent.c +58 -0
  362. data/lua-hooks/ext/luajit/src/lj_vmevent.h +59 -0
  363. data/lua-hooks/ext/luajit/src/lj_vmevent.o +0 -0
  364. data/lua-hooks/ext/luajit/src/lj_vmevent_dyn.o +0 -0
  365. data/lua-hooks/ext/luajit/src/lj_vmmath.c +152 -0
  366. data/lua-hooks/ext/luajit/src/lj_vmmath.o +0 -0
  367. data/lua-hooks/ext/luajit/src/lj_vmmath_dyn.o +0 -0
  368. data/lua-hooks/ext/luajit/src/ljamalg.c +96 -0
  369. data/lua-hooks/ext/{lua → luajit/src}/lua.h +12 -7
  370. data/lua-hooks/ext/luajit/src/lua.hpp +9 -0
  371. data/lua-hooks/ext/luajit/src/luaconf.h +156 -0
  372. data/lua-hooks/ext/luajit/src/luajit +0 -0
  373. data/lua-hooks/ext/luajit/src/luajit.c +570 -0
  374. data/lua-hooks/ext/luajit/src/luajit.h +79 -0
  375. data/lua-hooks/ext/luajit/src/luajit.o +0 -0
  376. data/lua-hooks/ext/luajit/src/lualib.h +43 -0
  377. data/lua-hooks/ext/luajit/src/msvcbuild.bat +114 -0
  378. data/lua-hooks/ext/luajit/src/ps4build.bat +103 -0
  379. data/lua-hooks/ext/luajit/src/psvitabuild.bat +93 -0
  380. data/lua-hooks/ext/luajit/src/vm_arm.dasc +4585 -0
  381. data/lua-hooks/ext/luajit/src/vm_arm64.dasc +3764 -0
  382. data/lua-hooks/ext/luajit/src/vm_mips.dasc +4355 -0
  383. data/lua-hooks/ext/luajit/src/vm_ppc.dasc +5252 -0
  384. data/lua-hooks/ext/luajit/src/vm_x64.dasc +4902 -0
  385. data/lua-hooks/ext/luajit/src/vm_x86.dasc +5710 -0
  386. data/lua-hooks/ext/luajit/src/xb1build.bat +101 -0
  387. data/lua-hooks/ext/luajit/src/xedkbuild.bat +92 -0
  388. data/lua-hooks/ext/luautf8/lutf8lib.c +3 -3
  389. data/lua-hooks/lib/boot.lua +37 -2
  390. metadata +372 -69
  391. data/lua-hooks/ext/bitop/README +0 -22
  392. data/lua-hooks/ext/bitop/bit.c +0 -189
  393. data/lua-hooks/ext/extconf.rb +0 -38
  394. data/lua-hooks/ext/lua/COPYRIGHT +0 -34
  395. data/lua-hooks/ext/lua/lapi.c +0 -1087
  396. data/lua-hooks/ext/lua/lapi.h +0 -16
  397. data/lua-hooks/ext/lua/lauxlib.c +0 -652
  398. data/lua-hooks/ext/lua/lbaselib.c +0 -659
  399. data/lua-hooks/ext/lua/lcode.c +0 -831
  400. data/lua-hooks/ext/lua/lcode.h +0 -76
  401. data/lua-hooks/ext/lua/ldblib.c +0 -398
  402. data/lua-hooks/ext/lua/ldebug.c +0 -638
  403. data/lua-hooks/ext/lua/ldebug.h +0 -33
  404. data/lua-hooks/ext/lua/ldo.c +0 -519
  405. data/lua-hooks/ext/lua/ldo.h +0 -57
  406. data/lua-hooks/ext/lua/ldump.c +0 -164
  407. data/lua-hooks/ext/lua/lfunc.c +0 -174
  408. data/lua-hooks/ext/lua/lfunc.h +0 -34
  409. data/lua-hooks/ext/lua/lgc.c +0 -710
  410. data/lua-hooks/ext/lua/lgc.h +0 -110
  411. data/lua-hooks/ext/lua/linit.c +0 -38
  412. data/lua-hooks/ext/lua/liolib.c +0 -556
  413. data/lua-hooks/ext/lua/llex.c +0 -463
  414. data/lua-hooks/ext/lua/llex.h +0 -81
  415. data/lua-hooks/ext/lua/llimits.h +0 -128
  416. data/lua-hooks/ext/lua/lmathlib.c +0 -263
  417. data/lua-hooks/ext/lua/lmem.c +0 -86
  418. data/lua-hooks/ext/lua/lmem.h +0 -49
  419. data/lua-hooks/ext/lua/loadlib.c +0 -705
  420. data/lua-hooks/ext/lua/loadlib_rel.c +0 -760
  421. data/lua-hooks/ext/lua/lobject.c +0 -214
  422. data/lua-hooks/ext/lua/lobject.h +0 -381
  423. data/lua-hooks/ext/lua/lopcodes.c +0 -102
  424. data/lua-hooks/ext/lua/lopcodes.h +0 -268
  425. data/lua-hooks/ext/lua/loslib.c +0 -243
  426. data/lua-hooks/ext/lua/lparser.c +0 -1339
  427. data/lua-hooks/ext/lua/lparser.h +0 -82
  428. data/lua-hooks/ext/lua/lstate.c +0 -214
  429. data/lua-hooks/ext/lua/lstate.h +0 -169
  430. data/lua-hooks/ext/lua/lstring.c +0 -111
  431. data/lua-hooks/ext/lua/lstring.h +0 -31
  432. data/lua-hooks/ext/lua/lstrlib.c +0 -871
  433. data/lua-hooks/ext/lua/ltable.c +0 -588
  434. data/lua-hooks/ext/lua/ltable.h +0 -40
  435. data/lua-hooks/ext/lua/ltablib.c +0 -287
  436. data/lua-hooks/ext/lua/ltm.c +0 -75
  437. data/lua-hooks/ext/lua/ltm.h +0 -54
  438. data/lua-hooks/ext/lua/lua.c +0 -392
  439. data/lua-hooks/ext/lua/lua.def +0 -131
  440. data/lua-hooks/ext/lua/lua.rc +0 -28
  441. data/lua-hooks/ext/lua/lua_dll.rc +0 -26
  442. data/lua-hooks/ext/lua/luac.c +0 -200
  443. data/lua-hooks/ext/lua/luac.rc +0 -1
  444. data/lua-hooks/ext/lua/luaconf.h +0 -763
  445. data/lua-hooks/ext/lua/luaconf.h.in +0 -724
  446. data/lua-hooks/ext/lua/luaconf.h.orig +0 -763
  447. data/lua-hooks/ext/lua/lualib.h +0 -53
  448. data/lua-hooks/ext/lua/lundump.c +0 -227
  449. data/lua-hooks/ext/lua/lundump.h +0 -36
  450. data/lua-hooks/ext/lua/lvm.c +0 -767
  451. data/lua-hooks/ext/lua/lvm.h +0 -36
  452. data/lua-hooks/ext/lua/lzio.c +0 -82
  453. data/lua-hooks/ext/lua/lzio.h +0 -67
  454. data/lua-hooks/ext/lua/print.c +0 -227
@@ -0,0 +1,2554 @@
1
+ /*
2
+ ** Trace recorder (bytecode -> SSA IR).
3
+ ** Copyright (C) 2005-2015 Mike Pall. See Copyright Notice in luajit.h
4
+ */
5
+
6
+ #define lj_record_c
7
+ #define LUA_CORE
8
+
9
+ #include "lj_obj.h"
10
+
11
+ #if LJ_HASJIT
12
+
13
+ #include "lj_err.h"
14
+ #include "lj_str.h"
15
+ #include "lj_tab.h"
16
+ #include "lj_meta.h"
17
+ #include "lj_frame.h"
18
+ #if LJ_HASFFI
19
+ #include "lj_ctype.h"
20
+ #endif
21
+ #include "lj_bc.h"
22
+ #include "lj_ff.h"
23
+ #if LJ_HASPROFILE
24
+ #include "lj_debug.h"
25
+ #endif
26
+ #include "lj_ir.h"
27
+ #include "lj_jit.h"
28
+ #include "lj_ircall.h"
29
+ #include "lj_iropt.h"
30
+ #include "lj_trace.h"
31
+ #include "lj_record.h"
32
+ #include "lj_ffrecord.h"
33
+ #include "lj_snap.h"
34
+ #include "lj_dispatch.h"
35
+ #include "lj_vm.h"
36
+
37
+ /* Some local macros to save typing. Undef'd at the end. */
38
+ #define IR(ref) (&J->cur.ir[(ref)])
39
+
40
+ /* Pass IR on to next optimization in chain (FOLD). */
41
+ #define emitir(ot, a, b) (lj_ir_set(J, (ot), (a), (b)), lj_opt_fold(J))
42
+
43
+ /* Emit raw IR without passing through optimizations. */
44
+ #define emitir_raw(ot, a, b) (lj_ir_set(J, (ot), (a), (b)), lj_ir_emit(J))
45
+
46
+ /* -- Sanity checks ------------------------------------------------------- */
47
+
48
+ #ifdef LUA_USE_ASSERT
49
+ /* Sanity check the whole IR -- sloooow. */
50
+ static void rec_check_ir(jit_State *J)
51
+ {
52
+ IRRef i, nins = J->cur.nins, nk = J->cur.nk;
53
+ lua_assert(nk <= REF_BIAS && nins >= REF_BIAS && nins < 65536);
54
+ for (i = nins-1; i >= nk; i--) {
55
+ IRIns *ir = IR(i);
56
+ uint32_t mode = lj_ir_mode[ir->o];
57
+ IRRef op1 = ir->op1;
58
+ IRRef op2 = ir->op2;
59
+ switch (irm_op1(mode)) {
60
+ case IRMnone: lua_assert(op1 == 0); break;
61
+ case IRMref: lua_assert(op1 >= nk);
62
+ lua_assert(i >= REF_BIAS ? op1 < i : op1 > i); break;
63
+ case IRMlit: break;
64
+ case IRMcst: lua_assert(i < REF_BIAS); continue;
65
+ }
66
+ switch (irm_op2(mode)) {
67
+ case IRMnone: lua_assert(op2 == 0); break;
68
+ case IRMref: lua_assert(op2 >= nk);
69
+ lua_assert(i >= REF_BIAS ? op2 < i : op2 > i); break;
70
+ case IRMlit: break;
71
+ case IRMcst: lua_assert(0); break;
72
+ }
73
+ if (ir->prev) {
74
+ lua_assert(ir->prev >= nk);
75
+ lua_assert(i >= REF_BIAS ? ir->prev < i : ir->prev > i);
76
+ lua_assert(ir->o == IR_NOP || IR(ir->prev)->o == ir->o);
77
+ }
78
+ }
79
+ }
80
+
81
+ /* Compare stack slots and frames of the recorder and the VM. */
82
+ static void rec_check_slots(jit_State *J)
83
+ {
84
+ BCReg s, nslots = J->baseslot + J->maxslot;
85
+ int32_t depth = 0;
86
+ cTValue *base = J->L->base - J->baseslot;
87
+ lua_assert(J->baseslot >= 1 && J->baseslot < LJ_MAX_JSLOTS);
88
+ lua_assert(J->baseslot == 1 || (J->slot[J->baseslot-1] & TREF_FRAME));
89
+ lua_assert(nslots < LJ_MAX_JSLOTS);
90
+ for (s = 0; s < nslots; s++) {
91
+ TRef tr = J->slot[s];
92
+ if (tr) {
93
+ cTValue *tv = &base[s];
94
+ IRRef ref = tref_ref(tr);
95
+ IRIns *ir;
96
+ lua_assert(ref >= J->cur.nk && ref < J->cur.nins);
97
+ ir = IR(ref);
98
+ lua_assert(irt_t(ir->t) == tref_t(tr));
99
+ if (s == 0) {
100
+ lua_assert(tref_isfunc(tr));
101
+ } else if ((tr & TREF_FRAME)) {
102
+ GCfunc *fn = gco2func(frame_gc(tv));
103
+ BCReg delta = (BCReg)(tv - frame_prev(tv));
104
+ lua_assert(tref_isfunc(tr));
105
+ if (tref_isk(tr)) lua_assert(fn == ir_kfunc(ir));
106
+ lua_assert(s > delta ? (J->slot[s-delta] & TREF_FRAME) : (s == delta));
107
+ depth++;
108
+ } else if ((tr & TREF_CONT)) {
109
+ lua_assert(ir_kptr(ir) == gcrefp(tv->gcr, void));
110
+ lua_assert((J->slot[s+1] & TREF_FRAME));
111
+ depth++;
112
+ } else {
113
+ if (tvisnumber(tv))
114
+ lua_assert(tref_isnumber(tr)); /* Could be IRT_INT etc., too. */
115
+ else
116
+ lua_assert(itype2irt(tv) == tref_type(tr));
117
+ if (tref_isk(tr)) { /* Compare constants. */
118
+ TValue tvk;
119
+ lj_ir_kvalue(J->L, &tvk, ir);
120
+ if (!(tvisnum(&tvk) && tvisnan(&tvk)))
121
+ lua_assert(lj_obj_equal(tv, &tvk));
122
+ else
123
+ lua_assert(tvisnum(tv) && tvisnan(tv));
124
+ }
125
+ }
126
+ }
127
+ }
128
+ lua_assert(J->framedepth == depth);
129
+ }
130
+ #endif
131
+
132
+ /* -- Type handling and specialization ------------------------------------ */
133
+
134
+ /* Note: these functions return tagged references (TRef). */
135
+
136
+ /* Specialize a slot to a specific type. Note: slot can be negative! */
137
+ static TRef sloadt(jit_State *J, int32_t slot, IRType t, int mode)
138
+ {
139
+ /* Caller may set IRT_GUARD in t. */
140
+ TRef ref = emitir_raw(IRT(IR_SLOAD, t), (int32_t)J->baseslot+slot, mode);
141
+ J->base[slot] = ref;
142
+ return ref;
143
+ }
144
+
145
+ /* Specialize a slot to the runtime type. Note: slot can be negative! */
146
+ static TRef sload(jit_State *J, int32_t slot)
147
+ {
148
+ IRType t = itype2irt(&J->L->base[slot]);
149
+ TRef ref = emitir_raw(IRTG(IR_SLOAD, t), (int32_t)J->baseslot+slot,
150
+ IRSLOAD_TYPECHECK);
151
+ if (irtype_ispri(t)) ref = TREF_PRI(t); /* Canonicalize primitive refs. */
152
+ J->base[slot] = ref;
153
+ return ref;
154
+ }
155
+
156
+ /* Get TRef from slot. Load slot and specialize if not done already. */
157
+ #define getslot(J, s) (J->base[(s)] ? J->base[(s)] : sload(J, (int32_t)(s)))
158
+
159
+ /* Get TRef for current function. */
160
+ static TRef getcurrf(jit_State *J)
161
+ {
162
+ if (J->base[-1])
163
+ return J->base[-1];
164
+ lua_assert(J->baseslot == 1);
165
+ return sloadt(J, -1, IRT_FUNC, IRSLOAD_READONLY);
166
+ }
167
+
168
+ /* Compare for raw object equality.
169
+ ** Returns 0 if the objects are the same.
170
+ ** Returns 1 if they are different, but the same type.
171
+ ** Returns 2 for two different types.
172
+ ** Comparisons between primitives always return 1 -- no caller cares about it.
173
+ */
174
+ int lj_record_objcmp(jit_State *J, TRef a, TRef b, cTValue *av, cTValue *bv)
175
+ {
176
+ int diff = !lj_obj_equal(av, bv);
177
+ if (!tref_isk2(a, b)) { /* Shortcut, also handles primitives. */
178
+ IRType ta = tref_isinteger(a) ? IRT_INT : tref_type(a);
179
+ IRType tb = tref_isinteger(b) ? IRT_INT : tref_type(b);
180
+ if (ta != tb) {
181
+ /* Widen mixed number/int comparisons to number/number comparison. */
182
+ if (ta == IRT_INT && tb == IRT_NUM) {
183
+ a = emitir(IRTN(IR_CONV), a, IRCONV_NUM_INT);
184
+ ta = IRT_NUM;
185
+ } else if (ta == IRT_NUM && tb == IRT_INT) {
186
+ b = emitir(IRTN(IR_CONV), b, IRCONV_NUM_INT);
187
+ } else {
188
+ return 2; /* Two different types are never equal. */
189
+ }
190
+ }
191
+ emitir(IRTG(diff ? IR_NE : IR_EQ, ta), a, b);
192
+ }
193
+ return diff;
194
+ }
195
+
196
+ /* Constify a value. Returns 0 for non-representable object types. */
197
+ TRef lj_record_constify(jit_State *J, cTValue *o)
198
+ {
199
+ if (tvisgcv(o))
200
+ return lj_ir_kgc(J, gcV(o), itype2irt(o));
201
+ else if (tvisint(o))
202
+ return lj_ir_kint(J, intV(o));
203
+ else if (tvisnum(o))
204
+ return lj_ir_knumint(J, numV(o));
205
+ else if (tvisbool(o))
206
+ return TREF_PRI(itype2irt(o));
207
+ else
208
+ return 0; /* Can't represent lightuserdata (pointless). */
209
+ }
210
+
211
+ /* -- Record loop ops ----------------------------------------------------- */
212
+
213
+ /* Loop event. */
214
+ typedef enum {
215
+ LOOPEV_LEAVE, /* Loop is left or not entered. */
216
+ LOOPEV_ENTERLO, /* Loop is entered with a low iteration count left. */
217
+ LOOPEV_ENTER /* Loop is entered. */
218
+ } LoopEvent;
219
+
220
+ /* Canonicalize slots: convert integers to numbers. */
221
+ static void canonicalize_slots(jit_State *J)
222
+ {
223
+ BCReg s;
224
+ if (LJ_DUALNUM) return;
225
+ for (s = J->baseslot+J->maxslot-1; s >= 1; s--) {
226
+ TRef tr = J->slot[s];
227
+ if (tref_isinteger(tr)) {
228
+ IRIns *ir = IR(tref_ref(tr));
229
+ if (!(ir->o == IR_SLOAD && (ir->op2 & IRSLOAD_READONLY)))
230
+ J->slot[s] = emitir(IRTN(IR_CONV), tr, IRCONV_NUM_INT);
231
+ }
232
+ }
233
+ }
234
+
235
+ /* Stop recording. */
236
+ void lj_record_stop(jit_State *J, TraceLink linktype, TraceNo lnk)
237
+ {
238
+ #ifdef LUAJIT_ENABLE_TABLE_BUMP
239
+ if (J->retryrec)
240
+ lj_trace_err(J, LJ_TRERR_RETRY);
241
+ #endif
242
+ lj_trace_end(J);
243
+ J->cur.linktype = (uint8_t)linktype;
244
+ J->cur.link = (uint16_t)lnk;
245
+ /* Looping back at the same stack level? */
246
+ if (lnk == J->cur.traceno && J->framedepth + J->retdepth == 0) {
247
+ if ((J->flags & JIT_F_OPT_LOOP)) /* Shall we try to create a loop? */
248
+ goto nocanon; /* Do not canonicalize or we lose the narrowing. */
249
+ if (J->cur.root) /* Otherwise ensure we always link to the root trace. */
250
+ J->cur.link = J->cur.root;
251
+ }
252
+ canonicalize_slots(J);
253
+ nocanon:
254
+ /* Note: all loop ops must set J->pc to the following instruction! */
255
+ lj_snap_add(J); /* Add loop snapshot. */
256
+ J->needsnap = 0;
257
+ J->mergesnap = 1; /* In case recording continues. */
258
+ }
259
+
260
+ /* Search bytecode backwards for a int/num constant slot initializer. */
261
+ static TRef find_kinit(jit_State *J, const BCIns *endpc, BCReg slot, IRType t)
262
+ {
263
+ /* This algorithm is rather simplistic and assumes quite a bit about
264
+ ** how the bytecode is generated. It works fine for FORI initializers,
265
+ ** but it won't necessarily work in other cases (e.g. iterator arguments).
266
+ ** It doesn't do anything fancy, either (like backpropagating MOVs).
267
+ */
268
+ const BCIns *pc, *startpc = proto_bc(J->pt);
269
+ for (pc = endpc-1; pc > startpc; pc--) {
270
+ BCIns ins = *pc;
271
+ BCOp op = bc_op(ins);
272
+ /* First try to find the last instruction that stores to this slot. */
273
+ if (bcmode_a(op) == BCMbase && bc_a(ins) <= slot) {
274
+ return 0; /* Multiple results, e.g. from a CALL or KNIL. */
275
+ } else if (bcmode_a(op) == BCMdst && bc_a(ins) == slot) {
276
+ if (op == BC_KSHORT || op == BC_KNUM) { /* Found const. initializer. */
277
+ /* Now try to verify there's no forward jump across it. */
278
+ const BCIns *kpc = pc;
279
+ for (; pc > startpc; pc--)
280
+ if (bc_op(*pc) == BC_JMP) {
281
+ const BCIns *target = pc+bc_j(*pc)+1;
282
+ if (target > kpc && target <= endpc)
283
+ return 0; /* Conditional assignment. */
284
+ }
285
+ if (op == BC_KSHORT) {
286
+ int32_t k = (int32_t)(int16_t)bc_d(ins);
287
+ return t == IRT_INT ? lj_ir_kint(J, k) : lj_ir_knum(J, (lua_Number)k);
288
+ } else {
289
+ cTValue *tv = proto_knumtv(J->pt, bc_d(ins));
290
+ if (t == IRT_INT) {
291
+ int32_t k = numberVint(tv);
292
+ if (tvisint(tv) || numV(tv) == (lua_Number)k) /* -0 is ok here. */
293
+ return lj_ir_kint(J, k);
294
+ return 0; /* Type mismatch. */
295
+ } else {
296
+ return lj_ir_knum(J, numberVnum(tv));
297
+ }
298
+ }
299
+ }
300
+ return 0; /* Non-constant initializer. */
301
+ }
302
+ }
303
+ return 0; /* No assignment to this slot found? */
304
+ }
305
+
306
+ /* Load and optionally convert a FORI argument from a slot. */
307
+ static TRef fori_load(jit_State *J, BCReg slot, IRType t, int mode)
308
+ {
309
+ int conv = (tvisint(&J->L->base[slot]) != (t==IRT_INT)) ? IRSLOAD_CONVERT : 0;
310
+ return sloadt(J, (int32_t)slot,
311
+ t + (((mode & IRSLOAD_TYPECHECK) ||
312
+ (conv && t == IRT_INT && !(mode >> 16))) ?
313
+ IRT_GUARD : 0),
314
+ mode + conv);
315
+ }
316
+
317
+ /* Peek before FORI to find a const initializer. Otherwise load from slot. */
318
+ static TRef fori_arg(jit_State *J, const BCIns *fori, BCReg slot,
319
+ IRType t, int mode)
320
+ {
321
+ TRef tr = J->base[slot];
322
+ if (!tr) {
323
+ tr = find_kinit(J, fori, slot, t);
324
+ if (!tr)
325
+ tr = fori_load(J, slot, t, mode);
326
+ }
327
+ return tr;
328
+ }
329
+
330
+ /* Return the direction of the FOR loop iterator.
331
+ ** It's important to exactly reproduce the semantics of the interpreter.
332
+ */
333
+ static int rec_for_direction(cTValue *o)
334
+ {
335
+ return (tvisint(o) ? intV(o) : (int32_t)o->u32.hi) >= 0;
336
+ }
337
+
338
+ /* Simulate the runtime behavior of the FOR loop iterator. */
339
+ static LoopEvent rec_for_iter(IROp *op, cTValue *o, int isforl)
340
+ {
341
+ lua_Number stopv = numberVnum(&o[FORL_STOP]);
342
+ lua_Number idxv = numberVnum(&o[FORL_IDX]);
343
+ lua_Number stepv = numberVnum(&o[FORL_STEP]);
344
+ if (isforl)
345
+ idxv += stepv;
346
+ if (rec_for_direction(&o[FORL_STEP])) {
347
+ if (idxv <= stopv) {
348
+ *op = IR_LE;
349
+ return idxv + 2*stepv > stopv ? LOOPEV_ENTERLO : LOOPEV_ENTER;
350
+ }
351
+ *op = IR_GT; return LOOPEV_LEAVE;
352
+ } else {
353
+ if (stopv <= idxv) {
354
+ *op = IR_GE;
355
+ return idxv + 2*stepv < stopv ? LOOPEV_ENTERLO : LOOPEV_ENTER;
356
+ }
357
+ *op = IR_LT; return LOOPEV_LEAVE;
358
+ }
359
+ }
360
+
361
+ /* Record checks for FOR loop overflow and step direction. */
362
+ static void rec_for_check(jit_State *J, IRType t, int dir,
363
+ TRef stop, TRef step, int init)
364
+ {
365
+ if (!tref_isk(step)) {
366
+ /* Non-constant step: need a guard for the direction. */
367
+ TRef zero = (t == IRT_INT) ? lj_ir_kint(J, 0) : lj_ir_knum_zero(J);
368
+ emitir(IRTG(dir ? IR_GE : IR_LT, t), step, zero);
369
+ /* Add hoistable overflow checks for a narrowed FORL index. */
370
+ if (init && t == IRT_INT) {
371
+ if (tref_isk(stop)) {
372
+ /* Constant stop: optimize check away or to a range check for step. */
373
+ int32_t k = IR(tref_ref(stop))->i;
374
+ if (dir) {
375
+ if (k > 0)
376
+ emitir(IRTGI(IR_LE), step, lj_ir_kint(J, (int32_t)0x7fffffff-k));
377
+ } else {
378
+ if (k < 0)
379
+ emitir(IRTGI(IR_GE), step, lj_ir_kint(J, (int32_t)0x80000000-k));
380
+ }
381
+ } else {
382
+ /* Stop+step variable: need full overflow check. */
383
+ TRef tr = emitir(IRTGI(IR_ADDOV), step, stop);
384
+ emitir(IRTI(IR_USE), tr, 0); /* ADDOV is weak. Avoid dead result. */
385
+ }
386
+ }
387
+ } else if (init && t == IRT_INT && !tref_isk(stop)) {
388
+ /* Constant step: optimize overflow check to a range check for stop. */
389
+ int32_t k = IR(tref_ref(step))->i;
390
+ k = (int32_t)(dir ? 0x7fffffff : 0x80000000) - k;
391
+ emitir(IRTGI(dir ? IR_LE : IR_GE), stop, lj_ir_kint(J, k));
392
+ }
393
+ }
394
+
395
+ /* Record a FORL instruction. */
396
+ static void rec_for_loop(jit_State *J, const BCIns *fori, ScEvEntry *scev,
397
+ int init)
398
+ {
399
+ BCReg ra = bc_a(*fori);
400
+ cTValue *tv = &J->L->base[ra];
401
+ TRef idx = J->base[ra+FORL_IDX];
402
+ IRType t = idx ? tref_type(idx) :
403
+ (init || LJ_DUALNUM) ? lj_opt_narrow_forl(J, tv) : IRT_NUM;
404
+ int mode = IRSLOAD_INHERIT +
405
+ ((!LJ_DUALNUM || tvisint(tv) == (t == IRT_INT)) ? IRSLOAD_READONLY : 0);
406
+ TRef stop = fori_arg(J, fori, ra+FORL_STOP, t, mode);
407
+ TRef step = fori_arg(J, fori, ra+FORL_STEP, t, mode);
408
+ int tc, dir = rec_for_direction(&tv[FORL_STEP]);
409
+ lua_assert(bc_op(*fori) == BC_FORI || bc_op(*fori) == BC_JFORI);
410
+ scev->t.irt = t;
411
+ scev->dir = dir;
412
+ scev->stop = tref_ref(stop);
413
+ scev->step = tref_ref(step);
414
+ rec_for_check(J, t, dir, stop, step, init);
415
+ scev->start = tref_ref(find_kinit(J, fori, ra+FORL_IDX, IRT_INT));
416
+ tc = (LJ_DUALNUM &&
417
+ !(scev->start && irref_isk(scev->stop) && irref_isk(scev->step) &&
418
+ tvisint(&tv[FORL_IDX]) == (t == IRT_INT))) ?
419
+ IRSLOAD_TYPECHECK : 0;
420
+ if (tc) {
421
+ J->base[ra+FORL_STOP] = stop;
422
+ J->base[ra+FORL_STEP] = step;
423
+ }
424
+ if (!idx)
425
+ idx = fori_load(J, ra+FORL_IDX, t,
426
+ IRSLOAD_INHERIT + tc + (J->scev.start << 16));
427
+ if (!init)
428
+ J->base[ra+FORL_IDX] = idx = emitir(IRT(IR_ADD, t), idx, step);
429
+ J->base[ra+FORL_EXT] = idx;
430
+ scev->idx = tref_ref(idx);
431
+ setmref(scev->pc, fori);
432
+ J->maxslot = ra+FORL_EXT+1;
433
+ }
434
+
435
+ /* Record FORL/JFORL or FORI/JFORI. */
436
+ static LoopEvent rec_for(jit_State *J, const BCIns *fori, int isforl)
437
+ {
438
+ BCReg ra = bc_a(*fori);
439
+ TValue *tv = &J->L->base[ra];
440
+ TRef *tr = &J->base[ra];
441
+ IROp op;
442
+ LoopEvent ev;
443
+ TRef stop;
444
+ IRType t;
445
+ if (isforl) { /* Handle FORL/JFORL opcodes. */
446
+ TRef idx = tr[FORL_IDX];
447
+ if (mref(J->scev.pc, const BCIns) == fori && tref_ref(idx) == J->scev.idx) {
448
+ t = J->scev.t.irt;
449
+ stop = J->scev.stop;
450
+ idx = emitir(IRT(IR_ADD, t), idx, J->scev.step);
451
+ tr[FORL_EXT] = tr[FORL_IDX] = idx;
452
+ } else {
453
+ ScEvEntry scev;
454
+ rec_for_loop(J, fori, &scev, 0);
455
+ t = scev.t.irt;
456
+ stop = scev.stop;
457
+ }
458
+ } else { /* Handle FORI/JFORI opcodes. */
459
+ BCReg i;
460
+ lj_meta_for(J->L, tv);
461
+ t = (LJ_DUALNUM || tref_isint(tr[FORL_IDX])) ? lj_opt_narrow_forl(J, tv) :
462
+ IRT_NUM;
463
+ for (i = FORL_IDX; i <= FORL_STEP; i++) {
464
+ if (!tr[i]) sload(J, ra+i);
465
+ lua_assert(tref_isnumber_str(tr[i]));
466
+ if (tref_isstr(tr[i]))
467
+ tr[i] = emitir(IRTG(IR_STRTO, IRT_NUM), tr[i], 0);
468
+ if (t == IRT_INT) {
469
+ if (!tref_isinteger(tr[i]))
470
+ tr[i] = emitir(IRTGI(IR_CONV), tr[i], IRCONV_INT_NUM|IRCONV_CHECK);
471
+ } else {
472
+ if (!tref_isnum(tr[i]))
473
+ tr[i] = emitir(IRTN(IR_CONV), tr[i], IRCONV_NUM_INT);
474
+ }
475
+ }
476
+ tr[FORL_EXT] = tr[FORL_IDX];
477
+ stop = tr[FORL_STOP];
478
+ rec_for_check(J, t, rec_for_direction(&tv[FORL_STEP]),
479
+ stop, tr[FORL_STEP], 1);
480
+ }
481
+
482
+ ev = rec_for_iter(&op, tv, isforl);
483
+ if (ev == LOOPEV_LEAVE) {
484
+ J->maxslot = ra+FORL_EXT+1;
485
+ J->pc = fori+1;
486
+ } else {
487
+ J->maxslot = ra;
488
+ J->pc = fori+bc_j(*fori)+1;
489
+ }
490
+ lj_snap_add(J);
491
+
492
+ emitir(IRTG(op, t), tr[FORL_IDX], stop);
493
+
494
+ if (ev == LOOPEV_LEAVE) {
495
+ J->maxslot = ra;
496
+ J->pc = fori+bc_j(*fori)+1;
497
+ } else {
498
+ J->maxslot = ra+FORL_EXT+1;
499
+ J->pc = fori+1;
500
+ }
501
+ J->needsnap = 1;
502
+ return ev;
503
+ }
504
+
505
+ /* Record ITERL/JITERL. */
506
+ static LoopEvent rec_iterl(jit_State *J, const BCIns iterins)
507
+ {
508
+ BCReg ra = bc_a(iterins);
509
+ lua_assert(!LJ_FR2); /* TODO_FR2: handle different frame setup. */
510
+ if (!tref_isnil(getslot(J, ra))) { /* Looping back? */
511
+ J->base[ra-1] = J->base[ra]; /* Copy result of ITERC to control var. */
512
+ J->maxslot = ra-1+bc_b(J->pc[-1]);
513
+ J->pc += bc_j(iterins)+1;
514
+ return LOOPEV_ENTER;
515
+ } else {
516
+ J->maxslot = ra-3;
517
+ J->pc++;
518
+ return LOOPEV_LEAVE;
519
+ }
520
+ }
521
+
522
+ /* Record LOOP/JLOOP. Now, that was easy. */
523
+ static LoopEvent rec_loop(jit_State *J, BCReg ra)
524
+ {
525
+ if (ra < J->maxslot) J->maxslot = ra;
526
+ J->pc++;
527
+ return LOOPEV_ENTER;
528
+ }
529
+
530
+ /* Check if a loop repeatedly failed to trace because it didn't loop back. */
531
+ static int innerloopleft(jit_State *J, const BCIns *pc)
532
+ {
533
+ ptrdiff_t i;
534
+ for (i = 0; i < PENALTY_SLOTS; i++)
535
+ if (mref(J->penalty[i].pc, const BCIns) == pc) {
536
+ if ((J->penalty[i].reason == LJ_TRERR_LLEAVE ||
537
+ J->penalty[i].reason == LJ_TRERR_LINNER) &&
538
+ J->penalty[i].val >= 2*PENALTY_MIN)
539
+ return 1;
540
+ break;
541
+ }
542
+ return 0;
543
+ }
544
+
545
+ /* Handle the case when an interpreted loop op is hit. */
546
+ static void rec_loop_interp(jit_State *J, const BCIns *pc, LoopEvent ev)
547
+ {
548
+ if (J->parent == 0 && J->exitno == 0) {
549
+ if (pc == J->startpc && J->framedepth + J->retdepth == 0) {
550
+ /* Same loop? */
551
+ if (ev == LOOPEV_LEAVE) /* Must loop back to form a root trace. */
552
+ lj_trace_err(J, LJ_TRERR_LLEAVE);
553
+ lj_record_stop(J, LJ_TRLINK_LOOP, J->cur.traceno); /* Looping trace. */
554
+ } else if (ev != LOOPEV_LEAVE) { /* Entering inner loop? */
555
+ /* It's usually better to abort here and wait until the inner loop
556
+ ** is traced. But if the inner loop repeatedly didn't loop back,
557
+ ** this indicates a low trip count. In this case try unrolling
558
+ ** an inner loop even in a root trace. But it's better to be a bit
559
+ ** more conservative here and only do it for very short loops.
560
+ */
561
+ if (bc_j(*pc) != -1 && !innerloopleft(J, pc))
562
+ lj_trace_err(J, LJ_TRERR_LINNER); /* Root trace hit an inner loop. */
563
+ if ((ev != LOOPEV_ENTERLO &&
564
+ J->loopref && J->cur.nins - J->loopref > 24) || --J->loopunroll < 0)
565
+ lj_trace_err(J, LJ_TRERR_LUNROLL); /* Limit loop unrolling. */
566
+ J->loopref = J->cur.nins;
567
+ }
568
+ } else if (ev != LOOPEV_LEAVE) { /* Side trace enters an inner loop. */
569
+ J->loopref = J->cur.nins;
570
+ if (--J->loopunroll < 0)
571
+ lj_trace_err(J, LJ_TRERR_LUNROLL); /* Limit loop unrolling. */
572
+ } /* Side trace continues across a loop that's left or not entered. */
573
+ }
574
+
575
+ /* Handle the case when an already compiled loop op is hit. */
576
+ static void rec_loop_jit(jit_State *J, TraceNo lnk, LoopEvent ev)
577
+ {
578
+ if (J->parent == 0 && J->exitno == 0) { /* Root trace hit an inner loop. */
579
+ /* Better let the inner loop spawn a side trace back here. */
580
+ lj_trace_err(J, LJ_TRERR_LINNER);
581
+ } else if (ev != LOOPEV_LEAVE) { /* Side trace enters a compiled loop. */
582
+ J->instunroll = 0; /* Cannot continue across a compiled loop op. */
583
+ if (J->pc == J->startpc && J->framedepth + J->retdepth == 0)
584
+ lj_record_stop(J, LJ_TRLINK_LOOP, J->cur.traceno); /* Form extra loop. */
585
+ else
586
+ lj_record_stop(J, LJ_TRLINK_ROOT, lnk); /* Link to the loop. */
587
+ } /* Side trace continues across a loop that's left or not entered. */
588
+ }
589
+
590
+ /* -- Record profiler hook checks ----------------------------------------- */
591
+
592
+ #if LJ_HASPROFILE
593
+
594
+ /* Need to insert profiler hook check? */
595
+ static int rec_profile_need(jit_State *J, GCproto *pt, const BCIns *pc)
596
+ {
597
+ GCproto *ppt;
598
+ lua_assert(J->prof_mode == 'f' || J->prof_mode == 'l');
599
+ if (!pt)
600
+ return 0;
601
+ ppt = J->prev_pt;
602
+ J->prev_pt = pt;
603
+ if (pt != ppt && ppt) {
604
+ J->prev_line = -1;
605
+ return 1;
606
+ }
607
+ if (J->prof_mode == 'l') {
608
+ BCLine line = lj_debug_line(pt, proto_bcpos(pt, pc));
609
+ BCLine pline = J->prev_line;
610
+ J->prev_line = line;
611
+ if (pline != line)
612
+ return 1;
613
+ }
614
+ return 0;
615
+ }
616
+
617
+ static void rec_profile_ins(jit_State *J, const BCIns *pc)
618
+ {
619
+ if (J->prof_mode && rec_profile_need(J, J->pt, pc)) {
620
+ emitir(IRTG(IR_PROF, IRT_NIL), 0, 0);
621
+ lj_snap_add(J);
622
+ }
623
+ }
624
+
625
+ static void rec_profile_ret(jit_State *J)
626
+ {
627
+ if (J->prof_mode == 'f') {
628
+ emitir(IRTG(IR_PROF, IRT_NIL), 0, 0);
629
+ J->prev_pt = NULL;
630
+ lj_snap_add(J);
631
+ }
632
+ }
633
+
634
+ #endif
635
+
636
+ /* -- Record calls and returns -------------------------------------------- */
637
+
638
+ /* Specialize to the runtime value of the called function or its prototype. */
639
+ static TRef rec_call_specialize(jit_State *J, GCfunc *fn, TRef tr)
640
+ {
641
+ TRef kfunc;
642
+ if (isluafunc(fn)) {
643
+ GCproto *pt = funcproto(fn);
644
+ /* Too many closures created? Probably not a monomorphic function. */
645
+ if (pt->flags >= PROTO_CLC_POLY) { /* Specialize to prototype instead. */
646
+ TRef trpt = emitir(IRT(IR_FLOAD, IRT_P32), tr, IRFL_FUNC_PC);
647
+ emitir(IRTG(IR_EQ, IRT_P32), trpt, lj_ir_kptr(J, proto_bc(pt)));
648
+ (void)lj_ir_kgc(J, obj2gco(pt), IRT_PROTO); /* Prevent GC of proto. */
649
+ return tr;
650
+ }
651
+ } else {
652
+ /* Don't specialize to non-monomorphic builtins. */
653
+ switch (fn->c.ffid) {
654
+ case FF_coroutine_wrap_aux:
655
+ case FF_string_gmatch_aux:
656
+ /* NYI: io_file_iter doesn't have an ffid, yet. */
657
+ { /* Specialize to the ffid. */
658
+ TRef trid = emitir(IRT(IR_FLOAD, IRT_U8), tr, IRFL_FUNC_FFID);
659
+ emitir(IRTG(IR_EQ, IRT_INT), trid, lj_ir_kint(J, fn->c.ffid));
660
+ }
661
+ return tr;
662
+ default:
663
+ /* NYI: don't specialize to non-monomorphic C functions. */
664
+ break;
665
+ }
666
+ }
667
+ /* Otherwise specialize to the function (closure) value itself. */
668
+ kfunc = lj_ir_kfunc(J, fn);
669
+ emitir(IRTG(IR_EQ, IRT_FUNC), tr, kfunc);
670
+ return kfunc;
671
+ }
672
+
673
+ /* Record call setup. */
674
+ static void rec_call_setup(jit_State *J, BCReg func, ptrdiff_t nargs)
675
+ {
676
+ RecordIndex ix;
677
+ TValue *functv = &J->L->base[func];
678
+ TRef *fbase = &J->base[func];
679
+ ptrdiff_t i;
680
+ lua_assert(!LJ_FR2); /* TODO_FR2: handle different frame setup. */
681
+ for (i = 0; i <= nargs; i++)
682
+ (void)getslot(J, func+i); /* Ensure func and all args have a reference. */
683
+ if (!tref_isfunc(fbase[0])) { /* Resolve __call metamethod. */
684
+ ix.tab = fbase[0];
685
+ copyTV(J->L, &ix.tabv, functv);
686
+ if (!lj_record_mm_lookup(J, &ix, MM_call) || !tref_isfunc(ix.mobj))
687
+ lj_trace_err(J, LJ_TRERR_NOMM);
688
+ for (i = ++nargs; i > 0; i--) /* Shift arguments up. */
689
+ fbase[i] = fbase[i-1];
690
+ fbase[0] = ix.mobj; /* Replace function. */
691
+ functv = &ix.mobjv;
692
+ }
693
+ fbase[0] = TREF_FRAME | rec_call_specialize(J, funcV(functv), fbase[0]);
694
+ J->maxslot = (BCReg)nargs;
695
+ }
696
+
697
+ /* Record call. */
698
+ void lj_record_call(jit_State *J, BCReg func, ptrdiff_t nargs)
699
+ {
700
+ rec_call_setup(J, func, nargs);
701
+ /* Bump frame. */
702
+ J->framedepth++;
703
+ J->base += func+1;
704
+ J->baseslot += func+1;
705
+ }
706
+
707
+ /* Record tail call. */
708
+ void lj_record_tailcall(jit_State *J, BCReg func, ptrdiff_t nargs)
709
+ {
710
+ rec_call_setup(J, func, nargs);
711
+ if (frame_isvarg(J->L->base - 1)) {
712
+ BCReg cbase = (BCReg)frame_delta(J->L->base - 1);
713
+ if (--J->framedepth < 0)
714
+ lj_trace_err(J, LJ_TRERR_NYIRETL);
715
+ J->baseslot -= (BCReg)cbase;
716
+ J->base -= cbase;
717
+ func += cbase;
718
+ }
719
+ /* Move func + args down. */
720
+ memmove(&J->base[-1], &J->base[func], sizeof(TRef)*(J->maxslot+1));
721
+ /* Note: the new TREF_FRAME is now at J->base[-1] (even for slot #0). */
722
+ /* Tailcalls can form a loop, so count towards the loop unroll limit. */
723
+ if (++J->tailcalled > J->loopunroll)
724
+ lj_trace_err(J, LJ_TRERR_LUNROLL);
725
+ }
726
+
727
+ /* Check unroll limits for down-recursion. */
728
+ static int check_downrec_unroll(jit_State *J, GCproto *pt)
729
+ {
730
+ IRRef ptref;
731
+ for (ptref = J->chain[IR_KGC]; ptref; ptref = IR(ptref)->prev)
732
+ if (ir_kgc(IR(ptref)) == obj2gco(pt)) {
733
+ int count = 0;
734
+ IRRef ref;
735
+ for (ref = J->chain[IR_RETF]; ref; ref = IR(ref)->prev)
736
+ if (IR(ref)->op1 == ptref)
737
+ count++;
738
+ if (count) {
739
+ if (J->pc == J->startpc) {
740
+ if (count + J->tailcalled > J->param[JIT_P_recunroll])
741
+ return 1;
742
+ } else {
743
+ lj_trace_err(J, LJ_TRERR_DOWNREC);
744
+ }
745
+ }
746
+ }
747
+ return 0;
748
+ }
749
+
750
+ static TRef rec_cat(jit_State *J, BCReg baseslot, BCReg topslot);
751
+
752
+ /* Record return. */
753
+ void lj_record_ret(jit_State *J, BCReg rbase, ptrdiff_t gotresults)
754
+ {
755
+ TValue *frame = J->L->base - 1;
756
+ ptrdiff_t i;
757
+ for (i = 0; i < gotresults; i++)
758
+ (void)getslot(J, rbase+i); /* Ensure all results have a reference. */
759
+ while (frame_ispcall(frame)) { /* Immediately resolve pcall() returns. */
760
+ BCReg cbase = (BCReg)frame_delta(frame);
761
+ if (--J->framedepth < 0)
762
+ lj_trace_err(J, LJ_TRERR_NYIRETL);
763
+ lua_assert(J->baseslot > 1);
764
+ gotresults++;
765
+ rbase += cbase;
766
+ J->baseslot -= (BCReg)cbase;
767
+ J->base -= cbase;
768
+ J->base[--rbase] = TREF_TRUE; /* Prepend true to results. */
769
+ frame = frame_prevd(frame);
770
+ }
771
+ /* Return to lower frame via interpreter for unhandled cases. */
772
+ if (J->framedepth == 0 && J->pt && bc_isret(bc_op(*J->pc)) &&
773
+ (!frame_islua(frame) ||
774
+ (J->parent == 0 && J->exitno == 0 &&
775
+ !bc_isret(bc_op(J->cur.startins))))) {
776
+ /* NYI: specialize to frame type and return directly, not via RET*. */
777
+ for (i = 0; i < (ptrdiff_t)rbase; i++)
778
+ J->base[i] = 0; /* Purge dead slots. */
779
+ J->maxslot = rbase + (BCReg)gotresults;
780
+ lj_record_stop(J, LJ_TRLINK_RETURN, 0); /* Return to interpreter. */
781
+ return;
782
+ }
783
+ if (frame_isvarg(frame)) {
784
+ BCReg cbase = (BCReg)frame_delta(frame);
785
+ if (--J->framedepth < 0) /* NYI: return of vararg func to lower frame. */
786
+ lj_trace_err(J, LJ_TRERR_NYIRETL);
787
+ lua_assert(J->baseslot > 1);
788
+ rbase += cbase;
789
+ J->baseslot -= (BCReg)cbase;
790
+ J->base -= cbase;
791
+ frame = frame_prevd(frame);
792
+ }
793
+ if (frame_islua(frame)) { /* Return to Lua frame. */
794
+ BCIns callins = *(frame_pc(frame)-1);
795
+ ptrdiff_t nresults = bc_b(callins) ? (ptrdiff_t)bc_b(callins)-1 :gotresults;
796
+ BCReg cbase = bc_a(callins);
797
+ GCproto *pt = funcproto(frame_func(frame - (cbase+1-LJ_FR2)));
798
+ lua_assert(!LJ_FR2); /* TODO_FR2: handle different frame teardown. */
799
+ if ((pt->flags & PROTO_NOJIT))
800
+ lj_trace_err(J, LJ_TRERR_CJITOFF);
801
+ if (J->framedepth == 0 && J->pt && frame == J->L->base - 1) {
802
+ if (check_downrec_unroll(J, pt)) {
803
+ J->maxslot = (BCReg)(rbase + gotresults);
804
+ lj_snap_purge(J);
805
+ lj_record_stop(J, LJ_TRLINK_DOWNREC, J->cur.traceno); /* Down-rec. */
806
+ return;
807
+ }
808
+ lj_snap_add(J);
809
+ }
810
+ for (i = 0; i < nresults; i++) /* Adjust results. */
811
+ J->base[i-1] = i < gotresults ? J->base[rbase+i] : TREF_NIL;
812
+ J->maxslot = cbase+(BCReg)nresults;
813
+ if (J->framedepth > 0) { /* Return to a frame that is part of the trace. */
814
+ J->framedepth--;
815
+ lua_assert(J->baseslot > cbase+1);
816
+ J->baseslot -= cbase+1;
817
+ J->base -= cbase+1;
818
+ } else if (J->parent == 0 && J->exitno == 0 &&
819
+ !bc_isret(bc_op(J->cur.startins))) {
820
+ /* Return to lower frame would leave the loop in a root trace. */
821
+ lj_trace_err(J, LJ_TRERR_LLEAVE);
822
+ } else if (J->needsnap) { /* Tailcalled to ff with side-effects. */
823
+ lj_trace_err(J, LJ_TRERR_NYIRETL); /* No way to insert snapshot here. */
824
+ } else { /* Return to lower frame. Guard for the target we return to. */
825
+ TRef trpt = lj_ir_kgc(J, obj2gco(pt), IRT_PROTO);
826
+ TRef trpc = lj_ir_kptr(J, (void *)frame_pc(frame));
827
+ emitir(IRTG(IR_RETF, IRT_P32), trpt, trpc);
828
+ J->retdepth++;
829
+ J->needsnap = 1;
830
+ lua_assert(J->baseslot == 1);
831
+ /* Shift result slots up and clear the slots of the new frame below. */
832
+ memmove(J->base + cbase, J->base-1, sizeof(TRef)*nresults);
833
+ memset(J->base-1, 0, sizeof(TRef)*(cbase+1));
834
+ }
835
+ } else if (frame_iscont(frame)) { /* Return to continuation frame. */
836
+ ASMFunction cont = frame_contf(frame);
837
+ BCReg cbase = (BCReg)frame_delta(frame);
838
+ if ((J->framedepth -= 2) < 0)
839
+ lj_trace_err(J, LJ_TRERR_NYIRETL);
840
+ J->baseslot -= (BCReg)cbase;
841
+ J->base -= cbase;
842
+ J->maxslot = cbase-2;
843
+ if (cont == lj_cont_ra) {
844
+ /* Copy result to destination slot. */
845
+ BCReg dst = bc_a(*(frame_contpc(frame)-1));
846
+ J->base[dst] = gotresults ? J->base[cbase+rbase] : TREF_NIL;
847
+ if (dst >= J->maxslot) J->maxslot = dst+1;
848
+ } else if (cont == lj_cont_nop) {
849
+ /* Nothing to do here. */
850
+ } else if (cont == lj_cont_cat) {
851
+ BCReg bslot = bc_b(*(frame_contpc(frame)-1));
852
+ TRef tr = gotresults ? J->base[cbase+rbase] : TREF_NIL;
853
+ if (bslot != cbase-2) { /* Concatenate the remainder. */
854
+ TValue *b = J->L->base, save; /* Simulate lower frame and result. */
855
+ J->base[cbase-2] = tr;
856
+ copyTV(J->L, &save, b-2);
857
+ if (gotresults) copyTV(J->L, b-2, b+rbase); else setnilV(b-2);
858
+ J->L->base = b - cbase;
859
+ tr = rec_cat(J, bslot, cbase-2);
860
+ b = J->L->base + cbase; /* Undo. */
861
+ J->L->base = b;
862
+ copyTV(J->L, b-2, &save);
863
+ }
864
+ if (tr) { /* Store final result. */
865
+ BCReg dst = bc_a(*(frame_contpc(frame)-1));
866
+ J->base[dst] = tr;
867
+ if (dst >= J->maxslot) J->maxslot = dst+1;
868
+ } /* Otherwise continue with another __concat call. */
869
+ } else {
870
+ /* Result type already specialized. */
871
+ lua_assert(cont == lj_cont_condf || cont == lj_cont_condt);
872
+ }
873
+ } else {
874
+ lj_trace_err(J, LJ_TRERR_NYIRETL); /* NYI: handle return to C frame. */
875
+ }
876
+ lua_assert(J->baseslot >= 1);
877
+ }
878
+
879
+ /* -- Metamethod handling ------------------------------------------------- */
880
+
881
+ /* Prepare to record call to metamethod. */
882
+ static BCReg rec_mm_prep(jit_State *J, ASMFunction cont)
883
+ {
884
+ BCReg s, top = cont == lj_cont_cat ? J->maxslot : curr_proto(J->L)->framesize;
885
+ #if LJ_64
886
+ TRef trcont = lj_ir_kptr(J, (void *)((int64_t)cont-(int64_t)lj_vm_asm_begin));
887
+ #else
888
+ TRef trcont = lj_ir_kptr(J, (void *)cont);
889
+ #endif
890
+ J->base[top] = trcont | TREF_CONT;
891
+ J->framedepth++;
892
+ for (s = J->maxslot; s < top; s++)
893
+ J->base[s] = 0; /* Clear frame gap to avoid resurrecting previous refs. */
894
+ return top+1;
895
+ }
896
+
897
+ /* Record metamethod lookup. */
898
+ int lj_record_mm_lookup(jit_State *J, RecordIndex *ix, MMS mm)
899
+ {
900
+ RecordIndex mix;
901
+ GCtab *mt;
902
+ if (tref_istab(ix->tab)) {
903
+ mt = tabref(tabV(&ix->tabv)->metatable);
904
+ mix.tab = emitir(IRT(IR_FLOAD, IRT_TAB), ix->tab, IRFL_TAB_META);
905
+ } else if (tref_isudata(ix->tab)) {
906
+ int udtype = udataV(&ix->tabv)->udtype;
907
+ mt = tabref(udataV(&ix->tabv)->metatable);
908
+ /* The metatables of special userdata objects are treated as immutable. */
909
+ if (udtype != UDTYPE_USERDATA) {
910
+ cTValue *mo;
911
+ if (LJ_HASFFI && udtype == UDTYPE_FFI_CLIB) {
912
+ /* Specialize to the C library namespace object. */
913
+ emitir(IRTG(IR_EQ, IRT_P32), ix->tab, lj_ir_kptr(J, udataV(&ix->tabv)));
914
+ } else {
915
+ /* Specialize to the type of userdata. */
916
+ TRef tr = emitir(IRT(IR_FLOAD, IRT_U8), ix->tab, IRFL_UDATA_UDTYPE);
917
+ emitir(IRTGI(IR_EQ), tr, lj_ir_kint(J, udtype));
918
+ }
919
+ immutable_mt:
920
+ mo = lj_tab_getstr(mt, mmname_str(J2G(J), mm));
921
+ if (!mo || tvisnil(mo))
922
+ return 0; /* No metamethod. */
923
+ /* Treat metamethod or index table as immutable, too. */
924
+ if (!(tvisfunc(mo) || tvistab(mo)))
925
+ lj_trace_err(J, LJ_TRERR_BADTYPE);
926
+ copyTV(J->L, &ix->mobjv, mo);
927
+ ix->mobj = lj_ir_kgc(J, gcV(mo), tvisfunc(mo) ? IRT_FUNC : IRT_TAB);
928
+ ix->mtv = mt;
929
+ ix->mt = TREF_NIL; /* Dummy value for comparison semantics. */
930
+ return 1; /* Got metamethod or index table. */
931
+ }
932
+ mix.tab = emitir(IRT(IR_FLOAD, IRT_TAB), ix->tab, IRFL_UDATA_META);
933
+ } else {
934
+ /* Specialize to base metatable. Must flush mcode in lua_setmetatable(). */
935
+ mt = tabref(basemt_obj(J2G(J), &ix->tabv));
936
+ if (mt == NULL) {
937
+ ix->mt = TREF_NIL;
938
+ return 0; /* No metamethod. */
939
+ }
940
+ /* The cdata metatable is treated as immutable. */
941
+ if (LJ_HASFFI && tref_iscdata(ix->tab)) goto immutable_mt;
942
+ ix->mt = mix.tab = lj_ir_ktab(J, mt);
943
+ goto nocheck;
944
+ }
945
+ ix->mt = mt ? mix.tab : TREF_NIL;
946
+ emitir(IRTG(mt ? IR_NE : IR_EQ, IRT_TAB), mix.tab, lj_ir_knull(J, IRT_TAB));
947
+ nocheck:
948
+ if (mt) {
949
+ GCstr *mmstr = mmname_str(J2G(J), mm);
950
+ cTValue *mo = lj_tab_getstr(mt, mmstr);
951
+ if (mo && !tvisnil(mo))
952
+ copyTV(J->L, &ix->mobjv, mo);
953
+ ix->mtv = mt;
954
+ settabV(J->L, &mix.tabv, mt);
955
+ setstrV(J->L, &mix.keyv, mmstr);
956
+ mix.key = lj_ir_kstr(J, mmstr);
957
+ mix.val = 0;
958
+ mix.idxchain = 0;
959
+ ix->mobj = lj_record_idx(J, &mix);
960
+ return !tref_isnil(ix->mobj); /* 1 if metamethod found, 0 if not. */
961
+ }
962
+ return 0; /* No metamethod. */
963
+ }
964
+
965
+ /* Record call to arithmetic metamethod. */
966
+ static TRef rec_mm_arith(jit_State *J, RecordIndex *ix, MMS mm)
967
+ {
968
+ /* Set up metamethod call first to save ix->tab and ix->tabv. */
969
+ BCReg func = rec_mm_prep(J, mm == MM_concat ? lj_cont_cat : lj_cont_ra);
970
+ TRef *base = J->base + func;
971
+ TValue *basev = J->L->base + func;
972
+ base[1] = ix->tab; base[2] = ix->key;
973
+ copyTV(J->L, basev+1, &ix->tabv);
974
+ copyTV(J->L, basev+2, &ix->keyv);
975
+ if (!lj_record_mm_lookup(J, ix, mm)) { /* Lookup mm on 1st operand. */
976
+ if (mm != MM_unm) {
977
+ ix->tab = ix->key;
978
+ copyTV(J->L, &ix->tabv, &ix->keyv);
979
+ if (lj_record_mm_lookup(J, ix, mm)) /* Lookup mm on 2nd operand. */
980
+ goto ok;
981
+ }
982
+ lj_trace_err(J, LJ_TRERR_NOMM);
983
+ }
984
+ ok:
985
+ lua_assert(!LJ_FR2); /* TODO_FR2: handle different frame setup. */
986
+ base[0] = ix->mobj;
987
+ copyTV(J->L, basev+0, &ix->mobjv);
988
+ lj_record_call(J, func, 2);
989
+ return 0; /* No result yet. */
990
+ }
991
+
992
+ /* Record call to __len metamethod. */
993
+ static TRef rec_mm_len(jit_State *J, TRef tr, TValue *tv)
994
+ {
995
+ RecordIndex ix;
996
+ ix.tab = tr;
997
+ copyTV(J->L, &ix.tabv, tv);
998
+ if (lj_record_mm_lookup(J, &ix, MM_len)) {
999
+ BCReg func = rec_mm_prep(J, lj_cont_ra);
1000
+ TRef *base = J->base + func;
1001
+ TValue *basev = J->L->base + func;
1002
+ lua_assert(!LJ_FR2); /* TODO_FR2: handle different frame setup. */
1003
+ base[0] = ix.mobj; copyTV(J->L, basev+0, &ix.mobjv);
1004
+ base[1] = tr; copyTV(J->L, basev+1, tv);
1005
+ #if LJ_52
1006
+ base[2] = tr; copyTV(J->L, basev+2, tv);
1007
+ #else
1008
+ base[2] = TREF_NIL; setnilV(basev+2);
1009
+ #endif
1010
+ lj_record_call(J, func, 2);
1011
+ } else {
1012
+ if (LJ_52 && tref_istab(tr))
1013
+ return lj_ir_call(J, IRCALL_lj_tab_len, tr);
1014
+ lj_trace_err(J, LJ_TRERR_NOMM);
1015
+ }
1016
+ return 0; /* No result yet. */
1017
+ }
1018
+
1019
+ /* Call a comparison metamethod. */
1020
+ static void rec_mm_callcomp(jit_State *J, RecordIndex *ix, int op)
1021
+ {
1022
+ BCReg func = rec_mm_prep(J, (op&1) ? lj_cont_condf : lj_cont_condt);
1023
+ TRef *base = J->base + func;
1024
+ TValue *tv = J->L->base + func;
1025
+ lua_assert(!LJ_FR2); /* TODO_FR2: handle different frame setup. */
1026
+ base[0] = ix->mobj; base[1] = ix->val; base[2] = ix->key;
1027
+ copyTV(J->L, tv+0, &ix->mobjv);
1028
+ copyTV(J->L, tv+1, &ix->valv);
1029
+ copyTV(J->L, tv+2, &ix->keyv);
1030
+ lj_record_call(J, func, 2);
1031
+ }
1032
+
1033
+ /* Record call to equality comparison metamethod (for tab and udata only). */
1034
+ static void rec_mm_equal(jit_State *J, RecordIndex *ix, int op)
1035
+ {
1036
+ ix->tab = ix->val;
1037
+ copyTV(J->L, &ix->tabv, &ix->valv);
1038
+ if (lj_record_mm_lookup(J, ix, MM_eq)) { /* Lookup mm on 1st operand. */
1039
+ cTValue *bv;
1040
+ TRef mo1 = ix->mobj;
1041
+ TValue mo1v;
1042
+ copyTV(J->L, &mo1v, &ix->mobjv);
1043
+ /* Avoid the 2nd lookup and the objcmp if the metatables are equal. */
1044
+ bv = &ix->keyv;
1045
+ if (tvistab(bv) && tabref(tabV(bv)->metatable) == ix->mtv) {
1046
+ TRef mt2 = emitir(IRT(IR_FLOAD, IRT_TAB), ix->key, IRFL_TAB_META);
1047
+ emitir(IRTG(IR_EQ, IRT_TAB), mt2, ix->mt);
1048
+ } else if (tvisudata(bv) && tabref(udataV(bv)->metatable) == ix->mtv) {
1049
+ TRef mt2 = emitir(IRT(IR_FLOAD, IRT_TAB), ix->key, IRFL_UDATA_META);
1050
+ emitir(IRTG(IR_EQ, IRT_TAB), mt2, ix->mt);
1051
+ } else { /* Lookup metamethod on 2nd operand and compare both. */
1052
+ ix->tab = ix->key;
1053
+ copyTV(J->L, &ix->tabv, bv);
1054
+ if (!lj_record_mm_lookup(J, ix, MM_eq) ||
1055
+ lj_record_objcmp(J, mo1, ix->mobj, &mo1v, &ix->mobjv))
1056
+ return;
1057
+ }
1058
+ rec_mm_callcomp(J, ix, op);
1059
+ }
1060
+ }
1061
+
1062
+ /* Record call to ordered comparison metamethods (for arbitrary objects). */
1063
+ static void rec_mm_comp(jit_State *J, RecordIndex *ix, int op)
1064
+ {
1065
+ ix->tab = ix->val;
1066
+ copyTV(J->L, &ix->tabv, &ix->valv);
1067
+ while (1) {
1068
+ MMS mm = (op & 2) ? MM_le : MM_lt; /* Try __le + __lt or only __lt. */
1069
+ #if LJ_52
1070
+ if (!lj_record_mm_lookup(J, ix, mm)) { /* Lookup mm on 1st operand. */
1071
+ ix->tab = ix->key;
1072
+ copyTV(J->L, &ix->tabv, &ix->keyv);
1073
+ if (!lj_record_mm_lookup(J, ix, mm)) /* Lookup mm on 2nd operand. */
1074
+ goto nomatch;
1075
+ }
1076
+ rec_mm_callcomp(J, ix, op);
1077
+ return;
1078
+ #else
1079
+ if (lj_record_mm_lookup(J, ix, mm)) { /* Lookup mm on 1st operand. */
1080
+ cTValue *bv;
1081
+ TRef mo1 = ix->mobj;
1082
+ TValue mo1v;
1083
+ copyTV(J->L, &mo1v, &ix->mobjv);
1084
+ /* Avoid the 2nd lookup and the objcmp if the metatables are equal. */
1085
+ bv = &ix->keyv;
1086
+ if (tvistab(bv) && tabref(tabV(bv)->metatable) == ix->mtv) {
1087
+ TRef mt2 = emitir(IRT(IR_FLOAD, IRT_TAB), ix->key, IRFL_TAB_META);
1088
+ emitir(IRTG(IR_EQ, IRT_TAB), mt2, ix->mt);
1089
+ } else if (tvisudata(bv) && tabref(udataV(bv)->metatable) == ix->mtv) {
1090
+ TRef mt2 = emitir(IRT(IR_FLOAD, IRT_TAB), ix->key, IRFL_UDATA_META);
1091
+ emitir(IRTG(IR_EQ, IRT_TAB), mt2, ix->mt);
1092
+ } else { /* Lookup metamethod on 2nd operand and compare both. */
1093
+ ix->tab = ix->key;
1094
+ copyTV(J->L, &ix->tabv, bv);
1095
+ if (!lj_record_mm_lookup(J, ix, mm) ||
1096
+ lj_record_objcmp(J, mo1, ix->mobj, &mo1v, &ix->mobjv))
1097
+ goto nomatch;
1098
+ }
1099
+ rec_mm_callcomp(J, ix, op);
1100
+ return;
1101
+ }
1102
+ #endif
1103
+ nomatch:
1104
+ /* Lookup failed. Retry with __lt and swapped operands. */
1105
+ if (!(op & 2)) break; /* Already at __lt. Interpreter will throw. */
1106
+ ix->tab = ix->key; ix->key = ix->val; ix->val = ix->tab;
1107
+ copyTV(J->L, &ix->tabv, &ix->keyv);
1108
+ copyTV(J->L, &ix->keyv, &ix->valv);
1109
+ copyTV(J->L, &ix->valv, &ix->tabv);
1110
+ op ^= 3;
1111
+ }
1112
+ }
1113
+
1114
+ #if LJ_HASFFI
1115
+ /* Setup call to cdata comparison metamethod. */
1116
+ static void rec_mm_comp_cdata(jit_State *J, RecordIndex *ix, int op, MMS mm)
1117
+ {
1118
+ lj_snap_add(J);
1119
+ if (tref_iscdata(ix->val)) {
1120
+ ix->tab = ix->val;
1121
+ copyTV(J->L, &ix->tabv, &ix->valv);
1122
+ } else {
1123
+ lua_assert(tref_iscdata(ix->key));
1124
+ ix->tab = ix->key;
1125
+ copyTV(J->L, &ix->tabv, &ix->keyv);
1126
+ }
1127
+ lj_record_mm_lookup(J, ix, mm);
1128
+ rec_mm_callcomp(J, ix, op);
1129
+ }
1130
+ #endif
1131
+
1132
+ /* -- Indexed access ------------------------------------------------------ */
1133
+
1134
+ #ifdef LUAJIT_ENABLE_TABLE_BUMP
1135
+ /* Bump table allocations in bytecode when they grow during recording. */
1136
+ static void rec_idx_bump(jit_State *J, RecordIndex *ix)
1137
+ {
1138
+ RBCHashEntry *rbc = &J->rbchash[(ix->tab & (RBCHASH_SLOTS-1))];
1139
+ if (tref_ref(ix->tab) == rbc->ref) {
1140
+ const BCIns *pc = mref(rbc->pc, const BCIns);
1141
+ GCtab *tb = tabV(&ix->tabv);
1142
+ uint32_t nhbits;
1143
+ IRIns *ir;
1144
+ if (!tvisnil(&ix->keyv))
1145
+ (void)lj_tab_set(J->L, tb, &ix->keyv); /* Grow table right now. */
1146
+ nhbits = tb->hmask > 0 ? lj_fls(tb->hmask)+1 : 0;
1147
+ ir = IR(tref_ref(ix->tab));
1148
+ if (ir->o == IR_TNEW) {
1149
+ uint32_t ah = bc_d(*pc);
1150
+ uint32_t asize = ah & 0x7ff, hbits = ah >> 11;
1151
+ if (nhbits > hbits) hbits = nhbits;
1152
+ if (tb->asize > asize) {
1153
+ asize = tb->asize <= 0x7ff ? tb->asize : 0x7ff;
1154
+ }
1155
+ if ((asize | (hbits<<11)) != ah) { /* Has the size changed? */
1156
+ /* Patch bytecode, but continue recording (for more patching). */
1157
+ setbc_d(pc, (asize | (hbits<<11)));
1158
+ /* Patching TNEW operands is only safe if the trace is aborted. */
1159
+ ir->op1 = asize; ir->op2 = hbits;
1160
+ J->retryrec = 1; /* Abort the trace at the end of recording. */
1161
+ }
1162
+ } else if (ir->o == IR_TDUP) {
1163
+ GCtab *tpl = gco2tab(proto_kgc(&gcref(rbc->pt)->pt, ~(ptrdiff_t)bc_d(*pc)));
1164
+ /* Grow template table, but preserve keys with nil values. */
1165
+ if ((tb->asize > tpl->asize && (1u << nhbits)-1 == tpl->hmask) ||
1166
+ (tb->asize == tpl->asize && (1u << nhbits)-1 > tpl->hmask)) {
1167
+ Node *node = noderef(tpl->node);
1168
+ uint32_t i, hmask = tpl->hmask, asize;
1169
+ TValue *array;
1170
+ for (i = 0; i <= hmask; i++) {
1171
+ if (!tvisnil(&node[i].key) && tvisnil(&node[i].val))
1172
+ settabV(J->L, &node[i].val, tpl);
1173
+ }
1174
+ if (!tvisnil(&ix->keyv) && tref_isk(ix->key)) {
1175
+ TValue *o = lj_tab_set(J->L, tpl, &ix->keyv);
1176
+ if (tvisnil(o)) settabV(J->L, o, tpl);
1177
+ }
1178
+ lj_tab_resize(J->L, tpl, tb->asize, nhbits);
1179
+ node = noderef(tpl->node);
1180
+ hmask = tpl->hmask;
1181
+ for (i = 0; i <= hmask; i++) {
1182
+ /* This is safe, since template tables only hold immutable values. */
1183
+ if (tvistab(&node[i].val))
1184
+ setnilV(&node[i].val);
1185
+ }
1186
+ /* The shape of the table may have changed. Clean up array part, too. */
1187
+ asize = tpl->asize;
1188
+ array = tvref(tpl->array);
1189
+ for (i = 0; i < asize; i++) {
1190
+ if (tvistab(&array[i]))
1191
+ setnilV(&array[i]);
1192
+ }
1193
+ J->retryrec = 1; /* Abort the trace at the end of recording. */
1194
+ }
1195
+ }
1196
+ }
1197
+ }
1198
+ #endif
1199
+
1200
+ /* Record bounds-check. */
1201
+ static void rec_idx_abc(jit_State *J, TRef asizeref, TRef ikey, uint32_t asize)
1202
+ {
1203
+ /* Try to emit invariant bounds checks. */
1204
+ if ((J->flags & (JIT_F_OPT_LOOP|JIT_F_OPT_ABC)) ==
1205
+ (JIT_F_OPT_LOOP|JIT_F_OPT_ABC)) {
1206
+ IRRef ref = tref_ref(ikey);
1207
+ IRIns *ir = IR(ref);
1208
+ int32_t ofs = 0;
1209
+ IRRef ofsref = 0;
1210
+ /* Handle constant offsets. */
1211
+ if (ir->o == IR_ADD && irref_isk(ir->op2)) {
1212
+ ofsref = ir->op2;
1213
+ ofs = IR(ofsref)->i;
1214
+ ref = ir->op1;
1215
+ ir = IR(ref);
1216
+ }
1217
+ /* Got scalar evolution analysis results for this reference? */
1218
+ if (ref == J->scev.idx) {
1219
+ int32_t stop;
1220
+ lua_assert(irt_isint(J->scev.t) && ir->o == IR_SLOAD);
1221
+ stop = numberVint(&(J->L->base - J->baseslot)[ir->op1 + FORL_STOP]);
1222
+ /* Runtime value for stop of loop is within bounds? */
1223
+ if ((uint64_t)stop + ofs < (uint64_t)asize) {
1224
+ /* Emit invariant bounds check for stop. */
1225
+ emitir(IRTG(IR_ABC, IRT_P32), asizeref, ofs == 0 ? J->scev.stop :
1226
+ emitir(IRTI(IR_ADD), J->scev.stop, ofsref));
1227
+ /* Emit invariant bounds check for start, if not const or negative. */
1228
+ if (!(J->scev.dir && J->scev.start &&
1229
+ (int64_t)IR(J->scev.start)->i + ofs >= 0))
1230
+ emitir(IRTG(IR_ABC, IRT_P32), asizeref, ikey);
1231
+ return;
1232
+ }
1233
+ }
1234
+ }
1235
+ emitir(IRTGI(IR_ABC), asizeref, ikey); /* Emit regular bounds check. */
1236
+ }
1237
+
1238
+ /* Record indexed key lookup. */
1239
+ static TRef rec_idx_key(jit_State *J, RecordIndex *ix, IRRef *rbref)
1240
+ {
1241
+ TRef key;
1242
+ GCtab *t = tabV(&ix->tabv);
1243
+ ix->oldv = lj_tab_get(J->L, t, &ix->keyv); /* Lookup previous value. */
1244
+ *rbref = 0;
1245
+
1246
+ /* Integer keys are looked up in the array part first. */
1247
+ key = ix->key;
1248
+ if (tref_isnumber(key)) {
1249
+ int32_t k = numberVint(&ix->keyv);
1250
+ if (!tvisint(&ix->keyv) && numV(&ix->keyv) != (lua_Number)k)
1251
+ k = LJ_MAX_ASIZE;
1252
+ if ((MSize)k < LJ_MAX_ASIZE) { /* Potential array key? */
1253
+ TRef ikey = lj_opt_narrow_index(J, key);
1254
+ TRef asizeref = emitir(IRTI(IR_FLOAD), ix->tab, IRFL_TAB_ASIZE);
1255
+ if ((MSize)k < t->asize) { /* Currently an array key? */
1256
+ TRef arrayref;
1257
+ rec_idx_abc(J, asizeref, ikey, t->asize);
1258
+ arrayref = emitir(IRT(IR_FLOAD, IRT_P32), ix->tab, IRFL_TAB_ARRAY);
1259
+ return emitir(IRT(IR_AREF, IRT_P32), arrayref, ikey);
1260
+ } else { /* Currently not in array (may be an array extension)? */
1261
+ emitir(IRTGI(IR_ULE), asizeref, ikey); /* Inv. bounds check. */
1262
+ if (k == 0 && tref_isk(key))
1263
+ key = lj_ir_knum_zero(J); /* Canonicalize 0 or +-0.0 to +0.0. */
1264
+ /* And continue with the hash lookup. */
1265
+ }
1266
+ } else if (!tref_isk(key)) {
1267
+ /* We can rule out const numbers which failed the integerness test
1268
+ ** above. But all other numbers are potential array keys.
1269
+ */
1270
+ if (t->asize == 0) { /* True sparse tables have an empty array part. */
1271
+ /* Guard that the array part stays empty. */
1272
+ TRef tmp = emitir(IRTI(IR_FLOAD), ix->tab, IRFL_TAB_ASIZE);
1273
+ emitir(IRTGI(IR_EQ), tmp, lj_ir_kint(J, 0));
1274
+ } else {
1275
+ lj_trace_err(J, LJ_TRERR_NYITMIX);
1276
+ }
1277
+ }
1278
+ }
1279
+
1280
+ /* Otherwise the key is located in the hash part. */
1281
+ if (t->hmask == 0) { /* Shortcut for empty hash part. */
1282
+ /* Guard that the hash part stays empty. */
1283
+ TRef tmp = emitir(IRTI(IR_FLOAD), ix->tab, IRFL_TAB_HMASK);
1284
+ emitir(IRTGI(IR_EQ), tmp, lj_ir_kint(J, 0));
1285
+ return lj_ir_kkptr(J, niltvg(J2G(J)));
1286
+ }
1287
+ if (tref_isinteger(key)) /* Hash keys are based on numbers, not ints. */
1288
+ key = emitir(IRTN(IR_CONV), key, IRCONV_NUM_INT);
1289
+ if (tref_isk(key)) {
1290
+ /* Optimize lookup of constant hash keys. */
1291
+ MSize hslot = (MSize)((char *)ix->oldv - (char *)&noderef(t->node)[0].val);
1292
+ if (t->hmask > 0 && hslot <= t->hmask*(MSize)sizeof(Node) &&
1293
+ hslot <= 65535*(MSize)sizeof(Node)) {
1294
+ TRef node, kslot, hm;
1295
+ *rbref = J->cur.nins; /* Mark possible rollback point. */
1296
+ hm = emitir(IRTI(IR_FLOAD), ix->tab, IRFL_TAB_HMASK);
1297
+ emitir(IRTGI(IR_EQ), hm, lj_ir_kint(J, (int32_t)t->hmask));
1298
+ node = emitir(IRT(IR_FLOAD, IRT_P32), ix->tab, IRFL_TAB_NODE);
1299
+ kslot = lj_ir_kslot(J, key, hslot / sizeof(Node));
1300
+ return emitir(IRTG(IR_HREFK, IRT_P32), node, kslot);
1301
+ }
1302
+ }
1303
+ /* Fall back to a regular hash lookup. */
1304
+ return emitir(IRT(IR_HREF, IRT_P32), ix->tab, key);
1305
+ }
1306
+
1307
+ /* Determine whether a key is NOT one of the fast metamethod names. */
1308
+ static int nommstr(jit_State *J, TRef key)
1309
+ {
1310
+ if (tref_isstr(key)) {
1311
+ if (tref_isk(key)) {
1312
+ GCstr *str = ir_kstr(IR(tref_ref(key)));
1313
+ uint32_t mm;
1314
+ for (mm = 0; mm <= MM_FAST; mm++)
1315
+ if (mmname_str(J2G(J), mm) == str)
1316
+ return 0; /* MUST be one of the fast metamethod names. */
1317
+ } else {
1318
+ return 0; /* Variable string key MAY be a metamethod name. */
1319
+ }
1320
+ }
1321
+ return 1; /* CANNOT be a metamethod name. */
1322
+ }
1323
+
1324
+ /* Record indexed load/store. */
1325
+ TRef lj_record_idx(jit_State *J, RecordIndex *ix)
1326
+ {
1327
+ TRef xref;
1328
+ IROp xrefop, loadop;
1329
+ IRRef rbref;
1330
+ cTValue *oldv;
1331
+
1332
+ while (!tref_istab(ix->tab)) { /* Handle non-table lookup. */
1333
+ /* Never call raw lj_record_idx() on non-table. */
1334
+ lua_assert(ix->idxchain != 0);
1335
+ if (!lj_record_mm_lookup(J, ix, ix->val ? MM_newindex : MM_index))
1336
+ lj_trace_err(J, LJ_TRERR_NOMM);
1337
+ handlemm:
1338
+ if (tref_isfunc(ix->mobj)) { /* Handle metamethod call. */
1339
+ BCReg func = rec_mm_prep(J, ix->val ? lj_cont_nop : lj_cont_ra);
1340
+ TRef *base = J->base + func;
1341
+ TValue *tv = J->L->base + func;
1342
+ lua_assert(!LJ_FR2); /* TODO_FR2: handle different frame setup. */
1343
+ base[0] = ix->mobj; base[1] = ix->tab; base[2] = ix->key;
1344
+ setfuncV(J->L, tv+0, funcV(&ix->mobjv));
1345
+ copyTV(J->L, tv+1, &ix->tabv);
1346
+ copyTV(J->L, tv+2, &ix->keyv);
1347
+ if (ix->val) {
1348
+ base[3] = ix->val;
1349
+ copyTV(J->L, tv+3, &ix->valv);
1350
+ lj_record_call(J, func, 3); /* mobj(tab, key, val) */
1351
+ return 0;
1352
+ } else {
1353
+ lj_record_call(J, func, 2); /* res = mobj(tab, key) */
1354
+ return 0; /* No result yet. */
1355
+ }
1356
+ }
1357
+ /* Otherwise retry lookup with metaobject. */
1358
+ ix->tab = ix->mobj;
1359
+ copyTV(J->L, &ix->tabv, &ix->mobjv);
1360
+ if (--ix->idxchain == 0)
1361
+ lj_trace_err(J, LJ_TRERR_IDXLOOP);
1362
+ }
1363
+
1364
+ /* First catch nil and NaN keys for tables. */
1365
+ if (tvisnil(&ix->keyv) || (tvisnum(&ix->keyv) && tvisnan(&ix->keyv))) {
1366
+ if (ix->val) /* Better fail early. */
1367
+ lj_trace_err(J, LJ_TRERR_STORENN);
1368
+ if (tref_isk(ix->key)) {
1369
+ if (ix->idxchain && lj_record_mm_lookup(J, ix, MM_index))
1370
+ goto handlemm;
1371
+ return TREF_NIL;
1372
+ }
1373
+ }
1374
+
1375
+ /* Record the key lookup. */
1376
+ xref = rec_idx_key(J, ix, &rbref);
1377
+ xrefop = IR(tref_ref(xref))->o;
1378
+ loadop = xrefop == IR_AREF ? IR_ALOAD : IR_HLOAD;
1379
+ /* The lj_meta_tset() inconsistency is gone, but it's better to play it safe. */
1380
+ oldv = xrefop == IR_KKPTR ? (cTValue *)ir_kptr(IR(tref_ref(xref))) : ix->oldv;
1381
+
1382
+ if (ix->val == 0) { /* Indexed load */
1383
+ IRType t = itype2irt(oldv);
1384
+ TRef res;
1385
+ if (oldv == niltvg(J2G(J))) {
1386
+ emitir(IRTG(IR_EQ, IRT_P32), xref, lj_ir_kkptr(J, niltvg(J2G(J))));
1387
+ res = TREF_NIL;
1388
+ } else {
1389
+ res = emitir(IRTG(loadop, t), xref, 0);
1390
+ }
1391
+ if (tref_ref(res) < rbref) /* HREFK + load forwarded? */
1392
+ lj_ir_rollback(J, rbref); /* Rollback to eliminate hmask guard. */
1393
+ if (t == IRT_NIL && ix->idxchain && lj_record_mm_lookup(J, ix, MM_index))
1394
+ goto handlemm;
1395
+ if (irtype_ispri(t)) res = TREF_PRI(t); /* Canonicalize primitives. */
1396
+ return res;
1397
+ } else { /* Indexed store. */
1398
+ GCtab *mt = tabref(tabV(&ix->tabv)->metatable);
1399
+ int keybarrier = tref_isgcv(ix->key) && !tref_isnil(ix->val);
1400
+ if (tref_ref(xref) < rbref) /* HREFK forwarded? */
1401
+ lj_ir_rollback(J, rbref); /* Rollback to eliminate hmask guard. */
1402
+ if (tvisnil(oldv)) { /* Previous value was nil? */
1403
+ /* Need to duplicate the hasmm check for the early guards. */
1404
+ int hasmm = 0;
1405
+ if (ix->idxchain && mt) {
1406
+ cTValue *mo = lj_tab_getstr(mt, mmname_str(J2G(J), MM_newindex));
1407
+ hasmm = mo && !tvisnil(mo);
1408
+ }
1409
+ if (hasmm)
1410
+ emitir(IRTG(loadop, IRT_NIL), xref, 0); /* Guard for nil value. */
1411
+ else if (xrefop == IR_HREF)
1412
+ emitir(IRTG(oldv == niltvg(J2G(J)) ? IR_EQ : IR_NE, IRT_P32),
1413
+ xref, lj_ir_kkptr(J, niltvg(J2G(J))));
1414
+ if (ix->idxchain && lj_record_mm_lookup(J, ix, MM_newindex)) {
1415
+ lua_assert(hasmm);
1416
+ goto handlemm;
1417
+ }
1418
+ lua_assert(!hasmm);
1419
+ if (oldv == niltvg(J2G(J))) { /* Need to insert a new key. */
1420
+ TRef key = ix->key;
1421
+ if (tref_isinteger(key)) /* NEWREF needs a TValue as a key. */
1422
+ key = emitir(IRTN(IR_CONV), key, IRCONV_NUM_INT);
1423
+ xref = emitir(IRT(IR_NEWREF, IRT_P32), ix->tab, key);
1424
+ keybarrier = 0; /* NEWREF already takes care of the key barrier. */
1425
+ #ifdef LUAJIT_ENABLE_TABLE_BUMP
1426
+ if ((J->flags & JIT_F_OPT_SINK)) /* Avoid a separate flag. */
1427
+ rec_idx_bump(J, ix);
1428
+ #endif
1429
+ }
1430
+ } else if (!lj_opt_fwd_wasnonnil(J, loadop, tref_ref(xref))) {
1431
+ /* Cannot derive that the previous value was non-nil, must do checks. */
1432
+ if (xrefop == IR_HREF) /* Guard against store to niltv. */
1433
+ emitir(IRTG(IR_NE, IRT_P32), xref, lj_ir_kkptr(J, niltvg(J2G(J))));
1434
+ if (ix->idxchain) { /* Metamethod lookup required? */
1435
+ /* A check for NULL metatable is cheaper (hoistable) than a load. */
1436
+ if (!mt) {
1437
+ TRef mtref = emitir(IRT(IR_FLOAD, IRT_TAB), ix->tab, IRFL_TAB_META);
1438
+ emitir(IRTG(IR_EQ, IRT_TAB), mtref, lj_ir_knull(J, IRT_TAB));
1439
+ } else {
1440
+ IRType t = itype2irt(oldv);
1441
+ emitir(IRTG(loadop, t), xref, 0); /* Guard for non-nil value. */
1442
+ }
1443
+ }
1444
+ } else {
1445
+ keybarrier = 0; /* Previous non-nil value kept the key alive. */
1446
+ }
1447
+ /* Convert int to number before storing. */
1448
+ if (!LJ_DUALNUM && tref_isinteger(ix->val))
1449
+ ix->val = emitir(IRTN(IR_CONV), ix->val, IRCONV_NUM_INT);
1450
+ emitir(IRT(loadop+IRDELTA_L2S, tref_type(ix->val)), xref, ix->val);
1451
+ if (keybarrier || tref_isgcv(ix->val))
1452
+ emitir(IRT(IR_TBAR, IRT_NIL), ix->tab, 0);
1453
+ /* Invalidate neg. metamethod cache for stores with certain string keys. */
1454
+ if (!nommstr(J, ix->key)) {
1455
+ TRef fref = emitir(IRT(IR_FREF, IRT_P32), ix->tab, IRFL_TAB_NOMM);
1456
+ emitir(IRT(IR_FSTORE, IRT_U8), fref, lj_ir_kint(J, 0));
1457
+ }
1458
+ J->needsnap = 1;
1459
+ return 0;
1460
+ }
1461
+ }
1462
+
1463
+ static void rec_tsetm(jit_State *J, BCReg ra, BCReg rn, int32_t i)
1464
+ {
1465
+ RecordIndex ix;
1466
+ cTValue *basev = J->L->base;
1467
+ GCtab *t = tabV(&basev[ra-1]);
1468
+ settabV(J->L, &ix.tabv, t);
1469
+ ix.tab = getslot(J, ra-1);
1470
+ ix.idxchain = 0;
1471
+ #ifdef LUAJIT_ENABLE_TABLE_BUMP
1472
+ if ((J->flags & JIT_F_OPT_SINK)) {
1473
+ if (t->asize < i+rn-ra)
1474
+ lj_tab_reasize(J->L, t, i+rn-ra);
1475
+ setnilV(&ix.keyv);
1476
+ rec_idx_bump(J, &ix);
1477
+ }
1478
+ #endif
1479
+ for (; ra < rn; i++, ra++) {
1480
+ setintV(&ix.keyv, i);
1481
+ ix.key = lj_ir_kint(J, i);
1482
+ copyTV(J->L, &ix.valv, &basev[ra]);
1483
+ ix.val = getslot(J, ra);
1484
+ lj_record_idx(J, &ix);
1485
+ }
1486
+ }
1487
+
1488
+ /* -- Upvalue access ------------------------------------------------------ */
1489
+
1490
+ /* Check whether upvalue is immutable and ok to constify. */
1491
+ static int rec_upvalue_constify(jit_State *J, GCupval *uvp)
1492
+ {
1493
+ if (uvp->immutable) {
1494
+ cTValue *o = uvval(uvp);
1495
+ /* Don't constify objects that may retain large amounts of memory. */
1496
+ #if LJ_HASFFI
1497
+ if (tviscdata(o)) {
1498
+ GCcdata *cd = cdataV(o);
1499
+ if (!cdataisv(cd) && !(cd->marked & LJ_GC_CDATA_FIN)) {
1500
+ CType *ct = ctype_raw(ctype_ctsG(J2G(J)), cd->ctypeid);
1501
+ if (!ctype_hassize(ct->info) || ct->size <= 16)
1502
+ return 1;
1503
+ }
1504
+ return 0;
1505
+ }
1506
+ #else
1507
+ UNUSED(J);
1508
+ #endif
1509
+ if (!(tvistab(o) || tvisudata(o) || tvisthread(o)))
1510
+ return 1;
1511
+ }
1512
+ return 0;
1513
+ }
1514
+
1515
+ /* Record upvalue load/store. */
1516
+ static TRef rec_upvalue(jit_State *J, uint32_t uv, TRef val)
1517
+ {
1518
+ GCupval *uvp = &gcref(J->fn->l.uvptr[uv])->uv;
1519
+ TRef fn = getcurrf(J);
1520
+ IRRef uref;
1521
+ int needbarrier = 0;
1522
+ if (rec_upvalue_constify(J, uvp)) { /* Try to constify immutable upvalue. */
1523
+ TRef tr, kfunc;
1524
+ lua_assert(val == 0);
1525
+ if (!tref_isk(fn)) { /* Late specialization of current function. */
1526
+ if (J->pt->flags >= PROTO_CLC_POLY)
1527
+ goto noconstify;
1528
+ kfunc = lj_ir_kfunc(J, J->fn);
1529
+ emitir(IRTG(IR_EQ, IRT_FUNC), fn, kfunc);
1530
+ J->base[-1] = TREF_FRAME | kfunc;
1531
+ fn = kfunc;
1532
+ }
1533
+ tr = lj_record_constify(J, uvval(uvp));
1534
+ if (tr)
1535
+ return tr;
1536
+ }
1537
+ noconstify:
1538
+ /* Note: this effectively limits LJ_MAX_UPVAL to 127. */
1539
+ uv = (uv << 8) | (hashrot(uvp->dhash, uvp->dhash + HASH_BIAS) & 0xff);
1540
+ if (!uvp->closed) {
1541
+ /* In current stack? */
1542
+ if (uvval(uvp) >= tvref(J->L->stack) &&
1543
+ uvval(uvp) < tvref(J->L->maxstack)) {
1544
+ int32_t slot = (int32_t)(uvval(uvp) - (J->L->base - J->baseslot));
1545
+ if (slot >= 0) { /* Aliases an SSA slot? */
1546
+ slot -= (int32_t)J->baseslot; /* Note: slot number may be negative! */
1547
+ /* NYI: add IR to guard that it's still aliasing the same slot. */
1548
+ if (val == 0) {
1549
+ return getslot(J, slot);
1550
+ } else {
1551
+ J->base[slot] = val;
1552
+ if (slot >= (int32_t)J->maxslot) J->maxslot = (BCReg)(slot+1);
1553
+ return 0;
1554
+ }
1555
+ }
1556
+ }
1557
+ uref = tref_ref(emitir(IRTG(IR_UREFO, IRT_P32), fn, uv));
1558
+ } else {
1559
+ needbarrier = 1;
1560
+ uref = tref_ref(emitir(IRTG(IR_UREFC, IRT_P32), fn, uv));
1561
+ }
1562
+ if (val == 0) { /* Upvalue load */
1563
+ IRType t = itype2irt(uvval(uvp));
1564
+ TRef res = emitir(IRTG(IR_ULOAD, t), uref, 0);
1565
+ if (irtype_ispri(t)) res = TREF_PRI(t); /* Canonicalize primitive refs. */
1566
+ return res;
1567
+ } else { /* Upvalue store. */
1568
+ /* Convert int to number before storing. */
1569
+ if (!LJ_DUALNUM && tref_isinteger(val))
1570
+ val = emitir(IRTN(IR_CONV), val, IRCONV_NUM_INT);
1571
+ emitir(IRT(IR_USTORE, tref_type(val)), uref, val);
1572
+ if (needbarrier && tref_isgcv(val))
1573
+ emitir(IRT(IR_OBAR, IRT_NIL), uref, val);
1574
+ J->needsnap = 1;
1575
+ return 0;
1576
+ }
1577
+ }
1578
+
1579
+ /* -- Record calls to Lua functions --------------------------------------- */
1580
+
1581
+ /* Check unroll limits for calls. */
1582
+ static void check_call_unroll(jit_State *J, TraceNo lnk)
1583
+ {
1584
+ cTValue *frame = J->L->base - 1;
1585
+ void *pc = mref(frame_func(frame)->l.pc, void);
1586
+ int32_t depth = J->framedepth;
1587
+ int32_t count = 0;
1588
+ if ((J->pt->flags & PROTO_VARARG)) depth--; /* Vararg frame still missing. */
1589
+ for (; depth > 0; depth--) { /* Count frames with same prototype. */
1590
+ if (frame_iscont(frame)) depth--;
1591
+ frame = frame_prev(frame);
1592
+ if (mref(frame_func(frame)->l.pc, void) == pc)
1593
+ count++;
1594
+ }
1595
+ if (J->pc == J->startpc) {
1596
+ if (count + J->tailcalled > J->param[JIT_P_recunroll]) {
1597
+ J->pc++;
1598
+ if (J->framedepth + J->retdepth == 0)
1599
+ lj_record_stop(J, LJ_TRLINK_TAILREC, J->cur.traceno); /* Tail-rec. */
1600
+ else
1601
+ lj_record_stop(J, LJ_TRLINK_UPREC, J->cur.traceno); /* Up-recursion. */
1602
+ }
1603
+ } else {
1604
+ if (count > J->param[JIT_P_callunroll]) {
1605
+ if (lnk) { /* Possible tail- or up-recursion. */
1606
+ lj_trace_flush(J, lnk); /* Flush trace that only returns. */
1607
+ /* Set a small, pseudo-random hotcount for a quick retry of JFUNC*. */
1608
+ hotcount_set(J2GG(J), J->pc+1, LJ_PRNG_BITS(J, 4));
1609
+ }
1610
+ lj_trace_err(J, LJ_TRERR_CUNROLL);
1611
+ }
1612
+ }
1613
+ }
1614
+
1615
+ /* Record Lua function setup. */
1616
+ static void rec_func_setup(jit_State *J)
1617
+ {
1618
+ GCproto *pt = J->pt;
1619
+ BCReg s, numparams = pt->numparams;
1620
+ if ((pt->flags & PROTO_NOJIT))
1621
+ lj_trace_err(J, LJ_TRERR_CJITOFF);
1622
+ if (J->baseslot + pt->framesize >= LJ_MAX_JSLOTS)
1623
+ lj_trace_err(J, LJ_TRERR_STACKOV);
1624
+ /* Fill up missing parameters with nil. */
1625
+ for (s = J->maxslot; s < numparams; s++)
1626
+ J->base[s] = TREF_NIL;
1627
+ /* The remaining slots should never be read before they are written. */
1628
+ J->maxslot = numparams;
1629
+ }
1630
+
1631
+ /* Record Lua vararg function setup. */
1632
+ static void rec_func_vararg(jit_State *J)
1633
+ {
1634
+ GCproto *pt = J->pt;
1635
+ BCReg s, fixargs, vframe = J->maxslot+1;
1636
+ lua_assert((pt->flags & PROTO_VARARG));
1637
+ if (J->baseslot + vframe + pt->framesize >= LJ_MAX_JSLOTS)
1638
+ lj_trace_err(J, LJ_TRERR_STACKOV);
1639
+ J->base[vframe-1] = J->base[-1]; /* Copy function up. */
1640
+ /* Copy fixarg slots up and set their original slots to nil. */
1641
+ fixargs = pt->numparams < J->maxslot ? pt->numparams : J->maxslot;
1642
+ for (s = 0; s < fixargs; s++) {
1643
+ J->base[vframe+s] = J->base[s];
1644
+ J->base[s] = TREF_NIL;
1645
+ }
1646
+ J->maxslot = fixargs;
1647
+ J->framedepth++;
1648
+ J->base += vframe;
1649
+ J->baseslot += vframe;
1650
+ }
1651
+
1652
+ /* Record entry to a Lua function. */
1653
+ static void rec_func_lua(jit_State *J)
1654
+ {
1655
+ rec_func_setup(J);
1656
+ check_call_unroll(J, 0);
1657
+ }
1658
+
1659
+ /* Record entry to an already compiled function. */
1660
+ static void rec_func_jit(jit_State *J, TraceNo lnk)
1661
+ {
1662
+ GCtrace *T;
1663
+ rec_func_setup(J);
1664
+ T = traceref(J, lnk);
1665
+ if (T->linktype == LJ_TRLINK_RETURN) { /* Trace returns to interpreter? */
1666
+ check_call_unroll(J, lnk);
1667
+ /* Temporarily unpatch JFUNC* to continue recording across the function. */
1668
+ J->patchins = *J->pc;
1669
+ J->patchpc = (BCIns *)J->pc;
1670
+ *J->patchpc = T->startins;
1671
+ return;
1672
+ }
1673
+ J->instunroll = 0; /* Cannot continue across a compiled function. */
1674
+ if (J->pc == J->startpc && J->framedepth + J->retdepth == 0)
1675
+ lj_record_stop(J, LJ_TRLINK_TAILREC, J->cur.traceno); /* Extra tail-rec. */
1676
+ else
1677
+ lj_record_stop(J, LJ_TRLINK_ROOT, lnk); /* Link to the function. */
1678
+ }
1679
+
1680
+ /* -- Vararg handling ----------------------------------------------------- */
1681
+
1682
+ /* Detect y = select(x, ...) idiom. */
1683
+ static int select_detect(jit_State *J)
1684
+ {
1685
+ BCIns ins = J->pc[1];
1686
+ if (bc_op(ins) == BC_CALLM && bc_b(ins) == 2 && bc_c(ins) == 1) {
1687
+ cTValue *func = &J->L->base[bc_a(ins)];
1688
+ if (tvisfunc(func) && funcV(func)->c.ffid == FF_select)
1689
+ return 1;
1690
+ }
1691
+ return 0;
1692
+ }
1693
+
1694
+ /* Record vararg instruction. */
1695
+ static void rec_varg(jit_State *J, BCReg dst, ptrdiff_t nresults)
1696
+ {
1697
+ int32_t numparams = J->pt->numparams;
1698
+ ptrdiff_t nvararg = frame_delta(J->L->base-1) - numparams - 1;
1699
+ lua_assert(frame_isvarg(J->L->base-1));
1700
+ if (J->framedepth > 0) { /* Simple case: varargs defined on-trace. */
1701
+ ptrdiff_t i;
1702
+ if (nvararg < 0) nvararg = 0;
1703
+ if (nresults == -1) {
1704
+ nresults = nvararg;
1705
+ J->maxslot = dst + (BCReg)nvararg;
1706
+ } else if (dst + nresults > J->maxslot) {
1707
+ J->maxslot = dst + (BCReg)nresults;
1708
+ }
1709
+ for (i = 0; i < nresults; i++)
1710
+ J->base[dst+i] = i < nvararg ? getslot(J, i - nvararg - 1) : TREF_NIL;
1711
+ } else { /* Unknown number of varargs passed to trace. */
1712
+ TRef fr = emitir(IRTI(IR_SLOAD), 0, IRSLOAD_READONLY|IRSLOAD_FRAME);
1713
+ int32_t frofs = 8*(1+numparams)+FRAME_VARG;
1714
+ if (nresults >= 0) { /* Known fixed number of results. */
1715
+ ptrdiff_t i;
1716
+ if (nvararg > 0) {
1717
+ ptrdiff_t nload = nvararg >= nresults ? nresults : nvararg;
1718
+ TRef vbase;
1719
+ if (nvararg >= nresults)
1720
+ emitir(IRTGI(IR_GE), fr, lj_ir_kint(J, frofs+8*(int32_t)nresults));
1721
+ else
1722
+ emitir(IRTGI(IR_EQ), fr,
1723
+ lj_ir_kint(J, (int32_t)frame_ftsz(J->L->base-1)));
1724
+ vbase = emitir(IRTI(IR_SUB), REF_BASE, fr);
1725
+ vbase = emitir(IRT(IR_ADD, IRT_P32), vbase, lj_ir_kint(J, frofs-8));
1726
+ for (i = 0; i < nload; i++) {
1727
+ IRType t = itype2irt(&J->L->base[i-1-nvararg]);
1728
+ TRef aref = emitir(IRT(IR_AREF, IRT_P32),
1729
+ vbase, lj_ir_kint(J, (int32_t)i));
1730
+ TRef tr = emitir(IRTG(IR_VLOAD, t), aref, 0);
1731
+ if (irtype_ispri(t)) tr = TREF_PRI(t); /* Canonicalize primitives. */
1732
+ J->base[dst+i] = tr;
1733
+ }
1734
+ } else {
1735
+ emitir(IRTGI(IR_LE), fr, lj_ir_kint(J, frofs));
1736
+ nvararg = 0;
1737
+ }
1738
+ for (i = nvararg; i < nresults; i++)
1739
+ J->base[dst+i] = TREF_NIL;
1740
+ if (dst + (BCReg)nresults > J->maxslot)
1741
+ J->maxslot = dst + (BCReg)nresults;
1742
+ } else if (select_detect(J)) { /* y = select(x, ...) */
1743
+ TRef tridx = J->base[dst-1];
1744
+ TRef tr = TREF_NIL;
1745
+ ptrdiff_t idx = lj_ffrecord_select_mode(J, tridx, &J->L->base[dst-1]);
1746
+ if (idx < 0) goto nyivarg;
1747
+ if (idx != 0 && !tref_isinteger(tridx))
1748
+ tridx = emitir(IRTGI(IR_CONV), tridx, IRCONV_INT_NUM|IRCONV_INDEX);
1749
+ if (idx != 0 && tref_isk(tridx)) {
1750
+ emitir(IRTGI(idx <= nvararg ? IR_GE : IR_LT),
1751
+ fr, lj_ir_kint(J, frofs+8*(int32_t)idx));
1752
+ frofs -= 8; /* Bias for 1-based index. */
1753
+ } else if (idx <= nvararg) { /* Compute size. */
1754
+ TRef tmp = emitir(IRTI(IR_ADD), fr, lj_ir_kint(J, -frofs));
1755
+ if (numparams)
1756
+ emitir(IRTGI(IR_GE), tmp, lj_ir_kint(J, 0));
1757
+ tr = emitir(IRTI(IR_BSHR), tmp, lj_ir_kint(J, 3));
1758
+ if (idx != 0) {
1759
+ tridx = emitir(IRTI(IR_ADD), tridx, lj_ir_kint(J, -1));
1760
+ rec_idx_abc(J, tr, tridx, (uint32_t)nvararg);
1761
+ }
1762
+ } else {
1763
+ TRef tmp = lj_ir_kint(J, frofs);
1764
+ if (idx != 0) {
1765
+ TRef tmp2 = emitir(IRTI(IR_BSHL), tridx, lj_ir_kint(J, 3));
1766
+ tmp = emitir(IRTI(IR_ADD), tmp2, tmp);
1767
+ } else {
1768
+ tr = lj_ir_kint(J, 0);
1769
+ }
1770
+ emitir(IRTGI(IR_LT), fr, tmp);
1771
+ }
1772
+ if (idx != 0 && idx <= nvararg) {
1773
+ IRType t;
1774
+ TRef aref, vbase = emitir(IRTI(IR_SUB), REF_BASE, fr);
1775
+ vbase = emitir(IRT(IR_ADD, IRT_P32), vbase, lj_ir_kint(J, frofs-8));
1776
+ t = itype2irt(&J->L->base[idx-2-nvararg]);
1777
+ aref = emitir(IRT(IR_AREF, IRT_P32), vbase, tridx);
1778
+ tr = emitir(IRTG(IR_VLOAD, t), aref, 0);
1779
+ if (irtype_ispri(t)) tr = TREF_PRI(t); /* Canonicalize primitives. */
1780
+ }
1781
+ J->base[dst-2] = tr;
1782
+ J->maxslot = dst-1;
1783
+ J->bcskip = 2; /* Skip CALLM + select. */
1784
+ } else {
1785
+ nyivarg:
1786
+ setintV(&J->errinfo, BC_VARG);
1787
+ lj_trace_err_info(J, LJ_TRERR_NYIBC);
1788
+ }
1789
+ }
1790
+ }
1791
+
1792
+ /* -- Record allocations -------------------------------------------------- */
1793
+
1794
+ static TRef rec_tnew(jit_State *J, uint32_t ah)
1795
+ {
1796
+ uint32_t asize = ah & 0x7ff;
1797
+ uint32_t hbits = ah >> 11;
1798
+ TRef tr;
1799
+ if (asize == 0x7ff) asize = 0x801;
1800
+ tr = emitir(IRTG(IR_TNEW, IRT_TAB), asize, hbits);
1801
+ #ifdef LUAJIT_ENABLE_TABLE_BUMP
1802
+ J->rbchash[(tr & (RBCHASH_SLOTS-1))].ref = tref_ref(tr);
1803
+ setmref(J->rbchash[(tr & (RBCHASH_SLOTS-1))].pc, J->pc);
1804
+ setgcref(J->rbchash[(tr & (RBCHASH_SLOTS-1))].pt, obj2gco(J->pt));
1805
+ #endif
1806
+ return tr;
1807
+ }
1808
+
1809
+ /* -- Concatenation ------------------------------------------------------- */
1810
+
1811
+ static TRef rec_cat(jit_State *J, BCReg baseslot, BCReg topslot)
1812
+ {
1813
+ TRef *top = &J->base[topslot];
1814
+ TValue savetv[5];
1815
+ BCReg s;
1816
+ RecordIndex ix;
1817
+ lua_assert(baseslot < topslot);
1818
+ for (s = baseslot; s <= topslot; s++)
1819
+ (void)getslot(J, s); /* Ensure all arguments have a reference. */
1820
+ if (tref_isnumber_str(top[0]) && tref_isnumber_str(top[-1])) {
1821
+ TRef tr, hdr, *trp, *xbase, *base = &J->base[baseslot];
1822
+ /* First convert numbers to strings. */
1823
+ for (trp = top; trp >= base; trp--) {
1824
+ if (tref_isnumber(*trp))
1825
+ *trp = emitir(IRT(IR_TOSTR, IRT_STR), *trp,
1826
+ tref_isnum(*trp) ? IRTOSTR_NUM : IRTOSTR_INT);
1827
+ else if (!tref_isstr(*trp))
1828
+ break;
1829
+ }
1830
+ xbase = ++trp;
1831
+ tr = hdr = emitir(IRT(IR_BUFHDR, IRT_P32),
1832
+ lj_ir_kptr(J, &J2G(J)->tmpbuf), IRBUFHDR_RESET);
1833
+ do {
1834
+ tr = emitir(IRT(IR_BUFPUT, IRT_P32), tr, *trp++);
1835
+ } while (trp <= top);
1836
+ tr = emitir(IRT(IR_BUFSTR, IRT_STR), tr, hdr);
1837
+ J->maxslot = (BCReg)(xbase - J->base);
1838
+ if (xbase == base) return tr; /* Return simple concatenation result. */
1839
+ /* Pass partial result. */
1840
+ topslot = J->maxslot--;
1841
+ *xbase = tr;
1842
+ top = xbase;
1843
+ setstrV(J->L, &ix.keyv, &J2G(J)->strempty); /* Simulate string result. */
1844
+ } else {
1845
+ J->maxslot = topslot-1;
1846
+ copyTV(J->L, &ix.keyv, &J->L->base[topslot]);
1847
+ }
1848
+ copyTV(J->L, &ix.tabv, &J->L->base[topslot-1]);
1849
+ ix.tab = top[-1];
1850
+ ix.key = top[0];
1851
+ memcpy(savetv, &J->L->base[topslot-1], sizeof(savetv)); /* Save slots. */
1852
+ rec_mm_arith(J, &ix, MM_concat); /* Call __concat metamethod. */
1853
+ memcpy(&J->L->base[topslot-1], savetv, sizeof(savetv)); /* Restore slots. */
1854
+ return 0; /* No result yet. */
1855
+ }
1856
+
1857
+ /* -- Record bytecode ops ------------------------------------------------- */
1858
+
1859
+ /* Prepare for comparison. */
1860
+ static void rec_comp_prep(jit_State *J)
1861
+ {
1862
+ /* Prevent merging with snapshot #0 (GC exit) since we fixup the PC. */
1863
+ if (J->cur.nsnap == 1 && J->cur.snap[0].ref == J->cur.nins)
1864
+ emitir_raw(IRT(IR_NOP, IRT_NIL), 0, 0);
1865
+ lj_snap_add(J);
1866
+ }
1867
+
1868
+ /* Fixup comparison. */
1869
+ static void rec_comp_fixup(jit_State *J, const BCIns *pc, int cond)
1870
+ {
1871
+ BCIns jmpins = pc[1];
1872
+ const BCIns *npc = pc + 2 + (cond ? bc_j(jmpins) : 0);
1873
+ SnapShot *snap = &J->cur.snap[J->cur.nsnap-1];
1874
+ /* Set PC to opposite target to avoid re-recording the comp. in side trace. */
1875
+ J->cur.snapmap[snap->mapofs + snap->nent] = SNAP_MKPC(npc);
1876
+ J->needsnap = 1;
1877
+ if (bc_a(jmpins) < J->maxslot) J->maxslot = bc_a(jmpins);
1878
+ lj_snap_shrink(J); /* Shrink last snapshot if possible. */
1879
+ }
1880
+
1881
+ /* Record the next bytecode instruction (_before_ it's executed). */
1882
+ void lj_record_ins(jit_State *J)
1883
+ {
1884
+ cTValue *lbase;
1885
+ RecordIndex ix;
1886
+ const BCIns *pc;
1887
+ BCIns ins;
1888
+ BCOp op;
1889
+ TRef ra, rb, rc;
1890
+
1891
+ /* Perform post-processing action before recording the next instruction. */
1892
+ if (LJ_UNLIKELY(J->postproc != LJ_POST_NONE)) {
1893
+ switch (J->postproc) {
1894
+ case LJ_POST_FIXCOMP: /* Fixup comparison. */
1895
+ pc = (const BCIns *)(uintptr_t)J2G(J)->tmptv.u64;
1896
+ rec_comp_fixup(J, pc, (!tvistruecond(&J2G(J)->tmptv2) ^ (bc_op(*pc)&1)));
1897
+ /* fallthrough */
1898
+ case LJ_POST_FIXGUARD: /* Fixup and emit pending guard. */
1899
+ case LJ_POST_FIXGUARDSNAP: /* Fixup and emit pending guard and snapshot. */
1900
+ if (!tvistruecond(&J2G(J)->tmptv2)) {
1901
+ J->fold.ins.o ^= 1; /* Flip guard to opposite. */
1902
+ if (J->postproc == LJ_POST_FIXGUARDSNAP) {
1903
+ SnapShot *snap = &J->cur.snap[J->cur.nsnap-1];
1904
+ J->cur.snapmap[snap->mapofs+snap->nent-1]--; /* False -> true. */
1905
+ }
1906
+ }
1907
+ lj_opt_fold(J); /* Emit pending guard. */
1908
+ /* fallthrough */
1909
+ case LJ_POST_FIXBOOL:
1910
+ if (!tvistruecond(&J2G(J)->tmptv2)) {
1911
+ BCReg s;
1912
+ TValue *tv = J->L->base;
1913
+ for (s = 0; s < J->maxslot; s++) /* Fixup stack slot (if any). */
1914
+ if (J->base[s] == TREF_TRUE && tvisfalse(&tv[s])) {
1915
+ J->base[s] = TREF_FALSE;
1916
+ break;
1917
+ }
1918
+ }
1919
+ break;
1920
+ case LJ_POST_FIXCONST:
1921
+ {
1922
+ BCReg s;
1923
+ TValue *tv = J->L->base;
1924
+ for (s = 0; s < J->maxslot; s++) /* Constify stack slots (if any). */
1925
+ if (J->base[s] == TREF_NIL && !tvisnil(&tv[s]))
1926
+ J->base[s] = lj_record_constify(J, &tv[s]);
1927
+ }
1928
+ break;
1929
+ case LJ_POST_FFRETRY: /* Suppress recording of retried fast function. */
1930
+ if (bc_op(*J->pc) >= BC__MAX)
1931
+ return;
1932
+ break;
1933
+ default: lua_assert(0); break;
1934
+ }
1935
+ J->postproc = LJ_POST_NONE;
1936
+ }
1937
+
1938
+ /* Need snapshot before recording next bytecode (e.g. after a store). */
1939
+ if (J->needsnap) {
1940
+ J->needsnap = 0;
1941
+ lj_snap_purge(J);
1942
+ lj_snap_add(J);
1943
+ J->mergesnap = 1;
1944
+ }
1945
+
1946
+ /* Skip some bytecodes. */
1947
+ if (LJ_UNLIKELY(J->bcskip > 0)) {
1948
+ J->bcskip--;
1949
+ return;
1950
+ }
1951
+
1952
+ /* Record only closed loops for root traces. */
1953
+ pc = J->pc;
1954
+ if (J->framedepth == 0 &&
1955
+ (MSize)((char *)pc - (char *)J->bc_min) >= J->bc_extent)
1956
+ lj_trace_err(J, LJ_TRERR_LLEAVE);
1957
+
1958
+ #ifdef LUA_USE_ASSERT
1959
+ rec_check_slots(J);
1960
+ rec_check_ir(J);
1961
+ #endif
1962
+
1963
+ #if LJ_HASPROFILE
1964
+ rec_profile_ins(J, pc);
1965
+ #endif
1966
+
1967
+ /* Keep a copy of the runtime values of var/num/str operands. */
1968
+ #define rav (&ix.valv)
1969
+ #define rbv (&ix.tabv)
1970
+ #define rcv (&ix.keyv)
1971
+
1972
+ lbase = J->L->base;
1973
+ ins = *pc;
1974
+ op = bc_op(ins);
1975
+ ra = bc_a(ins);
1976
+ ix.val = 0;
1977
+ switch (bcmode_a(op)) {
1978
+ case BCMvar:
1979
+ copyTV(J->L, rav, &lbase[ra]); ix.val = ra = getslot(J, ra); break;
1980
+ default: break; /* Handled later. */
1981
+ }
1982
+ rb = bc_b(ins);
1983
+ rc = bc_c(ins);
1984
+ switch (bcmode_b(op)) {
1985
+ case BCMnone: rb = 0; rc = bc_d(ins); break; /* Upgrade rc to 'rd'. */
1986
+ case BCMvar:
1987
+ copyTV(J->L, rbv, &lbase[rb]); ix.tab = rb = getslot(J, rb); break;
1988
+ default: break; /* Handled later. */
1989
+ }
1990
+ switch (bcmode_c(op)) {
1991
+ case BCMvar:
1992
+ copyTV(J->L, rcv, &lbase[rc]); ix.key = rc = getslot(J, rc); break;
1993
+ case BCMpri: setpriV(rcv, ~rc); ix.key = rc = TREF_PRI(IRT_NIL+rc); break;
1994
+ case BCMnum: { cTValue *tv = proto_knumtv(J->pt, rc);
1995
+ copyTV(J->L, rcv, tv); ix.key = rc = tvisint(tv) ? lj_ir_kint(J, intV(tv)) :
1996
+ lj_ir_knumint(J, numV(tv)); } break;
1997
+ case BCMstr: { GCstr *s = gco2str(proto_kgc(J->pt, ~(ptrdiff_t)rc));
1998
+ setstrV(J->L, rcv, s); ix.key = rc = lj_ir_kstr(J, s); } break;
1999
+ default: break; /* Handled later. */
2000
+ }
2001
+
2002
+ switch (op) {
2003
+
2004
+ /* -- Comparison ops ---------------------------------------------------- */
2005
+
2006
+ case BC_ISLT: case BC_ISGE: case BC_ISLE: case BC_ISGT:
2007
+ #if LJ_HASFFI
2008
+ if (tref_iscdata(ra) || tref_iscdata(rc)) {
2009
+ rec_mm_comp_cdata(J, &ix, op, ((int)op & 2) ? MM_le : MM_lt);
2010
+ break;
2011
+ }
2012
+ #endif
2013
+ /* Emit nothing for two numeric or string consts. */
2014
+ if (!(tref_isk2(ra,rc) && tref_isnumber_str(ra) && tref_isnumber_str(rc))) {
2015
+ IRType ta = tref_isinteger(ra) ? IRT_INT : tref_type(ra);
2016
+ IRType tc = tref_isinteger(rc) ? IRT_INT : tref_type(rc);
2017
+ int irop;
2018
+ if (ta != tc) {
2019
+ /* Widen mixed number/int comparisons to number/number comparison. */
2020
+ if (ta == IRT_INT && tc == IRT_NUM) {
2021
+ ra = emitir(IRTN(IR_CONV), ra, IRCONV_NUM_INT);
2022
+ ta = IRT_NUM;
2023
+ } else if (ta == IRT_NUM && tc == IRT_INT) {
2024
+ rc = emitir(IRTN(IR_CONV), rc, IRCONV_NUM_INT);
2025
+ } else if (LJ_52) {
2026
+ ta = IRT_NIL; /* Force metamethod for different types. */
2027
+ } else if (!((ta == IRT_FALSE || ta == IRT_TRUE) &&
2028
+ (tc == IRT_FALSE || tc == IRT_TRUE))) {
2029
+ break; /* Interpreter will throw for two different types. */
2030
+ }
2031
+ }
2032
+ rec_comp_prep(J);
2033
+ irop = (int)op - (int)BC_ISLT + (int)IR_LT;
2034
+ if (ta == IRT_NUM) {
2035
+ if ((irop & 1)) irop ^= 4; /* ISGE/ISGT are unordered. */
2036
+ if (!lj_ir_numcmp(numberVnum(rav), numberVnum(rcv), (IROp)irop))
2037
+ irop ^= 5;
2038
+ } else if (ta == IRT_INT) {
2039
+ if (!lj_ir_numcmp(numberVnum(rav), numberVnum(rcv), (IROp)irop))
2040
+ irop ^= 1;
2041
+ } else if (ta == IRT_STR) {
2042
+ if (!lj_ir_strcmp(strV(rav), strV(rcv), (IROp)irop)) irop ^= 1;
2043
+ ra = lj_ir_call(J, IRCALL_lj_str_cmp, ra, rc);
2044
+ rc = lj_ir_kint(J, 0);
2045
+ ta = IRT_INT;
2046
+ } else {
2047
+ rec_mm_comp(J, &ix, (int)op);
2048
+ break;
2049
+ }
2050
+ emitir(IRTG(irop, ta), ra, rc);
2051
+ rec_comp_fixup(J, J->pc, ((int)op ^ irop) & 1);
2052
+ }
2053
+ break;
2054
+
2055
+ case BC_ISEQV: case BC_ISNEV:
2056
+ case BC_ISEQS: case BC_ISNES:
2057
+ case BC_ISEQN: case BC_ISNEN:
2058
+ case BC_ISEQP: case BC_ISNEP:
2059
+ #if LJ_HASFFI
2060
+ if (tref_iscdata(ra) || tref_iscdata(rc)) {
2061
+ rec_mm_comp_cdata(J, &ix, op, MM_eq);
2062
+ break;
2063
+ }
2064
+ #endif
2065
+ /* Emit nothing for two non-table, non-udata consts. */
2066
+ if (!(tref_isk2(ra, rc) && !(tref_istab(ra) || tref_isudata(ra)))) {
2067
+ int diff;
2068
+ rec_comp_prep(J);
2069
+ diff = lj_record_objcmp(J, ra, rc, rav, rcv);
2070
+ if (diff == 2 || !(tref_istab(ra) || tref_isudata(ra)))
2071
+ rec_comp_fixup(J, J->pc, ((int)op & 1) == !diff);
2072
+ else if (diff == 1) /* Only check __eq if different, but same type. */
2073
+ rec_mm_equal(J, &ix, (int)op);
2074
+ }
2075
+ break;
2076
+
2077
+ /* -- Unary test and copy ops ------------------------------------------- */
2078
+
2079
+ case BC_ISTC: case BC_ISFC:
2080
+ if ((op & 1) == tref_istruecond(rc))
2081
+ rc = 0; /* Don't store if condition is not true. */
2082
+ /* fallthrough */
2083
+ case BC_IST: case BC_ISF: /* Type specialization suffices. */
2084
+ if (bc_a(pc[1]) < J->maxslot)
2085
+ J->maxslot = bc_a(pc[1]); /* Shrink used slots. */
2086
+ break;
2087
+
2088
+ case BC_ISTYPE: case BC_ISNUM:
2089
+ /* These coercions need to correspond with lj_meta_istype(). */
2090
+ if (LJ_DUALNUM && rc == ~LJ_TNUMX+1)
2091
+ ra = lj_opt_narrow_toint(J, ra);
2092
+ else if (rc == ~LJ_TNUMX+2)
2093
+ ra = lj_ir_tonum(J, ra);
2094
+ else if (rc == ~LJ_TSTR+1)
2095
+ ra = lj_ir_tostr(J, ra);
2096
+ /* else: type specialization suffices. */
2097
+ J->base[bc_a(ins)] = ra;
2098
+ break;
2099
+
2100
+ /* -- Unary ops --------------------------------------------------------- */
2101
+
2102
+ case BC_NOT:
2103
+ /* Type specialization already forces const result. */
2104
+ rc = tref_istruecond(rc) ? TREF_FALSE : TREF_TRUE;
2105
+ break;
2106
+
2107
+ case BC_LEN:
2108
+ if (tref_isstr(rc))
2109
+ rc = emitir(IRTI(IR_FLOAD), rc, IRFL_STR_LEN);
2110
+ else if (!LJ_52 && tref_istab(rc))
2111
+ rc = lj_ir_call(J, IRCALL_lj_tab_len, rc);
2112
+ else
2113
+ rc = rec_mm_len(J, rc, rcv);
2114
+ break;
2115
+
2116
+ /* -- Arithmetic ops ---------------------------------------------------- */
2117
+
2118
+ case BC_UNM:
2119
+ if (tref_isnumber_str(rc)) {
2120
+ rc = lj_opt_narrow_unm(J, rc, rcv);
2121
+ } else {
2122
+ ix.tab = rc;
2123
+ copyTV(J->L, &ix.tabv, rcv);
2124
+ rc = rec_mm_arith(J, &ix, MM_unm);
2125
+ }
2126
+ break;
2127
+
2128
+ case BC_ADDNV: case BC_SUBNV: case BC_MULNV: case BC_DIVNV: case BC_MODNV:
2129
+ /* Swap rb/rc and rbv/rcv. rav is temp. */
2130
+ ix.tab = rc; ix.key = rc = rb; rb = ix.tab;
2131
+ copyTV(J->L, rav, rbv);
2132
+ copyTV(J->L, rbv, rcv);
2133
+ copyTV(J->L, rcv, rav);
2134
+ if (op == BC_MODNV)
2135
+ goto recmod;
2136
+ /* fallthrough */
2137
+ case BC_ADDVN: case BC_SUBVN: case BC_MULVN: case BC_DIVVN:
2138
+ case BC_ADDVV: case BC_SUBVV: case BC_MULVV: case BC_DIVVV: {
2139
+ MMS mm = bcmode_mm(op);
2140
+ if (tref_isnumber_str(rb) && tref_isnumber_str(rc))
2141
+ rc = lj_opt_narrow_arith(J, rb, rc, rbv, rcv,
2142
+ (int)mm - (int)MM_add + (int)IR_ADD);
2143
+ else
2144
+ rc = rec_mm_arith(J, &ix, mm);
2145
+ break;
2146
+ }
2147
+
2148
+ case BC_MODVN: case BC_MODVV:
2149
+ recmod:
2150
+ if (tref_isnumber_str(rb) && tref_isnumber_str(rc))
2151
+ rc = lj_opt_narrow_mod(J, rb, rc, rcv);
2152
+ else
2153
+ rc = rec_mm_arith(J, &ix, MM_mod);
2154
+ break;
2155
+
2156
+ case BC_POW:
2157
+ if (tref_isnumber_str(rb) && tref_isnumber_str(rc))
2158
+ rc = lj_opt_narrow_pow(J, lj_ir_tonum(J, rb), rc, rcv);
2159
+ else
2160
+ rc = rec_mm_arith(J, &ix, MM_pow);
2161
+ break;
2162
+
2163
+ /* -- Miscellaneous ops ------------------------------------------------- */
2164
+
2165
+ case BC_CAT:
2166
+ rc = rec_cat(J, rb, rc);
2167
+ break;
2168
+
2169
+ /* -- Constant and move ops --------------------------------------------- */
2170
+
2171
+ case BC_MOV:
2172
+ /* Clear gap of method call to avoid resurrecting previous refs. */
2173
+ if (ra > J->maxslot) J->base[ra-1] = 0;
2174
+ break;
2175
+ case BC_KSTR: case BC_KNUM: case BC_KPRI:
2176
+ break;
2177
+ case BC_KSHORT:
2178
+ rc = lj_ir_kint(J, (int32_t)(int16_t)rc);
2179
+ break;
2180
+ case BC_KNIL:
2181
+ while (ra <= rc)
2182
+ J->base[ra++] = TREF_NIL;
2183
+ if (rc >= J->maxslot) J->maxslot = rc+1;
2184
+ break;
2185
+ #if LJ_HASFFI
2186
+ case BC_KCDATA:
2187
+ rc = lj_ir_kgc(J, proto_kgc(J->pt, ~(ptrdiff_t)rc), IRT_CDATA);
2188
+ break;
2189
+ #endif
2190
+
2191
+ /* -- Upvalue and function ops ------------------------------------------ */
2192
+
2193
+ case BC_UGET:
2194
+ rc = rec_upvalue(J, rc, 0);
2195
+ break;
2196
+ case BC_USETV: case BC_USETS: case BC_USETN: case BC_USETP:
2197
+ rec_upvalue(J, ra, rc);
2198
+ break;
2199
+
2200
+ /* -- Table ops --------------------------------------------------------- */
2201
+
2202
+ case BC_GGET: case BC_GSET:
2203
+ settabV(J->L, &ix.tabv, tabref(J->fn->l.env));
2204
+ ix.tab = emitir(IRT(IR_FLOAD, IRT_TAB), getcurrf(J), IRFL_FUNC_ENV);
2205
+ ix.idxchain = LJ_MAX_IDXCHAIN;
2206
+ rc = lj_record_idx(J, &ix);
2207
+ break;
2208
+
2209
+ case BC_TGETB: case BC_TSETB:
2210
+ setintV(&ix.keyv, (int32_t)rc);
2211
+ ix.key = lj_ir_kint(J, (int32_t)rc);
2212
+ /* fallthrough */
2213
+ case BC_TGETV: case BC_TGETS: case BC_TSETV: case BC_TSETS:
2214
+ ix.idxchain = LJ_MAX_IDXCHAIN;
2215
+ rc = lj_record_idx(J, &ix);
2216
+ break;
2217
+ case BC_TGETR: case BC_TSETR:
2218
+ ix.idxchain = 0;
2219
+ rc = lj_record_idx(J, &ix);
2220
+ break;
2221
+
2222
+ case BC_TSETM:
2223
+ rec_tsetm(J, ra, (BCReg)(J->L->top - J->L->base), (int32_t)rcv->u32.lo);
2224
+ break;
2225
+
2226
+ case BC_TNEW:
2227
+ rc = rec_tnew(J, rc);
2228
+ break;
2229
+ case BC_TDUP:
2230
+ rc = emitir(IRTG(IR_TDUP, IRT_TAB),
2231
+ lj_ir_ktab(J, gco2tab(proto_kgc(J->pt, ~(ptrdiff_t)rc))), 0);
2232
+ #ifdef LUAJIT_ENABLE_TABLE_BUMP
2233
+ J->rbchash[(rc & (RBCHASH_SLOTS-1))].ref = tref_ref(rc);
2234
+ setmref(J->rbchash[(rc & (RBCHASH_SLOTS-1))].pc, pc);
2235
+ setgcref(J->rbchash[(rc & (RBCHASH_SLOTS-1))].pt, obj2gco(J->pt));
2236
+ #endif
2237
+ break;
2238
+
2239
+ /* -- Calls and vararg handling ----------------------------------------- */
2240
+
2241
+ case BC_ITERC:
2242
+ J->base[ra] = getslot(J, ra-3-LJ_FR2);
2243
+ J->base[ra+1] = getslot(J, ra-2-LJ_FR2);
2244
+ J->base[ra+2] = getslot(J, ra-1-LJ_FR2);
2245
+ { /* Do the actual copy now because lj_record_call needs the values. */
2246
+ TValue *b = &J->L->base[ra];
2247
+ copyTV(J->L, b, b-3-LJ_FR2);
2248
+ copyTV(J->L, b+1, b-2-LJ_FR2);
2249
+ copyTV(J->L, b+2, b-1-LJ_FR2);
2250
+ }
2251
+ lj_record_call(J, ra, (ptrdiff_t)rc-1);
2252
+ break;
2253
+
2254
+ /* L->top is set to L->base+ra+rc+NARGS-1+1. See lj_dispatch_ins(). */
2255
+ case BC_CALLM:
2256
+ rc = (BCReg)(J->L->top - J->L->base) - ra - LJ_FR2;
2257
+ /* fallthrough */
2258
+ case BC_CALL:
2259
+ lj_record_call(J, ra, (ptrdiff_t)rc-1);
2260
+ break;
2261
+
2262
+ case BC_CALLMT:
2263
+ rc = (BCReg)(J->L->top - J->L->base) - ra - LJ_FR2;
2264
+ /* fallthrough */
2265
+ case BC_CALLT:
2266
+ lj_record_tailcall(J, ra, (ptrdiff_t)rc-1);
2267
+ break;
2268
+
2269
+ case BC_VARG:
2270
+ rec_varg(J, ra, (ptrdiff_t)rb-1);
2271
+ break;
2272
+
2273
+ /* -- Returns ----------------------------------------------------------- */
2274
+
2275
+ case BC_RETM:
2276
+ /* L->top is set to L->base+ra+rc+NRESULTS-1, see lj_dispatch_ins(). */
2277
+ rc = (BCReg)(J->L->top - J->L->base) - ra + 1;
2278
+ /* fallthrough */
2279
+ case BC_RET: case BC_RET0: case BC_RET1:
2280
+ #if LJ_HASPROFILE
2281
+ rec_profile_ret(J);
2282
+ #endif
2283
+ lj_record_ret(J, ra, (ptrdiff_t)rc-1);
2284
+ break;
2285
+
2286
+ /* -- Loops and branches ------------------------------------------------ */
2287
+
2288
+ case BC_FORI:
2289
+ if (rec_for(J, pc, 0) != LOOPEV_LEAVE)
2290
+ J->loopref = J->cur.nins;
2291
+ break;
2292
+ case BC_JFORI:
2293
+ lua_assert(bc_op(pc[(ptrdiff_t)rc-BCBIAS_J]) == BC_JFORL);
2294
+ if (rec_for(J, pc, 0) != LOOPEV_LEAVE) /* Link to existing loop. */
2295
+ lj_record_stop(J, LJ_TRLINK_ROOT, bc_d(pc[(ptrdiff_t)rc-BCBIAS_J]));
2296
+ /* Continue tracing if the loop is not entered. */
2297
+ break;
2298
+
2299
+ case BC_FORL:
2300
+ rec_loop_interp(J, pc, rec_for(J, pc+((ptrdiff_t)rc-BCBIAS_J), 1));
2301
+ break;
2302
+ case BC_ITERL:
2303
+ rec_loop_interp(J, pc, rec_iterl(J, *pc));
2304
+ break;
2305
+ case BC_LOOP:
2306
+ rec_loop_interp(J, pc, rec_loop(J, ra));
2307
+ break;
2308
+
2309
+ case BC_JFORL:
2310
+ rec_loop_jit(J, rc, rec_for(J, pc+bc_j(traceref(J, rc)->startins), 1));
2311
+ break;
2312
+ case BC_JITERL:
2313
+ rec_loop_jit(J, rc, rec_iterl(J, traceref(J, rc)->startins));
2314
+ break;
2315
+ case BC_JLOOP:
2316
+ rec_loop_jit(J, rc, rec_loop(J, ra));
2317
+ break;
2318
+
2319
+ case BC_IFORL:
2320
+ case BC_IITERL:
2321
+ case BC_ILOOP:
2322
+ case BC_IFUNCF:
2323
+ case BC_IFUNCV:
2324
+ lj_trace_err(J, LJ_TRERR_BLACKL);
2325
+ break;
2326
+
2327
+ case BC_JMP:
2328
+ if (ra < J->maxslot)
2329
+ J->maxslot = ra; /* Shrink used slots. */
2330
+ break;
2331
+
2332
+ /* -- Function headers -------------------------------------------------- */
2333
+
2334
+ case BC_FUNCF:
2335
+ rec_func_lua(J);
2336
+ break;
2337
+ case BC_JFUNCF:
2338
+ rec_func_jit(J, rc);
2339
+ break;
2340
+
2341
+ case BC_FUNCV:
2342
+ rec_func_vararg(J);
2343
+ rec_func_lua(J);
2344
+ break;
2345
+ case BC_JFUNCV:
2346
+ lua_assert(0); /* Cannot happen. No hotcall counting for vararg funcs. */
2347
+ break;
2348
+
2349
+ case BC_FUNCC:
2350
+ case BC_FUNCCW:
2351
+ lj_ffrecord_func(J);
2352
+ break;
2353
+
2354
+ default:
2355
+ if (op >= BC__MAX) {
2356
+ lj_ffrecord_func(J);
2357
+ break;
2358
+ }
2359
+ /* fallthrough */
2360
+ case BC_ITERN:
2361
+ case BC_ISNEXT:
2362
+ case BC_UCLO:
2363
+ case BC_FNEW:
2364
+ setintV(&J->errinfo, (int32_t)op);
2365
+ lj_trace_err_info(J, LJ_TRERR_NYIBC);
2366
+ break;
2367
+ }
2368
+
2369
+ /* rc == 0 if we have no result yet, e.g. pending __index metamethod call. */
2370
+ if (bcmode_a(op) == BCMdst && rc) {
2371
+ J->base[ra] = rc;
2372
+ if (ra >= J->maxslot) J->maxslot = ra+1;
2373
+ }
2374
+
2375
+ #undef rav
2376
+ #undef rbv
2377
+ #undef rcv
2378
+
2379
+ /* Limit the number of recorded IR instructions. */
2380
+ if (J->cur.nins > REF_FIRST+(IRRef)J->param[JIT_P_maxrecord])
2381
+ lj_trace_err(J, LJ_TRERR_TRACEOV);
2382
+ }
2383
+
2384
+ /* -- Recording setup ----------------------------------------------------- */
+
+ /* Setup recording for a root trace started by a hot loop. */
+ static const BCIns *rec_setup_root(jit_State *J)
+ {
+ /* Determine the next PC and the bytecode range for the loop. */
+ const BCIns *pcj, *pc = J->pc;
+ BCIns ins = *pc;
+ BCReg ra = bc_a(ins);
+ switch (bc_op(ins)) {
+ case BC_FORL:
+ J->bc_extent = (MSize)(-bc_j(ins))*sizeof(BCIns);
+ pc += 1+bc_j(ins);
+ J->bc_min = pc;
+ break;
+ case BC_ITERL:
+ lua_assert(bc_op(pc[-1]) == BC_ITERC);
+ J->maxslot = ra + bc_b(pc[-1]) - 1;
+ J->bc_extent = (MSize)(-bc_j(ins))*sizeof(BCIns);
+ pc += 1+bc_j(ins);
+ lua_assert(bc_op(pc[-1]) == BC_JMP);
+ J->bc_min = pc;
+ break;
+ case BC_LOOP:
+ /* Only check BC range for real loops, but not for "repeat until true". */
+ pcj = pc + bc_j(ins);
+ ins = *pcj;
+ if (bc_op(ins) == BC_JMP && bc_j(ins) < 0) {
+ J->bc_min = pcj+1 + bc_j(ins);
+ J->bc_extent = (MSize)(-bc_j(ins))*sizeof(BCIns);
+ }
+ J->maxslot = ra;
+ pc++;
+ break;
+ case BC_RET:
+ case BC_RET0:
+ case BC_RET1:
+ /* No bytecode range check for down-recursive root traces. */
+ J->maxslot = ra + bc_d(ins) - 1;
+ break;
+ case BC_FUNCF:
+ /* No bytecode range check for root traces started by a hot call. */
+ J->maxslot = J->pt->numparams;
+ pc++;
+ break;
+ case BC_CALLM:
+ case BC_CALL:
+ case BC_ITERC:
+ /* No bytecode range check for stitched traces. */
+ pc++;
+ break;
+ default:
+ lua_assert(0);
+ break;
+ }
+ return pc;
+ }
+
+ /* Setup for recording a new trace. */
+ void lj_record_setup(jit_State *J)
+ {
+ uint32_t i;
+
+ /* Initialize state related to current trace. */
+ memset(J->slot, 0, sizeof(J->slot));
+ memset(J->chain, 0, sizeof(J->chain));
+ #ifdef LUAJIT_ENABLE_TABLE_BUMP
+ memset(J->rbchash, 0, sizeof(J->rbchash));
+ #endif
+ memset(J->bpropcache, 0, sizeof(J->bpropcache));
+ J->scev.idx = REF_NIL;
+ setmref(J->scev.pc, NULL);
+
+ J->baseslot = 1; /* Invoking function is at base[-1]. */
+ J->base = J->slot + J->baseslot;
+ J->maxslot = 0;
+ J->framedepth = 0;
+ J->retdepth = 0;
+
+ J->instunroll = J->param[JIT_P_instunroll];
+ J->loopunroll = J->param[JIT_P_loopunroll];
+ J->tailcalled = 0;
+ J->loopref = 0;
+
+ J->bc_min = NULL; /* Means no limit. */
+ J->bc_extent = ~(MSize)0;
+
+ /* Emit instructions for fixed references. Also triggers initial IR alloc. */
+ emitir_raw(IRT(IR_BASE, IRT_P32), J->parent, J->exitno);
+ for (i = 0; i <= 2; i++) {
+ IRIns *ir = IR(REF_NIL-i);
+ ir->i = 0;
+ ir->t.irt = (uint8_t)(IRT_NIL+i);
+ ir->o = IR_KPRI;
+ ir->prev = 0;
+ }
+ J->cur.nk = REF_TRUE;
+
+ J->startpc = J->pc;
+ setmref(J->cur.startpc, J->pc);
+ if (J->parent) { /* Side trace. */
+ GCtrace *T = traceref(J, J->parent);
+ TraceNo root = T->root ? T->root : J->parent;
+ J->cur.root = (uint16_t)root;
+ J->cur.startins = BCINS_AD(BC_JMP, 0, 0);
+ /* Check whether we could at least potentially form an extra loop. */
+ if (J->exitno == 0 && T->snap[0].nent == 0) {
+ /* We can narrow a FORL for some side traces, too. */
+ if (J->pc > proto_bc(J->pt) && bc_op(J->pc[-1]) == BC_JFORI &&
+ bc_d(J->pc[bc_j(J->pc[-1])-1]) == root) {
+ lj_snap_add(J);
+ rec_for_loop(J, J->pc-1, &J->scev, 1);
+ goto sidecheck;
+ }
+ } else {
+ J->startpc = NULL; /* Prevent forming an extra loop. */
+ }
+ lj_snap_replay(J, T);
+ sidecheck:
+ if (traceref(J, J->cur.root)->nchild >= J->param[JIT_P_maxside] ||
+ T->snap[J->exitno].count >= J->param[JIT_P_hotexit] +
+ J->param[JIT_P_tryside]) {
+ lj_record_stop(J, LJ_TRLINK_INTERP, 0);
+ }
+ } else { /* Root trace. */
+ J->cur.root = 0;
+ J->cur.startins = *J->pc;
+ J->pc = rec_setup_root(J);
+ /* Note: the loop instruction itself is recorded at the end and not
+ ** at the start! So snapshot #0 needs to point to the *next* instruction.
+ */
+ lj_snap_add(J);
+ if (bc_op(J->cur.startins) == BC_FORL)
+ rec_for_loop(J, J->pc-1, &J->scev, 1);
+ else if (bc_op(J->cur.startins) == BC_ITERC)
+ J->startpc = NULL;
+ if (1 + J->pt->framesize >= LJ_MAX_JSLOTS)
+ lj_trace_err(J, LJ_TRERR_STACKOV);
+ }
+ #if LJ_HASPROFILE
+ J->prev_pt = NULL;
+ J->prev_line = -1;
+ #endif
+ #ifdef LUAJIT_ENABLE_CHECKHOOK
+ /* Regularly check for instruction/line hooks from compiled code and
+ ** exit to the interpreter if the hooks are set.
+ **
+ ** This is a compile-time option and disabled by default, since the
+ ** hook checks may be quite expensive in tight loops.
+ **
+ ** Note this is only useful if hooks are *not* set most of the time.
+ ** Use this only if you want to *asynchronously* interrupt the execution.
+ **
+ ** You can set the instruction hook via lua_sethook() with a count of 1
+ ** from a signal handler or another native thread. Please have a look
+ ** at the first few functions in luajit.c for an example (Ctrl-C handler).
+ */
+ {
+ TRef tr = emitir(IRT(IR_XLOAD, IRT_U8),
+ lj_ir_kptr(J, &J2G(J)->hookmask), IRXLOAD_VOLATILE);
+ tr = emitir(IRTI(IR_BAND), tr, lj_ir_kint(J, (LUA_MASKLINE|LUA_MASKCOUNT)));
+ emitir(IRTGI(IR_EQ), tr, lj_ir_kint(J, 0));
+ }
+ #endif
+ }
+
+ #undef IR
+ #undef emitir_raw
+ #undef emitir
+
+ #endif
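The LUAJIT_ENABLE_CHECKHOOK comment in lj_record_setup() above describes installing an instruction hook asynchronously so that even compiled traces drop back to the interpreter. A minimal sketch of that pattern, modelled on the Ctrl-C handler the comment points to in luajit.c (the globalL variable and the function names are ours; lua_sethook() and the hook masks are the standard Lua C API):

    #include <signal.h>
    #include <lua.h>
    #include <lauxlib.h>

    static lua_State *globalL;  /* Set by the embedder before running code. */

    static void on_break(lua_State *L, lua_Debug *ar)
    {
      (void)ar;
      lua_sethook(L, NULL, 0, 0);    /* Remove the hook once it has fired. */
      luaL_error(L, "interrupted!");
    }

    static void sigint_handler(int sig)
    {
      (void)sig;
      /* Request a hook call at the next instruction; with
      ** LUAJIT_ENABLE_CHECKHOOK compiled traces honor this as well. */
      lua_sethook(globalL, on_break,
                  LUA_MASKCALL | LUA_MASKRET | LUA_MASKCOUNT, 1);
    }

    void install_break_handler(lua_State *L)
    {
      globalL = L;
      signal(SIGINT, sigint_handler);
    }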