libv8-sgonyea 3.3.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (500) hide show
  1. data/.gitignore +8 -0
  2. data/.gitmodules +3 -0
  3. data/Gemfile +4 -0
  4. data/README.md +76 -0
  5. data/Rakefile +113 -0
  6. data/ext/libv8/extconf.rb +28 -0
  7. data/lib/libv8.rb +15 -0
  8. data/lib/libv8/Makefile +30 -0
  9. data/lib/libv8/detect_cpu.rb +27 -0
  10. data/lib/libv8/fpic-on-linux-amd64.patch +13 -0
  11. data/lib/libv8/v8/.gitignore +35 -0
  12. data/lib/libv8/v8/AUTHORS +44 -0
  13. data/lib/libv8/v8/ChangeLog +2839 -0
  14. data/lib/libv8/v8/LICENSE +52 -0
  15. data/lib/libv8/v8/LICENSE.strongtalk +29 -0
  16. data/lib/libv8/v8/LICENSE.v8 +26 -0
  17. data/lib/libv8/v8/LICENSE.valgrind +45 -0
  18. data/lib/libv8/v8/SConstruct +1478 -0
  19. data/lib/libv8/v8/build/README.txt +49 -0
  20. data/lib/libv8/v8/build/all.gyp +18 -0
  21. data/lib/libv8/v8/build/armu.gypi +32 -0
  22. data/lib/libv8/v8/build/common.gypi +144 -0
  23. data/lib/libv8/v8/build/gyp_v8 +145 -0
  24. data/lib/libv8/v8/include/v8-debug.h +395 -0
  25. data/lib/libv8/v8/include/v8-preparser.h +117 -0
  26. data/lib/libv8/v8/include/v8-profiler.h +505 -0
  27. data/lib/libv8/v8/include/v8-testing.h +104 -0
  28. data/lib/libv8/v8/include/v8.h +4124 -0
  29. data/lib/libv8/v8/include/v8stdint.h +53 -0
  30. data/lib/libv8/v8/preparser/SConscript +38 -0
  31. data/lib/libv8/v8/preparser/preparser-process.cc +379 -0
  32. data/lib/libv8/v8/src/SConscript +368 -0
  33. data/lib/libv8/v8/src/accessors.cc +767 -0
  34. data/lib/libv8/v8/src/accessors.h +123 -0
  35. data/lib/libv8/v8/src/allocation-inl.h +49 -0
  36. data/lib/libv8/v8/src/allocation.cc +122 -0
  37. data/lib/libv8/v8/src/allocation.h +143 -0
  38. data/lib/libv8/v8/src/api.cc +5845 -0
  39. data/lib/libv8/v8/src/api.h +574 -0
  40. data/lib/libv8/v8/src/apinatives.js +110 -0
  41. data/lib/libv8/v8/src/apiutils.h +73 -0
  42. data/lib/libv8/v8/src/arguments.h +118 -0
  43. data/lib/libv8/v8/src/arm/assembler-arm-inl.h +353 -0
  44. data/lib/libv8/v8/src/arm/assembler-arm.cc +2661 -0
  45. data/lib/libv8/v8/src/arm/assembler-arm.h +1375 -0
  46. data/lib/libv8/v8/src/arm/builtins-arm.cc +1658 -0
  47. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +6398 -0
  48. data/lib/libv8/v8/src/arm/code-stubs-arm.h +673 -0
  49. data/lib/libv8/v8/src/arm/codegen-arm.cc +52 -0
  50. data/lib/libv8/v8/src/arm/codegen-arm.h +91 -0
  51. data/lib/libv8/v8/src/arm/constants-arm.cc +152 -0
  52. data/lib/libv8/v8/src/arm/constants-arm.h +775 -0
  53. data/lib/libv8/v8/src/arm/cpu-arm.cc +120 -0
  54. data/lib/libv8/v8/src/arm/debug-arm.cc +317 -0
  55. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +754 -0
  56. data/lib/libv8/v8/src/arm/disasm-arm.cc +1506 -0
  57. data/lib/libv8/v8/src/arm/frames-arm.cc +45 -0
  58. data/lib/libv8/v8/src/arm/frames-arm.h +168 -0
  59. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +4375 -0
  60. data/lib/libv8/v8/src/arm/ic-arm.cc +1562 -0
  61. data/lib/libv8/v8/src/arm/lithium-arm.cc +2206 -0
  62. data/lib/libv8/v8/src/arm/lithium-arm.h +2348 -0
  63. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +4526 -0
  64. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +403 -0
  65. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  66. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.h +84 -0
  67. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +3163 -0
  68. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +1126 -0
  69. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  70. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  71. data/lib/libv8/v8/src/arm/simulator-arm.cc +3424 -0
  72. data/lib/libv8/v8/src/arm/simulator-arm.h +431 -0
  73. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +4243 -0
  74. data/lib/libv8/v8/src/array.js +1366 -0
  75. data/lib/libv8/v8/src/assembler.cc +1207 -0
  76. data/lib/libv8/v8/src/assembler.h +858 -0
  77. data/lib/libv8/v8/src/ast-inl.h +112 -0
  78. data/lib/libv8/v8/src/ast.cc +1146 -0
  79. data/lib/libv8/v8/src/ast.h +2188 -0
  80. data/lib/libv8/v8/src/atomicops.h +167 -0
  81. data/lib/libv8/v8/src/atomicops_internals_arm_gcc.h +145 -0
  82. data/lib/libv8/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.cc +133 -0
  84. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.h +287 -0
  85. data/lib/libv8/v8/src/atomicops_internals_x86_macosx.h +301 -0
  86. data/lib/libv8/v8/src/atomicops_internals_x86_msvc.h +203 -0
  87. data/lib/libv8/v8/src/bignum-dtoa.cc +655 -0
  88. data/lib/libv8/v8/src/bignum-dtoa.h +81 -0
  89. data/lib/libv8/v8/src/bignum.cc +768 -0
  90. data/lib/libv8/v8/src/bignum.h +140 -0
  91. data/lib/libv8/v8/src/bootstrapper.cc +2184 -0
  92. data/lib/libv8/v8/src/bootstrapper.h +188 -0
  93. data/lib/libv8/v8/src/builtins.cc +1707 -0
  94. data/lib/libv8/v8/src/builtins.h +371 -0
  95. data/lib/libv8/v8/src/bytecodes-irregexp.h +105 -0
  96. data/lib/libv8/v8/src/cached-powers.cc +177 -0
  97. data/lib/libv8/v8/src/cached-powers.h +65 -0
  98. data/lib/libv8/v8/src/char-predicates-inl.h +94 -0
  99. data/lib/libv8/v8/src/char-predicates.h +67 -0
  100. data/lib/libv8/v8/src/checks.cc +110 -0
  101. data/lib/libv8/v8/src/checks.h +296 -0
  102. data/lib/libv8/v8/src/circular-queue-inl.h +53 -0
  103. data/lib/libv8/v8/src/circular-queue.cc +122 -0
  104. data/lib/libv8/v8/src/circular-queue.h +103 -0
  105. data/lib/libv8/v8/src/code-stubs.cc +267 -0
  106. data/lib/libv8/v8/src/code-stubs.h +1011 -0
  107. data/lib/libv8/v8/src/code.h +70 -0
  108. data/lib/libv8/v8/src/codegen.cc +231 -0
  109. data/lib/libv8/v8/src/codegen.h +84 -0
  110. data/lib/libv8/v8/src/compilation-cache.cc +540 -0
  111. data/lib/libv8/v8/src/compilation-cache.h +287 -0
  112. data/lib/libv8/v8/src/compiler.cc +786 -0
  113. data/lib/libv8/v8/src/compiler.h +312 -0
  114. data/lib/libv8/v8/src/contexts.cc +347 -0
  115. data/lib/libv8/v8/src/contexts.h +391 -0
  116. data/lib/libv8/v8/src/conversions-inl.h +106 -0
  117. data/lib/libv8/v8/src/conversions.cc +1131 -0
  118. data/lib/libv8/v8/src/conversions.h +135 -0
  119. data/lib/libv8/v8/src/counters.cc +93 -0
  120. data/lib/libv8/v8/src/counters.h +254 -0
  121. data/lib/libv8/v8/src/cpu-profiler-inl.h +101 -0
  122. data/lib/libv8/v8/src/cpu-profiler.cc +609 -0
  123. data/lib/libv8/v8/src/cpu-profiler.h +302 -0
  124. data/lib/libv8/v8/src/cpu.h +69 -0
  125. data/lib/libv8/v8/src/d8-debug.cc +367 -0
  126. data/lib/libv8/v8/src/d8-debug.h +158 -0
  127. data/lib/libv8/v8/src/d8-posix.cc +695 -0
  128. data/lib/libv8/v8/src/d8-readline.cc +130 -0
  129. data/lib/libv8/v8/src/d8-windows.cc +42 -0
  130. data/lib/libv8/v8/src/d8.cc +803 -0
  131. data/lib/libv8/v8/src/d8.gyp +91 -0
  132. data/lib/libv8/v8/src/d8.h +235 -0
  133. data/lib/libv8/v8/src/d8.js +2798 -0
  134. data/lib/libv8/v8/src/data-flow.cc +66 -0
  135. data/lib/libv8/v8/src/data-flow.h +205 -0
  136. data/lib/libv8/v8/src/date.js +1103 -0
  137. data/lib/libv8/v8/src/dateparser-inl.h +127 -0
  138. data/lib/libv8/v8/src/dateparser.cc +178 -0
  139. data/lib/libv8/v8/src/dateparser.h +266 -0
  140. data/lib/libv8/v8/src/debug-agent.cc +447 -0
  141. data/lib/libv8/v8/src/debug-agent.h +129 -0
  142. data/lib/libv8/v8/src/debug-debugger.js +2569 -0
  143. data/lib/libv8/v8/src/debug.cc +3165 -0
  144. data/lib/libv8/v8/src/debug.h +1057 -0
  145. data/lib/libv8/v8/src/deoptimizer.cc +1256 -0
  146. data/lib/libv8/v8/src/deoptimizer.h +602 -0
  147. data/lib/libv8/v8/src/disasm.h +80 -0
  148. data/lib/libv8/v8/src/disassembler.cc +343 -0
  149. data/lib/libv8/v8/src/disassembler.h +58 -0
  150. data/lib/libv8/v8/src/diy-fp.cc +58 -0
  151. data/lib/libv8/v8/src/diy-fp.h +117 -0
  152. data/lib/libv8/v8/src/double.h +238 -0
  153. data/lib/libv8/v8/src/dtoa.cc +103 -0
  154. data/lib/libv8/v8/src/dtoa.h +85 -0
  155. data/lib/libv8/v8/src/execution.cc +849 -0
  156. data/lib/libv8/v8/src/execution.h +297 -0
  157. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +250 -0
  158. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +89 -0
  159. data/lib/libv8/v8/src/extensions/experimental/collator.cc +218 -0
  160. data/lib/libv8/v8/src/extensions/experimental/collator.h +69 -0
  161. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +94 -0
  162. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +78 -0
  163. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +54 -0
  164. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +112 -0
  165. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +60 -0
  166. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +43 -0
  167. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +49 -0
  168. data/lib/libv8/v8/src/extensions/experimental/i18n.js +180 -0
  169. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +251 -0
  170. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +95 -0
  171. data/lib/libv8/v8/src/extensions/externalize-string-extension.cc +141 -0
  172. data/lib/libv8/v8/src/extensions/externalize-string-extension.h +50 -0
  173. data/lib/libv8/v8/src/extensions/gc-extension.cc +58 -0
  174. data/lib/libv8/v8/src/extensions/gc-extension.h +49 -0
  175. data/lib/libv8/v8/src/factory.cc +1222 -0
  176. data/lib/libv8/v8/src/factory.h +442 -0
  177. data/lib/libv8/v8/src/fast-dtoa.cc +736 -0
  178. data/lib/libv8/v8/src/fast-dtoa.h +83 -0
  179. data/lib/libv8/v8/src/fixed-dtoa.cc +405 -0
  180. data/lib/libv8/v8/src/fixed-dtoa.h +55 -0
  181. data/lib/libv8/v8/src/flag-definitions.h +560 -0
  182. data/lib/libv8/v8/src/flags.cc +551 -0
  183. data/lib/libv8/v8/src/flags.h +79 -0
  184. data/lib/libv8/v8/src/frames-inl.h +247 -0
  185. data/lib/libv8/v8/src/frames.cc +1243 -0
  186. data/lib/libv8/v8/src/frames.h +870 -0
  187. data/lib/libv8/v8/src/full-codegen.cc +1374 -0
  188. data/lib/libv8/v8/src/full-codegen.h +771 -0
  189. data/lib/libv8/v8/src/func-name-inferrer.cc +92 -0
  190. data/lib/libv8/v8/src/func-name-inferrer.h +111 -0
  191. data/lib/libv8/v8/src/gdb-jit.cc +1555 -0
  192. data/lib/libv8/v8/src/gdb-jit.h +143 -0
  193. data/lib/libv8/v8/src/global-handles.cc +665 -0
  194. data/lib/libv8/v8/src/global-handles.h +284 -0
  195. data/lib/libv8/v8/src/globals.h +325 -0
  196. data/lib/libv8/v8/src/handles-inl.h +177 -0
  197. data/lib/libv8/v8/src/handles.cc +987 -0
  198. data/lib/libv8/v8/src/handles.h +382 -0
  199. data/lib/libv8/v8/src/hashmap.cc +230 -0
  200. data/lib/libv8/v8/src/hashmap.h +123 -0
  201. data/lib/libv8/v8/src/heap-inl.h +704 -0
  202. data/lib/libv8/v8/src/heap-profiler.cc +1173 -0
  203. data/lib/libv8/v8/src/heap-profiler.h +397 -0
  204. data/lib/libv8/v8/src/heap.cc +5930 -0
  205. data/lib/libv8/v8/src/heap.h +2268 -0
  206. data/lib/libv8/v8/src/hydrogen-instructions.cc +1769 -0
  207. data/lib/libv8/v8/src/hydrogen-instructions.h +3971 -0
  208. data/lib/libv8/v8/src/hydrogen.cc +6239 -0
  209. data/lib/libv8/v8/src/hydrogen.h +1202 -0
  210. data/lib/libv8/v8/src/ia32/assembler-ia32-inl.h +446 -0
  211. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +2487 -0
  212. data/lib/libv8/v8/src/ia32/assembler-ia32.h +1144 -0
  213. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +1621 -0
  214. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +6198 -0
  215. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +517 -0
  216. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +265 -0
  217. data/lib/libv8/v8/src/ia32/codegen-ia32.h +79 -0
  218. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +88 -0
  219. data/lib/libv8/v8/src/ia32/debug-ia32.cc +312 -0
  220. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  221. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +1628 -0
  222. data/lib/libv8/v8/src/ia32/frames-ia32.cc +45 -0
  223. data/lib/libv8/v8/src/ia32/frames-ia32.h +142 -0
  224. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +4338 -0
  225. data/lib/libv8/v8/src/ia32/ic-ia32.cc +1597 -0
  226. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +4461 -0
  227. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +375 -0
  228. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +475 -0
  229. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  230. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +2261 -0
  231. data/lib/libv8/v8/src/ia32/lithium-ia32.h +2396 -0
  232. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +2136 -0
  233. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +775 -0
  234. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +1263 -0
  235. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  236. data/lib/libv8/v8/src/ia32/simulator-ia32.cc +30 -0
  237. data/lib/libv8/v8/src/ia32/simulator-ia32.h +74 -0
  238. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +3847 -0
  239. data/lib/libv8/v8/src/ic-inl.h +130 -0
  240. data/lib/libv8/v8/src/ic.cc +2577 -0
  241. data/lib/libv8/v8/src/ic.h +736 -0
  242. data/lib/libv8/v8/src/inspector.cc +63 -0
  243. data/lib/libv8/v8/src/inspector.h +62 -0
  244. data/lib/libv8/v8/src/interpreter-irregexp.cc +659 -0
  245. data/lib/libv8/v8/src/interpreter-irregexp.h +49 -0
  246. data/lib/libv8/v8/src/isolate-inl.h +50 -0
  247. data/lib/libv8/v8/src/isolate.cc +1869 -0
  248. data/lib/libv8/v8/src/isolate.h +1382 -0
  249. data/lib/libv8/v8/src/json-parser.cc +504 -0
  250. data/lib/libv8/v8/src/json-parser.h +161 -0
  251. data/lib/libv8/v8/src/json.js +342 -0
  252. data/lib/libv8/v8/src/jsregexp.cc +5385 -0
  253. data/lib/libv8/v8/src/jsregexp.h +1492 -0
  254. data/lib/libv8/v8/src/list-inl.h +212 -0
  255. data/lib/libv8/v8/src/list.h +174 -0
  256. data/lib/libv8/v8/src/lithium-allocator-inl.h +142 -0
  257. data/lib/libv8/v8/src/lithium-allocator.cc +2123 -0
  258. data/lib/libv8/v8/src/lithium-allocator.h +630 -0
  259. data/lib/libv8/v8/src/lithium.cc +190 -0
  260. data/lib/libv8/v8/src/lithium.h +597 -0
  261. data/lib/libv8/v8/src/liveedit-debugger.js +1082 -0
  262. data/lib/libv8/v8/src/liveedit.cc +1691 -0
  263. data/lib/libv8/v8/src/liveedit.h +180 -0
  264. data/lib/libv8/v8/src/liveobjectlist-inl.h +126 -0
  265. data/lib/libv8/v8/src/liveobjectlist.cc +2589 -0
  266. data/lib/libv8/v8/src/liveobjectlist.h +322 -0
  267. data/lib/libv8/v8/src/log-inl.h +59 -0
  268. data/lib/libv8/v8/src/log-utils.cc +428 -0
  269. data/lib/libv8/v8/src/log-utils.h +231 -0
  270. data/lib/libv8/v8/src/log.cc +1993 -0
  271. data/lib/libv8/v8/src/log.h +476 -0
  272. data/lib/libv8/v8/src/macro-assembler.h +120 -0
  273. data/lib/libv8/v8/src/macros.py +178 -0
  274. data/lib/libv8/v8/src/mark-compact.cc +3143 -0
  275. data/lib/libv8/v8/src/mark-compact.h +506 -0
  276. data/lib/libv8/v8/src/math.js +264 -0
  277. data/lib/libv8/v8/src/messages.cc +179 -0
  278. data/lib/libv8/v8/src/messages.h +113 -0
  279. data/lib/libv8/v8/src/messages.js +1096 -0
  280. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +312 -0
  281. data/lib/libv8/v8/src/mips/assembler-mips.cc +1960 -0
  282. data/lib/libv8/v8/src/mips/assembler-mips.h +1138 -0
  283. data/lib/libv8/v8/src/mips/builtins-mips.cc +1628 -0
  284. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +6656 -0
  285. data/lib/libv8/v8/src/mips/code-stubs-mips.h +682 -0
  286. data/lib/libv8/v8/src/mips/codegen-mips.cc +52 -0
  287. data/lib/libv8/v8/src/mips/codegen-mips.h +98 -0
  288. data/lib/libv8/v8/src/mips/constants-mips.cc +352 -0
  289. data/lib/libv8/v8/src/mips/constants-mips.h +739 -0
  290. data/lib/libv8/v8/src/mips/cpu-mips.cc +96 -0
  291. data/lib/libv8/v8/src/mips/debug-mips.cc +308 -0
  292. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +91 -0
  293. data/lib/libv8/v8/src/mips/disasm-mips.cc +1050 -0
  294. data/lib/libv8/v8/src/mips/frames-mips.cc +47 -0
  295. data/lib/libv8/v8/src/mips/frames-mips.h +219 -0
  296. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +4388 -0
  297. data/lib/libv8/v8/src/mips/ic-mips.cc +1580 -0
  298. data/lib/libv8/v8/src/mips/lithium-codegen-mips.h +65 -0
  299. data/lib/libv8/v8/src/mips/lithium-mips.h +307 -0
  300. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +4056 -0
  301. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +1214 -0
  302. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +1251 -0
  303. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +252 -0
  304. data/lib/libv8/v8/src/mips/simulator-mips.cc +2621 -0
  305. data/lib/libv8/v8/src/mips/simulator-mips.h +401 -0
  306. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +4285 -0
  307. data/lib/libv8/v8/src/mirror-debugger.js +2382 -0
  308. data/lib/libv8/v8/src/mksnapshot.cc +328 -0
  309. data/lib/libv8/v8/src/natives.h +64 -0
  310. data/lib/libv8/v8/src/objects-debug.cc +738 -0
  311. data/lib/libv8/v8/src/objects-inl.h +4323 -0
  312. data/lib/libv8/v8/src/objects-printer.cc +829 -0
  313. data/lib/libv8/v8/src/objects-visiting.cc +148 -0
  314. data/lib/libv8/v8/src/objects-visiting.h +424 -0
  315. data/lib/libv8/v8/src/objects.cc +10585 -0
  316. data/lib/libv8/v8/src/objects.h +6838 -0
  317. data/lib/libv8/v8/src/parser.cc +4997 -0
  318. data/lib/libv8/v8/src/parser.h +765 -0
  319. data/lib/libv8/v8/src/platform-cygwin.cc +779 -0
  320. data/lib/libv8/v8/src/platform-freebsd.cc +826 -0
  321. data/lib/libv8/v8/src/platform-linux.cc +1149 -0
  322. data/lib/libv8/v8/src/platform-macos.cc +830 -0
  323. data/lib/libv8/v8/src/platform-nullos.cc +479 -0
  324. data/lib/libv8/v8/src/platform-openbsd.cc +640 -0
  325. data/lib/libv8/v8/src/platform-posix.cc +424 -0
  326. data/lib/libv8/v8/src/platform-solaris.cc +762 -0
  327. data/lib/libv8/v8/src/platform-tls-mac.h +62 -0
  328. data/lib/libv8/v8/src/platform-tls-win32.h +62 -0
  329. data/lib/libv8/v8/src/platform-tls.h +50 -0
  330. data/lib/libv8/v8/src/platform-win32.cc +2021 -0
  331. data/lib/libv8/v8/src/platform.h +667 -0
  332. data/lib/libv8/v8/src/preparse-data-format.h +62 -0
  333. data/lib/libv8/v8/src/preparse-data.cc +183 -0
  334. data/lib/libv8/v8/src/preparse-data.h +225 -0
  335. data/lib/libv8/v8/src/preparser-api.cc +220 -0
  336. data/lib/libv8/v8/src/preparser.cc +1450 -0
  337. data/lib/libv8/v8/src/preparser.h +493 -0
  338. data/lib/libv8/v8/src/prettyprinter.cc +1493 -0
  339. data/lib/libv8/v8/src/prettyprinter.h +223 -0
  340. data/lib/libv8/v8/src/profile-generator-inl.h +128 -0
  341. data/lib/libv8/v8/src/profile-generator.cc +3098 -0
  342. data/lib/libv8/v8/src/profile-generator.h +1126 -0
  343. data/lib/libv8/v8/src/property.cc +105 -0
  344. data/lib/libv8/v8/src/property.h +365 -0
  345. data/lib/libv8/v8/src/proxy.js +83 -0
  346. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp-inl.h +78 -0
  347. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.cc +471 -0
  348. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.h +142 -0
  349. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.cc +373 -0
  350. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.h +104 -0
  351. data/lib/libv8/v8/src/regexp-macro-assembler.cc +267 -0
  352. data/lib/libv8/v8/src/regexp-macro-assembler.h +243 -0
  353. data/lib/libv8/v8/src/regexp-stack.cc +111 -0
  354. data/lib/libv8/v8/src/regexp-stack.h +147 -0
  355. data/lib/libv8/v8/src/regexp.js +483 -0
  356. data/lib/libv8/v8/src/rewriter.cc +360 -0
  357. data/lib/libv8/v8/src/rewriter.h +50 -0
  358. data/lib/libv8/v8/src/runtime-profiler.cc +489 -0
  359. data/lib/libv8/v8/src/runtime-profiler.h +201 -0
  360. data/lib/libv8/v8/src/runtime.cc +12227 -0
  361. data/lib/libv8/v8/src/runtime.h +652 -0
  362. data/lib/libv8/v8/src/runtime.js +649 -0
  363. data/lib/libv8/v8/src/safepoint-table.cc +256 -0
  364. data/lib/libv8/v8/src/safepoint-table.h +270 -0
  365. data/lib/libv8/v8/src/scanner-base.cc +952 -0
  366. data/lib/libv8/v8/src/scanner-base.h +670 -0
  367. data/lib/libv8/v8/src/scanner.cc +345 -0
  368. data/lib/libv8/v8/src/scanner.h +146 -0
  369. data/lib/libv8/v8/src/scopeinfo.cc +646 -0
  370. data/lib/libv8/v8/src/scopeinfo.h +254 -0
  371. data/lib/libv8/v8/src/scopes.cc +1150 -0
  372. data/lib/libv8/v8/src/scopes.h +507 -0
  373. data/lib/libv8/v8/src/serialize.cc +1574 -0
  374. data/lib/libv8/v8/src/serialize.h +589 -0
  375. data/lib/libv8/v8/src/shell.h +55 -0
  376. data/lib/libv8/v8/src/simulator.h +43 -0
  377. data/lib/libv8/v8/src/small-pointer-list.h +163 -0
  378. data/lib/libv8/v8/src/smart-pointer.h +109 -0
  379. data/lib/libv8/v8/src/snapshot-common.cc +83 -0
  380. data/lib/libv8/v8/src/snapshot-empty.cc +54 -0
  381. data/lib/libv8/v8/src/snapshot.h +91 -0
  382. data/lib/libv8/v8/src/spaces-inl.h +529 -0
  383. data/lib/libv8/v8/src/spaces.cc +3145 -0
  384. data/lib/libv8/v8/src/spaces.h +2369 -0
  385. data/lib/libv8/v8/src/splay-tree-inl.h +310 -0
  386. data/lib/libv8/v8/src/splay-tree.h +205 -0
  387. data/lib/libv8/v8/src/string-search.cc +41 -0
  388. data/lib/libv8/v8/src/string-search.h +568 -0
  389. data/lib/libv8/v8/src/string-stream.cc +592 -0
  390. data/lib/libv8/v8/src/string-stream.h +191 -0
  391. data/lib/libv8/v8/src/string.js +994 -0
  392. data/lib/libv8/v8/src/strtod.cc +440 -0
  393. data/lib/libv8/v8/src/strtod.h +40 -0
  394. data/lib/libv8/v8/src/stub-cache.cc +1965 -0
  395. data/lib/libv8/v8/src/stub-cache.h +924 -0
  396. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +3925 -0
  397. data/lib/libv8/v8/src/token.cc +63 -0
  398. data/lib/libv8/v8/src/token.h +288 -0
  399. data/lib/libv8/v8/src/type-info.cc +507 -0
  400. data/lib/libv8/v8/src/type-info.h +272 -0
  401. data/lib/libv8/v8/src/unbound-queue-inl.h +95 -0
  402. data/lib/libv8/v8/src/unbound-queue.h +69 -0
  403. data/lib/libv8/v8/src/unicode-inl.h +238 -0
  404. data/lib/libv8/v8/src/unicode.cc +1624 -0
  405. data/lib/libv8/v8/src/unicode.h +280 -0
  406. data/lib/libv8/v8/src/uri.js +408 -0
  407. data/lib/libv8/v8/src/utils-inl.h +48 -0
  408. data/lib/libv8/v8/src/utils.cc +371 -0
  409. data/lib/libv8/v8/src/utils.h +800 -0
  410. data/lib/libv8/v8/src/v8-counters.cc +62 -0
  411. data/lib/libv8/v8/src/v8-counters.h +314 -0
  412. data/lib/libv8/v8/src/v8.cc +213 -0
  413. data/lib/libv8/v8/src/v8.h +131 -0
  414. data/lib/libv8/v8/src/v8checks.h +64 -0
  415. data/lib/libv8/v8/src/v8dll-main.cc +44 -0
  416. data/lib/libv8/v8/src/v8globals.h +512 -0
  417. data/lib/libv8/v8/src/v8memory.h +82 -0
  418. data/lib/libv8/v8/src/v8natives.js +1310 -0
  419. data/lib/libv8/v8/src/v8preparserdll-main.cc +39 -0
  420. data/lib/libv8/v8/src/v8threads.cc +464 -0
  421. data/lib/libv8/v8/src/v8threads.h +165 -0
  422. data/lib/libv8/v8/src/v8utils.h +319 -0
  423. data/lib/libv8/v8/src/variables.cc +114 -0
  424. data/lib/libv8/v8/src/variables.h +167 -0
  425. data/lib/libv8/v8/src/version.cc +116 -0
  426. data/lib/libv8/v8/src/version.h +68 -0
  427. data/lib/libv8/v8/src/vm-state-inl.h +138 -0
  428. data/lib/libv8/v8/src/vm-state.h +71 -0
  429. data/lib/libv8/v8/src/win32-headers.h +96 -0
  430. data/lib/libv8/v8/src/x64/assembler-x64-inl.h +462 -0
  431. data/lib/libv8/v8/src/x64/assembler-x64.cc +3027 -0
  432. data/lib/libv8/v8/src/x64/assembler-x64.h +1633 -0
  433. data/lib/libv8/v8/src/x64/builtins-x64.cc +1520 -0
  434. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +5132 -0
  435. data/lib/libv8/v8/src/x64/code-stubs-x64.h +514 -0
  436. data/lib/libv8/v8/src/x64/codegen-x64.cc +146 -0
  437. data/lib/libv8/v8/src/x64/codegen-x64.h +76 -0
  438. data/lib/libv8/v8/src/x64/cpu-x64.cc +88 -0
  439. data/lib/libv8/v8/src/x64/debug-x64.cc +319 -0
  440. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +815 -0
  441. data/lib/libv8/v8/src/x64/disasm-x64.cc +1832 -0
  442. data/lib/libv8/v8/src/x64/frames-x64.cc +45 -0
  443. data/lib/libv8/v8/src/x64/frames-x64.h +130 -0
  444. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +4318 -0
  445. data/lib/libv8/v8/src/x64/ic-x64.cc +1608 -0
  446. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +4267 -0
  447. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +367 -0
  448. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  449. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.h +74 -0
  450. data/lib/libv8/v8/src/x64/lithium-x64.cc +2202 -0
  451. data/lib/libv8/v8/src/x64/lithium-x64.h +2333 -0
  452. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +3745 -0
  453. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +1290 -0
  454. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  455. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  456. data/lib/libv8/v8/src/x64/simulator-x64.cc +27 -0
  457. data/lib/libv8/v8/src/x64/simulator-x64.h +72 -0
  458. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +3610 -0
  459. data/lib/libv8/v8/src/zone-inl.h +140 -0
  460. data/lib/libv8/v8/src/zone.cc +196 -0
  461. data/lib/libv8/v8/src/zone.h +240 -0
  462. data/lib/libv8/v8/tools/codemap.js +265 -0
  463. data/lib/libv8/v8/tools/consarray.js +93 -0
  464. data/lib/libv8/v8/tools/csvparser.js +78 -0
  465. data/lib/libv8/v8/tools/disasm.py +92 -0
  466. data/lib/libv8/v8/tools/freebsd-tick-processor +10 -0
  467. data/lib/libv8/v8/tools/gc-nvp-trace-processor.py +342 -0
  468. data/lib/libv8/v8/tools/gcmole/README +62 -0
  469. data/lib/libv8/v8/tools/gcmole/gccause.lua +60 -0
  470. data/lib/libv8/v8/tools/gcmole/gcmole.cc +1261 -0
  471. data/lib/libv8/v8/tools/gcmole/gcmole.lua +378 -0
  472. data/lib/libv8/v8/tools/generate-ten-powers.scm +286 -0
  473. data/lib/libv8/v8/tools/grokdump.py +841 -0
  474. data/lib/libv8/v8/tools/gyp/v8.gyp +995 -0
  475. data/lib/libv8/v8/tools/js2c.py +364 -0
  476. data/lib/libv8/v8/tools/jsmin.py +280 -0
  477. data/lib/libv8/v8/tools/linux-tick-processor +35 -0
  478. data/lib/libv8/v8/tools/ll_prof.py +942 -0
  479. data/lib/libv8/v8/tools/logreader.js +185 -0
  480. data/lib/libv8/v8/tools/mac-nm +18 -0
  481. data/lib/libv8/v8/tools/mac-tick-processor +6 -0
  482. data/lib/libv8/v8/tools/oom_dump/README +31 -0
  483. data/lib/libv8/v8/tools/oom_dump/SConstruct +42 -0
  484. data/lib/libv8/v8/tools/oom_dump/oom_dump.cc +288 -0
  485. data/lib/libv8/v8/tools/presubmit.py +305 -0
  486. data/lib/libv8/v8/tools/process-heap-prof.py +120 -0
  487. data/lib/libv8/v8/tools/profile.js +751 -0
  488. data/lib/libv8/v8/tools/profile_view.js +219 -0
  489. data/lib/libv8/v8/tools/run-valgrind.py +77 -0
  490. data/lib/libv8/v8/tools/splaytree.js +316 -0
  491. data/lib/libv8/v8/tools/stats-viewer.py +468 -0
  492. data/lib/libv8/v8/tools/test.py +1510 -0
  493. data/lib/libv8/v8/tools/tickprocessor-driver.js +59 -0
  494. data/lib/libv8/v8/tools/tickprocessor.js +877 -0
  495. data/lib/libv8/v8/tools/utils.py +96 -0
  496. data/lib/libv8/v8/tools/visual_studio/README.txt +12 -0
  497. data/lib/libv8/v8/tools/windows-tick-processor.bat +30 -0
  498. data/lib/libv8/version.rb +5 -0
  499. data/libv8.gemspec +36 -0
  500. metadata +578 -0
@@ -0,0 +1,45 @@
1
+ // Copyright 2010 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_X64)
31
+
32
+ #include "frames-inl.h"
33
+
34
+ namespace v8 {
35
+ namespace internal {
36
+
37
+
38
+ Address ExitFrame::ComputeStackPointer(Address fp) {
39
+ return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
40
+ }
41
+
42
+
43
+ } } // namespace v8::internal
44
+
45
+ #endif // V8_TARGET_ARCH_X64
@@ -0,0 +1,130 @@
1
+ // Copyright 2010 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_X64_FRAMES_X64_H_
29
+ #define V8_X64_FRAMES_X64_H_
30
+
31
+ namespace v8 {
32
+ namespace internal {
33
+
34
+ static const int kNumRegs = 16;
35
+ static const RegList kJSCallerSaved =
36
+ 1 << 0 | // rax
37
+ 1 << 1 | // rcx
38
+ 1 << 2 | // rdx
39
+ 1 << 3 | // rbx - used as a caller-saved register in JavaScript code
40
+ 1 << 7; // rdi - callee function
41
+
42
+ static const int kNumJSCallerSaved = 5;
43
+
44
+ typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
45
+
46
+ // Number of registers for which space is reserved in safepoints.
47
+ static const int kNumSafepointRegisters = 16;
48
+
49
+ // ----------------------------------------------------
50
+
51
+ class StackHandlerConstants : public AllStatic {
52
+ public:
53
+ static const int kNextOffset = 0 * kPointerSize;
54
+ static const int kFPOffset = 1 * kPointerSize;
55
+ static const int kStateOffset = 2 * kPointerSize;
56
+ static const int kPCOffset = 3 * kPointerSize;
57
+
58
+ static const int kSize = 4 * kPointerSize;
59
+ };
60
+
61
+
62
+ class EntryFrameConstants : public AllStatic {
63
+ public:
64
+ #ifdef _WIN64
65
+ static const int kCallerFPOffset = -10 * kPointerSize;
66
+ #else
67
+ static const int kCallerFPOffset = -8 * kPointerSize;
68
+ #endif
69
+ static const int kArgvOffset = 6 * kPointerSize;
70
+ };
71
+
72
+
73
+ class ExitFrameConstants : public AllStatic {
74
+ public:
75
+ static const int kCodeOffset = -2 * kPointerSize;
76
+ static const int kSPOffset = -1 * kPointerSize;
77
+
78
+ static const int kCallerFPOffset = +0 * kPointerSize;
79
+ static const int kCallerPCOffset = +1 * kPointerSize;
80
+
81
+ // FP-relative displacement of the caller's SP. It points just
82
+ // below the saved PC.
83
+ static const int kCallerSPDisplacement = +2 * kPointerSize;
84
+ };
85
+
86
+
87
+ class StandardFrameConstants : public AllStatic {
88
+ public:
89
+ static const int kExpressionsOffset = -3 * kPointerSize;
90
+ static const int kMarkerOffset = -2 * kPointerSize;
91
+ static const int kContextOffset = -1 * kPointerSize;
92
+ static const int kCallerFPOffset = 0 * kPointerSize;
93
+ static const int kCallerPCOffset = +1 * kPointerSize;
94
+ static const int kCallerSPOffset = +2 * kPointerSize;
95
+ };
96
+
97
+
98
+ class JavaScriptFrameConstants : public AllStatic {
99
+ public:
100
+ // FP-relative.
101
+ static const int kLocal0Offset = StandardFrameConstants::kExpressionsOffset;
102
+ static const int kLastParameterOffset = +2 * kPointerSize;
103
+ static const int kFunctionOffset = StandardFrameConstants::kMarkerOffset;
104
+
105
+ // Caller SP-relative.
106
+ static const int kParam0Offset = -2 * kPointerSize;
107
+ static const int kReceiverOffset = -1 * kPointerSize;
108
+ };
109
+
110
+
111
+ class ArgumentsAdaptorFrameConstants : public AllStatic {
112
+ public:
113
+ static const int kLengthOffset = StandardFrameConstants::kExpressionsOffset;
114
+ };
115
+
116
+
117
+ class InternalFrameConstants : public AllStatic {
118
+ public:
119
+ static const int kCodeOffset = StandardFrameConstants::kExpressionsOffset;
120
+ };
121
+
122
+
123
+ inline Object* JavaScriptFrame::function_slot_object() const {
124
+ const int offset = JavaScriptFrameConstants::kFunctionOffset;
125
+ return Memory::Object_at(fp() + offset);
126
+ }
127
+
128
+ } } // namespace v8::internal
129
+
130
+ #endif // V8_X64_FRAMES_X64_H_
@@ -0,0 +1,4318 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_X64)
31
+
32
+ #include "code-stubs.h"
33
+ #include "codegen.h"
34
+ #include "compiler.h"
35
+ #include "debug.h"
36
+ #include "full-codegen.h"
37
+ #include "parser.h"
38
+ #include "scopes.h"
39
+ #include "stub-cache.h"
40
+
41
+ namespace v8 {
42
+ namespace internal {
43
+
44
+ #define __ ACCESS_MASM(masm_)
45
+
46
+
47
+ static unsigned GetPropertyId(Property* property) {
48
+ if (property->is_synthetic()) return AstNode::kNoNumber;
49
+ return property->id();
50
+ }
51
+
52
+
53
+ class JumpPatchSite BASE_EMBEDDED {
54
+ public:
55
+ explicit JumpPatchSite(MacroAssembler* masm)
56
+ : masm_(masm) {
57
+ #ifdef DEBUG
58
+ info_emitted_ = false;
59
+ #endif
60
+ }
61
+
62
+ ~JumpPatchSite() {
63
+ ASSERT(patch_site_.is_bound() == info_emitted_);
64
+ }
65
+
66
+ void EmitJumpIfNotSmi(Register reg,
67
+ Label* target,
68
+ Label::Distance near_jump = Label::kFar) {
69
+ __ testb(reg, Immediate(kSmiTagMask));
70
+ EmitJump(not_carry, target, near_jump); // Always taken before patched.
71
+ }
72
+
73
+ void EmitJumpIfSmi(Register reg,
74
+ Label* target,
75
+ Label::Distance near_jump = Label::kFar) {
76
+ __ testb(reg, Immediate(kSmiTagMask));
77
+ EmitJump(carry, target, near_jump); // Never taken before patched.
78
+ }
79
+
80
+ void EmitPatchInfo() {
81
+ int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
82
+ ASSERT(is_int8(delta_to_patch_site));
83
+ __ testl(rax, Immediate(delta_to_patch_site));
84
+ #ifdef DEBUG
85
+ info_emitted_ = true;
86
+ #endif
87
+ }
88
+
89
+ bool is_bound() const { return patch_site_.is_bound(); }
90
+
91
+ private:
92
+ // jc will be patched with jz, jnc will become jnz.
93
+ void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
94
+ ASSERT(!patch_site_.is_bound() && !info_emitted_);
95
+ ASSERT(cc == carry || cc == not_carry);
96
+ __ bind(&patch_site_);
97
+ __ j(cc, target, near_jump);
98
+ }
99
+
100
+ MacroAssembler* masm_;
101
+ Label patch_site_;
102
+ #ifdef DEBUG
103
+ bool info_emitted_;
104
+ #endif
105
+ };
106
+
107
+
108
+ // Generate code for a JS function. On entry to the function the receiver
109
+ // and arguments have been pushed on the stack left to right, with the
110
+ // return address on top of them. The actual argument count matches the
111
+ // formal parameter count expected by the function.
112
+ //
113
+ // The live registers are:
114
+ // o rdi: the JS function object being called (ie, ourselves)
115
+ // o rsi: our context
116
+ // o rbp: our caller's frame pointer
117
+ // o rsp: stack pointer (pointing to return address)
118
+ //
119
+ // The function builds a JS frame. Please see JavaScriptFrameConstants in
120
+ // frames-x64.h for its layout.
121
+ void FullCodeGenerator::Generate(CompilationInfo* info) {
122
+ ASSERT(info_ == NULL);
123
+ info_ = info;
124
+ SetFunctionPosition(function());
125
+ Comment cmnt(masm_, "[ function compiled by full code generator");
126
+
127
+ #ifdef DEBUG
128
+ if (strlen(FLAG_stop_at) > 0 &&
129
+ info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
130
+ __ int3();
131
+ }
132
+ #endif
133
+
134
+ // Strict mode functions need to replace the receiver with undefined
135
+ // when called as functions (without an explicit receiver
136
+ // object). rcx is zero for method calls and non-zero for function
137
+ // calls.
138
+ if (info->is_strict_mode()) {
139
+ Label ok;
140
+ __ testq(rcx, rcx);
141
+ __ j(zero, &ok, Label::kNear);
142
+ // +1 for return address.
143
+ int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
144
+ __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
145
+ __ movq(Operand(rsp, receiver_offset), kScratchRegister);
146
+ __ bind(&ok);
147
+ }
148
+
149
+ __ push(rbp); // Caller's frame pointer.
150
+ __ movq(rbp, rsp);
151
+ __ push(rsi); // Callee's context.
152
+ __ push(rdi); // Callee's JS Function.
153
+
154
+ { Comment cmnt(masm_, "[ Allocate locals");
155
+ int locals_count = scope()->num_stack_slots();
156
+ if (locals_count == 1) {
157
+ __ PushRoot(Heap::kUndefinedValueRootIndex);
158
+ } else if (locals_count > 1) {
159
+ __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
160
+ for (int i = 0; i < locals_count; i++) {
161
+ __ push(rdx);
162
+ }
163
+ }
164
+ }
165
+
166
+ bool function_in_register = true;
167
+
168
+ // Possibly allocate a local context.
169
+ int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
170
+ if (heap_slots > 0) {
171
+ Comment cmnt(masm_, "[ Allocate local context");
172
+ // Argument to NewContext is the function, which is still in rdi.
173
+ __ push(rdi);
174
+ if (heap_slots <= FastNewContextStub::kMaximumSlots) {
175
+ FastNewContextStub stub(heap_slots);
176
+ __ CallStub(&stub);
177
+ } else {
178
+ __ CallRuntime(Runtime::kNewContext, 1);
179
+ }
180
+ function_in_register = false;
181
+ // Context is returned in both rax and rsi. It replaces the context
182
+ // passed to us. It's saved in the stack and kept live in rsi.
183
+ __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
184
+
185
+ // Copy any necessary parameters into the context.
186
+ int num_parameters = scope()->num_parameters();
187
+ for (int i = 0; i < num_parameters; i++) {
188
+ Slot* slot = scope()->parameter(i)->AsSlot();
189
+ if (slot != NULL && slot->type() == Slot::CONTEXT) {
190
+ int parameter_offset = StandardFrameConstants::kCallerSPOffset +
191
+ (num_parameters - 1 - i) * kPointerSize;
192
+ // Load parameter from stack.
193
+ __ movq(rax, Operand(rbp, parameter_offset));
194
+ // Store it in the context.
195
+ int context_offset = Context::SlotOffset(slot->index());
196
+ __ movq(Operand(rsi, context_offset), rax);
197
+ // Update the write barrier. This clobbers all involved
198
+ // registers, so we have use a third register to avoid
199
+ // clobbering rsi.
200
+ __ movq(rcx, rsi);
201
+ __ RecordWrite(rcx, context_offset, rax, rbx);
202
+ }
203
+ }
204
+ }
205
+
206
+ // Possibly allocate an arguments object.
207
+ Variable* arguments = scope()->arguments();
208
+ if (arguments != NULL) {
209
+ // Arguments object must be allocated after the context object, in
210
+ // case the "arguments" or ".arguments" variables are in the context.
211
+ Comment cmnt(masm_, "[ Allocate arguments object");
212
+ if (function_in_register) {
213
+ __ push(rdi);
214
+ } else {
215
+ __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
216
+ }
217
+ // The receiver is just before the parameters on the caller's stack.
218
+ int offset = scope()->num_parameters() * kPointerSize;
219
+ __ lea(rdx,
220
+ Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
221
+ __ push(rdx);
222
+ __ Push(Smi::FromInt(scope()->num_parameters()));
223
+ // Arguments to ArgumentsAccessStub:
224
+ // function, receiver address, parameter count.
225
+ // The stub will rewrite receiver and parameter count if the previous
226
+ // stack frame was an arguments adapter frame.
227
+ ArgumentsAccessStub stub(
228
+ is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
229
+ : ArgumentsAccessStub::NEW_NON_STRICT);
230
+ __ CallStub(&stub);
231
+
232
+ Variable* arguments_shadow = scope()->arguments_shadow();
233
+ if (arguments_shadow != NULL) {
234
+ // Store new arguments object in both "arguments" and ".arguments" slots.
235
+ __ movq(rcx, rax);
236
+ Move(arguments_shadow->AsSlot(), rcx, rbx, rdx);
237
+ }
238
+ Move(arguments->AsSlot(), rax, rbx, rdx);
239
+ }
240
+
241
+ if (FLAG_trace) {
242
+ __ CallRuntime(Runtime::kTraceEnter, 0);
243
+ }
244
+
245
+ // Visit the declarations and body unless there is an illegal
246
+ // redeclaration.
247
+ if (scope()->HasIllegalRedeclaration()) {
248
+ Comment cmnt(masm_, "[ Declarations");
249
+ scope()->VisitIllegalRedeclaration(this);
250
+ } else {
251
+ { Comment cmnt(masm_, "[ Declarations");
252
+ // For named function expressions, declare the function name as a
253
+ // constant.
254
+ if (scope()->is_function_scope() && scope()->function() != NULL) {
255
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL);
256
+ }
257
+ VisitDeclarations(scope()->declarations());
258
+ }
259
+
260
+ { Comment cmnt(masm_, "[ Stack check");
261
+ PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
262
+ Label ok;
263
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
264
+ __ j(above_equal, &ok, Label::kNear);
265
+ StackCheckStub stub;
266
+ __ CallStub(&stub);
267
+ __ bind(&ok);
268
+ }
269
+
270
+ { Comment cmnt(masm_, "[ Body");
271
+ ASSERT(loop_depth() == 0);
272
+ VisitStatements(function()->body());
273
+ ASSERT(loop_depth() == 0);
274
+ }
275
+ }
276
+
277
+ // Always emit a 'return undefined' in case control fell off the end of
278
+ // the body.
279
+ { Comment cmnt(masm_, "[ return <undefined>;");
280
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
281
+ EmitReturnSequence();
282
+ }
283
+ }
284
+
285
+
286
+ void FullCodeGenerator::ClearAccumulator() {
287
+ __ Set(rax, 0);
288
+ }
289
+
290
+
291
+ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
292
+ Comment cmnt(masm_, "[ Stack check");
293
+ Label ok;
294
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
295
+ __ j(above_equal, &ok, Label::kNear);
296
+ StackCheckStub stub;
297
+ __ CallStub(&stub);
298
+ // Record a mapping of this PC offset to the OSR id. This is used to find
299
+ // the AST id from the unoptimized code in order to use it as a key into
300
+ // the deoptimization input data found in the optimized code.
301
+ RecordStackCheck(stmt->OsrEntryId());
302
+
303
+ // Loop stack checks can be patched to perform on-stack replacement. In
304
+ // order to decide whether or not to perform OSR we embed the loop depth
305
+ // in a test instruction after the call so we can extract it from the OSR
306
+ // builtin.
307
+ ASSERT(loop_depth() > 0);
308
+ __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
309
+
310
+ __ bind(&ok);
311
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
312
+ // Record a mapping of the OSR id to this PC. This is used if the OSR
313
+ // entry becomes the target of a bailout. We don't expect it to be, but
314
+ // we want it to work if it is.
315
+ PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
316
+ }
317
+
318
+
319
+ void FullCodeGenerator::EmitReturnSequence() {
320
+ Comment cmnt(masm_, "[ Return sequence");
321
+ if (return_label_.is_bound()) {
322
+ __ jmp(&return_label_);
323
+ } else {
324
+ __ bind(&return_label_);
325
+ if (FLAG_trace) {
326
+ __ push(rax);
327
+ __ CallRuntime(Runtime::kTraceExit, 1);
328
+ }
329
+ #ifdef DEBUG
330
+ // Add a label for checking the size of the code used for returning.
331
+ Label check_exit_codesize;
332
+ masm_->bind(&check_exit_codesize);
333
+ #endif
334
+ CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
335
+ __ RecordJSReturn();
336
+ // Do not use the leave instruction here because it is too short to
337
+ // patch with the code required by the debugger.
338
+ __ movq(rsp, rbp);
339
+ __ pop(rbp);
340
+
341
+ int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
342
+ __ Ret(arguments_bytes, rcx);
343
+
344
+ #ifdef ENABLE_DEBUGGER_SUPPORT
345
+ // Add padding that will be overwritten by a debugger breakpoint. We
346
+ // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
347
+ // (3 + 1 + 3).
348
+ const int kPadding = Assembler::kJSReturnSequenceLength - 7;
349
+ for (int i = 0; i < kPadding; ++i) {
350
+ masm_->int3();
351
+ }
352
+ // Check that the size of the code used for returning is large enough
353
+ // for the debugger's requirements.
354
+ ASSERT(Assembler::kJSReturnSequenceLength <=
355
+ masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
356
+ #endif
357
+ }
358
+ }
359
+
360
+
361
+ void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
362
+ }
363
+
364
+
365
+ void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
366
+ MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
367
+ __ movq(result_register(), slot_operand);
368
+ }
369
+
370
+
371
+ void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
372
+ MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
373
+ __ push(slot_operand);
374
+ }
375
+
376
+
377
+ void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
378
+ codegen()->Move(result_register(), slot);
379
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
380
+ codegen()->DoTest(true_label_, false_label_, fall_through_);
381
+ }
382
+
383
+
384
+ void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
385
+ }
386
+
387
+
388
+ void FullCodeGenerator::AccumulatorValueContext::Plug(
389
+ Heap::RootListIndex index) const {
390
+ __ LoadRoot(result_register(), index);
391
+ }
392
+
393
+
394
+ void FullCodeGenerator::StackValueContext::Plug(
395
+ Heap::RootListIndex index) const {
396
+ __ PushRoot(index);
397
+ }
398
+
399
+
400
+ void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
401
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
402
+ true,
403
+ true_label_,
404
+ false_label_);
405
+ if (index == Heap::kUndefinedValueRootIndex ||
406
+ index == Heap::kNullValueRootIndex ||
407
+ index == Heap::kFalseValueRootIndex) {
408
+ if (false_label_ != fall_through_) __ jmp(false_label_);
409
+ } else if (index == Heap::kTrueValueRootIndex) {
410
+ if (true_label_ != fall_through_) __ jmp(true_label_);
411
+ } else {
412
+ __ LoadRoot(result_register(), index);
413
+ codegen()->DoTest(true_label_, false_label_, fall_through_);
414
+ }
415
+ }
416
+
417
+
418
+ void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
419
+ }
420
+
421
+
422
+ void FullCodeGenerator::AccumulatorValueContext::Plug(
423
+ Handle<Object> lit) const {
424
+ __ Move(result_register(), lit);
425
+ }
426
+
427
+
428
+ void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
429
+ __ Push(lit);
430
+ }
431
+
432
+
433
+ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
434
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
435
+ true,
436
+ true_label_,
437
+ false_label_);
438
+ ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
439
+ if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
440
+ if (false_label_ != fall_through_) __ jmp(false_label_);
441
+ } else if (lit->IsTrue() || lit->IsJSObject()) {
442
+ if (true_label_ != fall_through_) __ jmp(true_label_);
443
+ } else if (lit->IsString()) {
444
+ if (String::cast(*lit)->length() == 0) {
445
+ if (false_label_ != fall_through_) __ jmp(false_label_);
446
+ } else {
447
+ if (true_label_ != fall_through_) __ jmp(true_label_);
448
+ }
449
+ } else if (lit->IsSmi()) {
450
+ if (Smi::cast(*lit)->value() == 0) {
451
+ if (false_label_ != fall_through_) __ jmp(false_label_);
452
+ } else {
453
+ if (true_label_ != fall_through_) __ jmp(true_label_);
454
+ }
455
+ } else {
456
+ // For simplicity we always test the accumulator register.
457
+ __ Move(result_register(), lit);
458
+ codegen()->DoTest(true_label_, false_label_, fall_through_);
459
+ }
460
+ }
461
+
462
+
463
+ void FullCodeGenerator::EffectContext::DropAndPlug(int count,
464
+ Register reg) const {
465
+ ASSERT(count > 0);
466
+ __ Drop(count);
467
+ }
468
+
469
+
470
+ void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
471
+ int count,
472
+ Register reg) const {
473
+ ASSERT(count > 0);
474
+ __ Drop(count);
475
+ __ Move(result_register(), reg);
476
+ }
477
+
478
+
479
+ void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
480
+ Register reg) const {
481
+ ASSERT(count > 0);
482
+ if (count > 1) __ Drop(count - 1);
483
+ __ movq(Operand(rsp, 0), reg);
484
+ }
485
+
486
+
487
+ void FullCodeGenerator::TestContext::DropAndPlug(int count,
488
+ Register reg) const {
489
+ ASSERT(count > 0);
490
+ // For simplicity we always test the accumulator register.
491
+ __ Drop(count);
492
+ __ Move(result_register(), reg);
493
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
494
+ codegen()->DoTest(true_label_, false_label_, fall_through_);
495
+ }
496
+
497
+
498
+ void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
499
+ Label* materialize_false) const {
500
+ ASSERT(materialize_true == materialize_false);
501
+ __ bind(materialize_true);
502
+ }
503
+
504
+
505
+ void FullCodeGenerator::AccumulatorValueContext::Plug(
506
+ Label* materialize_true,
507
+ Label* materialize_false) const {
508
+ Label done;
509
+ __ bind(materialize_true);
510
+ __ Move(result_register(), isolate()->factory()->true_value());
511
+ __ jmp(&done, Label::kNear);
512
+ __ bind(materialize_false);
513
+ __ Move(result_register(), isolate()->factory()->false_value());
514
+ __ bind(&done);
515
+ }
516
+
517
+
518
+ void FullCodeGenerator::StackValueContext::Plug(
519
+ Label* materialize_true,
520
+ Label* materialize_false) const {
521
+ Label done;
522
+ __ bind(materialize_true);
523
+ __ Push(isolate()->factory()->true_value());
524
+ __ jmp(&done, Label::kNear);
525
+ __ bind(materialize_false);
526
+ __ Push(isolate()->factory()->false_value());
527
+ __ bind(&done);
528
+ }
529
+
530
+
531
+ void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
532
+ Label* materialize_false) const {
533
+ ASSERT(materialize_true == true_label_);
534
+ ASSERT(materialize_false == false_label_);
535
+ }
536
+
537
+
538
+ void FullCodeGenerator::EffectContext::Plug(bool flag) const {
539
+ }
540
+
541
+
542
+ void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
543
+ Heap::RootListIndex value_root_index =
544
+ flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
545
+ __ LoadRoot(result_register(), value_root_index);
546
+ }
547
+
548
+
549
+ void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
550
+ Heap::RootListIndex value_root_index =
551
+ flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
552
+ __ PushRoot(value_root_index);
553
+ }
554
+
555
+
556
+ void FullCodeGenerator::TestContext::Plug(bool flag) const {
557
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
558
+ true,
559
+ true_label_,
560
+ false_label_);
561
+ if (flag) {
562
+ if (true_label_ != fall_through_) __ jmp(true_label_);
563
+ } else {
564
+ if (false_label_ != fall_through_) __ jmp(false_label_);
565
+ }
566
+ }
567
+
568
+
569
+ void FullCodeGenerator::DoTest(Label* if_true,
570
+ Label* if_false,
571
+ Label* fall_through) {
572
+ ToBooleanStub stub;
573
+ __ push(result_register());
574
+ __ CallStub(&stub);
575
+ __ testq(rax, rax);
576
+ // The stub returns nonzero for true.
577
+ Split(not_zero, if_true, if_false, fall_through);
578
+ }
579
+
580
+
581
+ void FullCodeGenerator::Split(Condition cc,
582
+ Label* if_true,
583
+ Label* if_false,
584
+ Label* fall_through) {
585
+ if (if_false == fall_through) {
586
+ __ j(cc, if_true);
587
+ } else if (if_true == fall_through) {
588
+ __ j(NegateCondition(cc), if_false);
589
+ } else {
590
+ __ j(cc, if_true);
591
+ __ jmp(if_false);
592
+ }
593
+ }
594
+
595
+
596
+ MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
597
+ switch (slot->type()) {
598
+ case Slot::PARAMETER:
599
+ case Slot::LOCAL:
600
+ return Operand(rbp, SlotOffset(slot));
601
+ case Slot::CONTEXT: {
602
+ int context_chain_length =
603
+ scope()->ContextChainLength(slot->var()->scope());
604
+ __ LoadContext(scratch, context_chain_length);
605
+ return ContextOperand(scratch, slot->index());
606
+ }
607
+ case Slot::LOOKUP:
608
+ UNREACHABLE();
609
+ }
610
+ UNREACHABLE();
611
+ return Operand(rax, 0);
612
+ }
613
+
614
+
615
+ void FullCodeGenerator::Move(Register destination, Slot* source) {
616
+ MemOperand location = EmitSlotSearch(source, destination);
617
+ __ movq(destination, location);
618
+ }
619
+
620
+
621
+ void FullCodeGenerator::Move(Slot* dst,
622
+ Register src,
623
+ Register scratch1,
624
+ Register scratch2) {
625
+ ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
626
+ ASSERT(!scratch1.is(src) && !scratch2.is(src));
627
+ MemOperand location = EmitSlotSearch(dst, scratch1);
628
+ __ movq(location, src);
629
+ // Emit the write barrier code if the location is in the heap.
630
+ if (dst->type() == Slot::CONTEXT) {
631
+ int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
632
+ __ RecordWrite(scratch1, offset, src, scratch2);
633
+ }
634
+ }
635
+
636
+
637
+ void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
638
+ bool should_normalize,
639
+ Label* if_true,
640
+ Label* if_false) {
641
+ // Only prepare for bailouts before splits if we're in a test
642
+ // context. Otherwise, we let the Visit function deal with the
643
+ // preparation to avoid preparing with the same AST id twice.
644
+ if (!context()->IsTest() || !info_->IsOptimizable()) return;
645
+
646
+ Label skip;
647
+ if (should_normalize) __ jmp(&skip, Label::kNear);
648
+
649
+ ForwardBailoutStack* current = forward_bailout_stack_;
650
+ while (current != NULL) {
651
+ PrepareForBailout(current->expr(), state);
652
+ current = current->parent();
653
+ }
654
+
655
+ if (should_normalize) {
656
+ __ CompareRoot(rax, Heap::kTrueValueRootIndex);
657
+ Split(equal, if_true, if_false, NULL);
658
+ __ bind(&skip);
659
+ }
660
+ }
661
+
662
+
663
+ void FullCodeGenerator::EmitDeclaration(Variable* variable,
664
+ Variable::Mode mode,
665
+ FunctionLiteral* function) {
666
+ Comment cmnt(masm_, "[ Declaration");
667
+ ASSERT(variable != NULL); // Must have been resolved.
668
+ Slot* slot = variable->AsSlot();
669
+ Property* prop = variable->AsProperty();
670
+
671
+ if (slot != NULL) {
672
+ switch (slot->type()) {
673
+ case Slot::PARAMETER:
674
+ case Slot::LOCAL:
675
+ if (mode == Variable::CONST) {
676
+ __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
677
+ __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
678
+ } else if (function != NULL) {
679
+ VisitForAccumulatorValue(function);
680
+ __ movq(Operand(rbp, SlotOffset(slot)), result_register());
681
+ }
682
+ break;
683
+
684
+ case Slot::CONTEXT:
685
+ // We bypass the general EmitSlotSearch because we know more about
686
+ // this specific context.
687
+
688
+ // The variable in the decl always resides in the current context.
689
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
690
+ if (FLAG_debug_code) {
691
+ // Check if we have the correct context pointer.
692
+ __ movq(rbx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
693
+ __ cmpq(rbx, rsi);
694
+ __ Check(equal, "Unexpected declaration in current context.");
695
+ }
696
+ if (mode == Variable::CONST) {
697
+ __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
698
+ __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
699
+ // No write barrier since the hole value is in old space.
700
+ } else if (function != NULL) {
701
+ VisitForAccumulatorValue(function);
702
+ __ movq(ContextOperand(rsi, slot->index()), result_register());
703
+ int offset = Context::SlotOffset(slot->index());
704
+ __ movq(rbx, rsi);
705
+ __ RecordWrite(rbx, offset, result_register(), rcx);
706
+ }
707
+ break;
708
+
709
+ case Slot::LOOKUP: {
710
+ __ push(rsi);
711
+ __ Push(variable->name());
712
+ // Declaration nodes are always introduced in one of two modes.
713
+ ASSERT(mode == Variable::VAR || mode == Variable::CONST);
714
+ PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
715
+ __ Push(Smi::FromInt(attr));
716
+ // Push initial value, if any.
717
+ // Note: For variables we must not push an initial value (such as
718
+ // 'undefined') because we may have a (legal) redeclaration and we
719
+ // must not destroy the current value.
720
+ if (mode == Variable::CONST) {
721
+ __ PushRoot(Heap::kTheHoleValueRootIndex);
722
+ } else if (function != NULL) {
723
+ VisitForStackValue(function);
724
+ } else {
725
+ __ Push(Smi::FromInt(0)); // no initial value!
726
+ }
727
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
728
+ break;
729
+ }
730
+ }
731
+
732
+ } else if (prop != NULL) {
733
+ // A const declaration aliasing a parameter is an illegal redeclaration.
734
+ ASSERT(mode != Variable::CONST);
735
+ if (function != NULL) {
736
+ // We are declaring a function that rewrites to a property.
737
+ // Use (keyed) IC to set the initial value. We cannot visit the
738
+ // rewrite because it's shared and we risk recording duplicate AST
739
+ // IDs for bailouts from optimized code.
740
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
741
+ { AccumulatorValueContext for_object(this);
742
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
743
+ }
744
+ __ push(rax);
745
+ VisitForAccumulatorValue(function);
746
+ __ pop(rdx);
747
+ ASSERT(prop->key()->AsLiteral() != NULL &&
748
+ prop->key()->AsLiteral()->handle()->IsSmi());
749
+ __ Move(rcx, prop->key()->AsLiteral()->handle());
750
+
751
+ Handle<Code> ic = is_strict_mode()
752
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
753
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
754
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
755
+ }
756
+ }
757
+ }
758
+
759
+
760
+ void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
761
+ EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
762
+ }
763
+
764
+
765
+ void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
766
+ // Call the runtime to declare the globals.
767
+ __ push(rsi); // The context is the first argument.
768
+ __ Push(pairs);
769
+ __ Push(Smi::FromInt(is_eval() ? 1 : 0));
770
+ __ Push(Smi::FromInt(strict_mode_flag()));
771
+ __ CallRuntime(Runtime::kDeclareGlobals, 4);
772
+ // Return value is ignored.
773
+ }
774
+
775
+
776
+ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
777
+ Comment cmnt(masm_, "[ SwitchStatement");
778
+ Breakable nested_statement(this, stmt);
779
+ SetStatementPosition(stmt);
780
+
781
+ // Keep the switch value on the stack until a case matches.
782
+ VisitForStackValue(stmt->tag());
783
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
784
+
785
+ ZoneList<CaseClause*>* clauses = stmt->cases();
786
+ CaseClause* default_clause = NULL; // Can occur anywhere in the list.
787
+
788
+ Label next_test; // Recycled for each test.
789
+ // Compile all the tests with branches to their bodies.
790
+ for (int i = 0; i < clauses->length(); i++) {
791
+ CaseClause* clause = clauses->at(i);
792
+ clause->body_target()->Unuse();
793
+
794
+ // The default is not a test, but remember it as final fall through.
795
+ if (clause->is_default()) {
796
+ default_clause = clause;
797
+ continue;
798
+ }
799
+
800
+ Comment cmnt(masm_, "[ Case comparison");
801
+ __ bind(&next_test);
802
+ next_test.Unuse();
803
+
804
+ // Compile the label expression.
805
+ VisitForAccumulatorValue(clause->label());
806
+
807
+ // Perform the comparison as if via '==='.
808
+ __ movq(rdx, Operand(rsp, 0)); // Switch value.
809
+ bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
810
+ JumpPatchSite patch_site(masm_);
811
+ if (inline_smi_code) {
812
+ Label slow_case;
813
+ __ movq(rcx, rdx);
814
+ __ or_(rcx, rax);
815
+ patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
816
+
817
+ __ cmpq(rdx, rax);
818
+ __ j(not_equal, &next_test);
819
+ __ Drop(1); // Switch value is no longer needed.
820
+ __ jmp(clause->body_target());
821
+ __ bind(&slow_case);
822
+ }
823
+
824
+ // Record position before stub call for type feedback.
825
+ SetSourcePosition(clause->position());
826
+ Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
827
+ EmitCallIC(ic, &patch_site, clause->CompareId());
828
+
829
+ __ testq(rax, rax);
830
+ __ j(not_equal, &next_test);
831
+ __ Drop(1); // Switch value is no longer needed.
832
+ __ jmp(clause->body_target());
833
+ }
834
+
835
+ // Discard the test value and jump to the default if present, otherwise to
836
+ // the end of the statement.
837
+ __ bind(&next_test);
838
+ __ Drop(1); // Switch value is no longer needed.
839
+ if (default_clause == NULL) {
840
+ __ jmp(nested_statement.break_target());
841
+ } else {
842
+ __ jmp(default_clause->body_target());
843
+ }
844
+
845
+ // Compile all the case bodies.
846
+ for (int i = 0; i < clauses->length(); i++) {
847
+ Comment cmnt(masm_, "[ Case body");
848
+ CaseClause* clause = clauses->at(i);
849
+ __ bind(clause->body_target());
850
+ PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
851
+ VisitStatements(clause->statements());
852
+ }
853
+
854
+ __ bind(nested_statement.break_target());
855
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
856
+ }
857
+
858
+
859
+ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
860
+ Comment cmnt(masm_, "[ ForInStatement");
861
+ SetStatementPosition(stmt);
862
+
863
+ Label loop, exit;
864
+ ForIn loop_statement(this, stmt);
865
+ increment_loop_depth();
866
+
867
+ // Get the object to enumerate over. Both SpiderMonkey and JSC
868
+ // ignore null and undefined in contrast to the specification; see
869
+ // ECMA-262 section 12.6.4.
870
+ VisitForAccumulatorValue(stmt->enumerable());
871
+ __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
872
+ __ j(equal, &exit);
873
+ Register null_value = rdi;
874
+ __ LoadRoot(null_value, Heap::kNullValueRootIndex);
875
+ __ cmpq(rax, null_value);
876
+ __ j(equal, &exit);
877
+
878
+ // Convert the object to a JS object.
879
+ Label convert, done_convert;
880
+ __ JumpIfSmi(rax, &convert);
881
+ __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
882
+ __ j(above_equal, &done_convert);
883
+ __ bind(&convert);
884
+ __ push(rax);
885
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
886
+ __ bind(&done_convert);
887
+ __ push(rax);
888
+
889
+ // Check cache validity in generated code. This is a fast case for
890
+ // the JSObject::IsSimpleEnum cache validity checks. If we cannot
891
+ // guarantee cache validity, call the runtime system to check cache
892
+ // validity or get the property names in a fixed array.
893
+ Label next, call_runtime;
894
+ Register empty_fixed_array_value = r8;
895
+ __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
896
+ Register empty_descriptor_array_value = r9;
897
+ __ LoadRoot(empty_descriptor_array_value,
898
+ Heap::kEmptyDescriptorArrayRootIndex);
899
+ __ movq(rcx, rax);
900
+ __ bind(&next);
901
+
902
+ // Check that there are no elements. Register rcx contains the
903
+ // current JS object we've reached through the prototype chain.
904
+ __ cmpq(empty_fixed_array_value,
905
+ FieldOperand(rcx, JSObject::kElementsOffset));
906
+ __ j(not_equal, &call_runtime);
907
+
908
+ // Check that instance descriptors are not empty so that we can
909
+ // check for an enum cache. Leave the map in rbx for the subsequent
910
+ // prototype load.
911
+ __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
912
+ __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
913
+ __ JumpIfSmi(rdx, &call_runtime);
914
+
915
+ // Check that there is an enum cache in the non-empty instance
916
+ // descriptors (rdx). This is the case if the next enumeration
917
+ // index field does not contain a smi.
918
+ __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
919
+ __ JumpIfSmi(rdx, &call_runtime);
920
+
921
+ // For all objects but the receiver, check that the cache is empty.
922
+ Label check_prototype;
923
+ __ cmpq(rcx, rax);
924
+ __ j(equal, &check_prototype, Label::kNear);
925
+ __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
926
+ __ cmpq(rdx, empty_fixed_array_value);
927
+ __ j(not_equal, &call_runtime);
928
+
929
+ // Load the prototype from the map and loop if non-null.
930
+ __ bind(&check_prototype);
931
+ __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
932
+ __ cmpq(rcx, null_value);
933
+ __ j(not_equal, &next);
934
+
935
+ // The enum cache is valid. Load the map of the object being
936
+ // iterated over and use the cache for the iteration.
937
+ Label use_cache;
938
+ __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
939
+ __ jmp(&use_cache, Label::kNear);
940
+
941
+ // Get the set of properties to enumerate.
942
+ __ bind(&call_runtime);
943
+ __ push(rax); // Duplicate the enumerable object on the stack.
944
+ __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
945
+
946
+ // If we got a map from the runtime call, we can do a fast
947
+ // modification check. Otherwise, we got a fixed array, and we have
948
+ // to do a slow check.
949
+ Label fixed_array;
950
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
951
+ Heap::kMetaMapRootIndex);
952
+ __ j(not_equal, &fixed_array, Label::kNear);
953
+
954
+ // We got a map in register rax. Get the enumeration cache from it.
955
+ __ bind(&use_cache);
956
+ __ LoadInstanceDescriptors(rax, rcx);
957
+ __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
958
+ __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
959
+
960
+ // Setup the four remaining stack slots.
961
+ __ push(rax); // Map.
962
+ __ push(rdx); // Enumeration cache.
963
+ __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
964
+ __ push(rax); // Enumeration cache length (as smi).
965
+ __ Push(Smi::FromInt(0)); // Initial index.
966
+ __ jmp(&loop);
967
+
968
+ // We got a fixed array in register rax. Iterate through that.
969
+ __ bind(&fixed_array);
970
+ __ Push(Smi::FromInt(0)); // Map (0) - force slow check.
971
+ __ push(rax);
972
+ __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
973
+ __ push(rax); // Fixed array length (as smi).
974
+ __ Push(Smi::FromInt(0)); // Initial index.
975
+
976
+ // Generate code for doing the condition check.
977
+ __ bind(&loop);
978
+ __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
979
+ __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
980
+ __ j(above_equal, loop_statement.break_target());
981
+
982
+ // Get the current entry of the array into register rbx.
983
+ __ movq(rbx, Operand(rsp, 2 * kPointerSize));
984
+ SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
985
+ __ movq(rbx, FieldOperand(rbx,
986
+ index.reg,
987
+ index.scale,
988
+ FixedArray::kHeaderSize));
989
+
990
+ // Get the expected map from the stack or a zero map in the
991
+ // permanent slow case into register rdx.
992
+ __ movq(rdx, Operand(rsp, 3 * kPointerSize));
993
+
994
+ // Check if the expected map still matches that of the enumerable.
995
+ // If not, we have to filter the key.
996
+ Label update_each;
997
+ __ movq(rcx, Operand(rsp, 4 * kPointerSize));
998
+ __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
999
+ __ j(equal, &update_each, Label::kNear);
1000
+
1001
+ // Convert the entry to a string or null if it isn't a property
1002
+ // anymore. If the property has been removed while iterating, we
1003
+ // just skip it.
1004
+ __ push(rcx); // Enumerable.
1005
+ __ push(rbx); // Current entry.
1006
+ __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1007
+ __ Cmp(rax, Smi::FromInt(0));
1008
+ __ j(equal, loop_statement.continue_target());
1009
+ __ movq(rbx, rax);
1010
+
1011
+ // Update the 'each' property or variable from the possibly filtered
1012
+ // entry in register rbx.
1013
+ __ bind(&update_each);
1014
+ __ movq(result_register(), rbx);
1015
+ // Perform the assignment as if via '='.
1016
+ { EffectContext context(this);
1017
+ EmitAssignment(stmt->each(), stmt->AssignmentId());
1018
+ }
1019
+
1020
+ // Generate code for the body of the loop.
1021
+ Visit(stmt->body());
1022
+
1023
+ // Generate code for going to the next element by incrementing the
1024
+ // index (smi) stored on top of the stack.
1025
+ __ bind(loop_statement.continue_target());
1026
+ __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1027
+
1028
+ EmitStackCheck(stmt);
1029
+ __ jmp(&loop);
1030
+
1031
+ // Remove the pointers stored on the stack.
1032
+ __ bind(loop_statement.break_target());
1033
+ __ addq(rsp, Immediate(5 * kPointerSize));
1034
+
1035
+ // Exit and decrement the loop depth.
1036
+ __ bind(&exit);
1037
+ decrement_loop_depth();
1038
+ }
1039
+
1040
+
1041
+ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1042
+ bool pretenure) {
1043
+ // Use the fast case closure allocation code that allocates in new
1044
+ // space for nested functions that don't need literals cloning. If
1045
+ // we're running with the --always-opt or the --prepare-always-opt
1046
+ // flag, we need to use the runtime function so that the new function
1047
+ // we are creating here gets a chance to have its code optimized and
1048
+ // doesn't just get a copy of the existing unoptimized code.
1049
+ if (!FLAG_always_opt &&
1050
+ !FLAG_prepare_always_opt &&
1051
+ !pretenure &&
1052
+ scope()->is_function_scope() &&
1053
+ info->num_literals() == 0) {
1054
+ FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
1055
+ __ Push(info);
1056
+ __ CallStub(&stub);
1057
+ } else {
1058
+ __ push(rsi);
1059
+ __ Push(info);
1060
+ __ Push(pretenure
1061
+ ? isolate()->factory()->true_value()
1062
+ : isolate()->factory()->false_value());
1063
+ __ CallRuntime(Runtime::kNewClosure, 3);
1064
+ }
1065
+ context()->Plug(rax);
1066
+ }
1067
+
1068
+
1069
+ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1070
+ Comment cmnt(masm_, "[ VariableProxy");
1071
+ EmitVariableLoad(expr->var());
1072
+ }
1073
+
1074
+
1075
+ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1076
+ Slot* slot,
1077
+ TypeofState typeof_state,
1078
+ Label* slow) {
1079
+ Register context = rsi;
1080
+ Register temp = rdx;
1081
+
1082
+ Scope* s = scope();
1083
+ while (s != NULL) {
1084
+ if (s->num_heap_slots() > 0) {
1085
+ if (s->calls_eval()) {
1086
+ // Check that extension is NULL.
1087
+ __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1088
+ Immediate(0));
1089
+ __ j(not_equal, slow);
1090
+ }
1091
+ // Load next context in chain.
1092
+ __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
1093
+ __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1094
+ // Walk the rest of the chain without clobbering rsi.
1095
+ context = temp;
1096
+ }
1097
+ // If no outer scope calls eval, we do not need to check more
1098
+ // context extensions. If we have reached an eval scope, we check
1099
+ // all extensions from this point.
1100
+ if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
1101
+ s = s->outer_scope();
1102
+ }
1103
+
1104
+ if (s != NULL && s->is_eval_scope()) {
1105
+ // Loop up the context chain. There is no frame effect so it is
1106
+ // safe to use raw labels here.
1107
+ Label next, fast;
1108
+ if (!context.is(temp)) {
1109
+ __ movq(temp, context);
1110
+ }
1111
+ // Load map for comparison into register, outside loop.
1112
+ __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
1113
+ __ bind(&next);
1114
+ // Terminate at global context.
1115
+ __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1116
+ __ j(equal, &fast, Label::kNear);
1117
+ // Check that extension is NULL.
1118
+ __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1119
+ __ j(not_equal, slow);
1120
+ // Load next context in chain.
1121
+ __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
1122
+ __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1123
+ __ jmp(&next);
1124
+ __ bind(&fast);
1125
+ }
1126
+
1127
+ // All extension objects were empty and it is safe to use a global
1128
+ // load IC call.
1129
+ __ movq(rax, GlobalObjectOperand());
1130
+ __ Move(rcx, slot->var()->name());
1131
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1132
+ RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1133
+ ? RelocInfo::CODE_TARGET
1134
+ : RelocInfo::CODE_TARGET_CONTEXT;
1135
+ EmitCallIC(ic, mode, AstNode::kNoNumber);
1136
+ }
1137
+
1138
+
1139
+ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
1140
+ Slot* slot,
1141
+ Label* slow) {
1142
+ ASSERT(slot->type() == Slot::CONTEXT);
1143
+ Register context = rsi;
1144
+ Register temp = rbx;
1145
+
1146
+ for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
1147
+ if (s->num_heap_slots() > 0) {
1148
+ if (s->calls_eval()) {
1149
+ // Check that extension is NULL.
1150
+ __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1151
+ Immediate(0));
1152
+ __ j(not_equal, slow);
1153
+ }
1154
+ __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
1155
+ __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1156
+ // Walk the rest of the chain without clobbering rsi.
1157
+ context = temp;
1158
+ }
1159
+ }
1160
+ // Check that last extension is NULL.
1161
+ __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1162
+ __ j(not_equal, slow);
1163
+
1164
+ // This function is used only for loads, not stores, so it's safe to
1165
+ // return an rsi-based operand (the write barrier cannot be allowed to
1166
+ // destroy the rsi register).
1167
+ return ContextOperand(context, slot->index());
1168
+ }
1169
+
1170
+
1171
+ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1172
+ Slot* slot,
1173
+ TypeofState typeof_state,
1174
+ Label* slow,
1175
+ Label* done) {
1176
+ // Generate fast-case code for variables that might be shadowed by
1177
+ // eval-introduced variables. Eval is used a lot without
1178
+ // introducing variables. In those cases, we do not want to
1179
+ // perform a runtime call for all variables in the scope
1180
+ // containing the eval.
1181
+ if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
1182
+ EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
1183
+ __ jmp(done);
1184
+ } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
1185
+ Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
1186
+ Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
1187
+ if (potential_slot != NULL) {
1188
+ // Generate fast case for locals that rewrite to slots.
1189
+ __ movq(rax,
1190
+ ContextSlotOperandCheckExtensions(potential_slot, slow));
1191
+ if (potential_slot->var()->mode() == Variable::CONST) {
1192
+ __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1193
+ __ j(not_equal, done);
1194
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1195
+ }
1196
+ __ jmp(done);
1197
+ } else if (rewrite != NULL) {
1198
+ // Generate fast case for calls of an argument function.
1199
+ Property* property = rewrite->AsProperty();
1200
+ if (property != NULL) {
1201
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1202
+ Literal* key_literal = property->key()->AsLiteral();
1203
+ if (obj_proxy != NULL &&
1204
+ key_literal != NULL &&
1205
+ obj_proxy->IsArguments() &&
1206
+ key_literal->handle()->IsSmi()) {
1207
+ // Load arguments object if there are no eval-introduced
1208
+ // variables. Then load the argument from the arguments
1209
+ // object using keyed load.
1210
+ __ movq(rdx,
1211
+ ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
1212
+ slow));
1213
+ __ Move(rax, key_literal->handle());
1214
+ Handle<Code> ic =
1215
+ isolate()->builtins()->KeyedLoadIC_Initialize();
1216
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1217
+ __ jmp(done);
1218
+ }
1219
+ }
1220
+ }
1221
+ }
1222
+ }
1223
+
1224
+
1225
+ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1226
+ // Four cases: non-this global variables, lookup slots, all other
1227
+ // types of slots, and parameters that rewrite to explicit property
1228
+ // accesses on the arguments object.
1229
+ Slot* slot = var->AsSlot();
1230
+ Property* property = var->AsProperty();
1231
+
1232
+ if (var->is_global() && !var->is_this()) {
1233
+ Comment cmnt(masm_, "Global variable");
1234
+ // Use inline caching. Variable name is passed in rcx and the global
1235
+ // object on the stack.
1236
+ __ Move(rcx, var->name());
1237
+ __ movq(rax, GlobalObjectOperand());
1238
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1239
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
1240
+ context()->Plug(rax);
1241
+
1242
+ } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
1243
+ Label done, slow;
1244
+
1245
+ // Generate code for loading from variables potentially shadowed
1246
+ // by eval-introduced variables.
1247
+ EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
1248
+
1249
+ __ bind(&slow);
1250
+ Comment cmnt(masm_, "Lookup slot");
1251
+ __ push(rsi); // Context.
1252
+ __ Push(var->name());
1253
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
1254
+ __ bind(&done);
1255
+
1256
+ context()->Plug(rax);
1257
+
1258
+ } else if (slot != NULL) {
1259
+ Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1260
+ ? "Context slot"
1261
+ : "Stack slot");
1262
+ if (var->mode() == Variable::CONST) {
1263
+ // Constants may be the hole value if they have not been initialized.
1264
+ // Unhole them.
1265
+ Label done;
1266
+ MemOperand slot_operand = EmitSlotSearch(slot, rax);
1267
+ __ movq(rax, slot_operand);
1268
+ __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1269
+ __ j(not_equal, &done, Label::kNear);
1270
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1271
+ __ bind(&done);
1272
+ context()->Plug(rax);
1273
+ } else {
1274
+ context()->Plug(slot);
1275
+ }
1276
+
1277
+ } else {
1278
+ Comment cmnt(masm_, "Rewritten parameter");
1279
+ ASSERT_NOT_NULL(property);
1280
+ // Rewritten parameter accesses are of the form "slot[literal]".
1281
+
1282
+ // Assert that the object is in a slot.
1283
+ Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
1284
+ ASSERT_NOT_NULL(object_var);
1285
+ Slot* object_slot = object_var->AsSlot();
1286
+ ASSERT_NOT_NULL(object_slot);
1287
+
1288
+ // Load the object.
1289
+ MemOperand object_loc = EmitSlotSearch(object_slot, rax);
1290
+ __ movq(rdx, object_loc);
1291
+
1292
+ // Assert that the key is a smi.
1293
+ Literal* key_literal = property->key()->AsLiteral();
1294
+ ASSERT_NOT_NULL(key_literal);
1295
+ ASSERT(key_literal->handle()->IsSmi());
1296
+
1297
+ // Load the key.
1298
+ __ Move(rax, key_literal->handle());
1299
+
1300
+ // Do a keyed property load.
1301
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1302
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1303
+ context()->Plug(rax);
1304
+ }
1305
+ }
1306
+
1307
+
1308
+ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1309
+ Comment cmnt(masm_, "[ RegExpLiteral");
1310
+ Label materialized;
1311
+ // Registers will be used as follows:
1312
+ // rdi = JS function.
1313
+ // rcx = literals array.
1314
+ // rbx = regexp literal.
1315
+ // rax = regexp literal clone.
1316
+ __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1317
+ __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1318
+ int literal_offset =
1319
+ FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1320
+ __ movq(rbx, FieldOperand(rcx, literal_offset));
1321
+ __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1322
+ __ j(not_equal, &materialized);
1323
+
1324
+ // Create regexp literal using runtime function
1325
+ // Result will be in rax.
1326
+ __ push(rcx);
1327
+ __ Push(Smi::FromInt(expr->literal_index()));
1328
+ __ Push(expr->pattern());
1329
+ __ Push(expr->flags());
1330
+ __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1331
+ __ movq(rbx, rax);
1332
+
1333
+ __ bind(&materialized);
1334
+ int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1335
+ Label allocated, runtime_allocate;
1336
+ __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1337
+ __ jmp(&allocated);
1338
+
1339
+ __ bind(&runtime_allocate);
1340
+ __ push(rbx);
1341
+ __ Push(Smi::FromInt(size));
1342
+ __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1343
+ __ pop(rbx);
1344
+
1345
+ __ bind(&allocated);
1346
+ // Copy the content into the newly allocated memory.
1347
+ // (Unroll copy loop once for better throughput).
1348
+ for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1349
+ __ movq(rdx, FieldOperand(rbx, i));
1350
+ __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
1351
+ __ movq(FieldOperand(rax, i), rdx);
1352
+ __ movq(FieldOperand(rax, i + kPointerSize), rcx);
1353
+ }
1354
+ if ((size % (2 * kPointerSize)) != 0) {
1355
+ __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
1356
+ __ movq(FieldOperand(rax, size - kPointerSize), rdx);
1357
+ }
1358
+ context()->Plug(rax);
1359
+ }
1360
+
1361
+
1362
+ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1363
+ Comment cmnt(masm_, "[ ObjectLiteral");
1364
+ __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1365
+ __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1366
+ __ Push(Smi::FromInt(expr->literal_index()));
1367
+ __ Push(expr->constant_properties());
1368
+ int flags = expr->fast_elements()
1369
+ ? ObjectLiteral::kFastElements
1370
+ : ObjectLiteral::kNoFlags;
1371
+ flags |= expr->has_function()
1372
+ ? ObjectLiteral::kHasFunction
1373
+ : ObjectLiteral::kNoFlags;
1374
+ __ Push(Smi::FromInt(flags));
1375
+ if (expr->depth() > 1) {
1376
+ __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1377
+ } else {
1378
+ __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1379
+ }
1380
+
1381
+ // If result_saved is true the result is on top of the stack. If
1382
+ // result_saved is false the result is in rax.
1383
+ bool result_saved = false;
1384
+
1385
+ // Mark all computed expressions that are bound to a key that
1386
+ // is shadowed by a later occurrence of the same key. For the
1387
+ // marked expressions, no store code is emitted.
1388
+ expr->CalculateEmitStore();
1389
+
1390
+ for (int i = 0; i < expr->properties()->length(); i++) {
1391
+ ObjectLiteral::Property* property = expr->properties()->at(i);
1392
+ if (property->IsCompileTimeValue()) continue;
1393
+
1394
+ Literal* key = property->key();
1395
+ Expression* value = property->value();
1396
+ if (!result_saved) {
1397
+ __ push(rax); // Save result on the stack
1398
+ result_saved = true;
1399
+ }
1400
+ switch (property->kind()) {
1401
+ case ObjectLiteral::Property::CONSTANT:
1402
+ UNREACHABLE();
1403
+ case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1404
+ ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1405
+ // Fall through.
1406
+ case ObjectLiteral::Property::COMPUTED:
1407
+ if (key->handle()->IsSymbol()) {
1408
+ if (property->emit_store()) {
1409
+ VisitForAccumulatorValue(value);
1410
+ __ Move(rcx, key->handle());
1411
+ __ movq(rdx, Operand(rsp, 0));
1412
+ Handle<Code> ic = is_strict_mode()
1413
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
1414
+ : isolate()->builtins()->StoreIC_Initialize();
1415
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
1416
+ PrepareForBailoutForId(key->id(), NO_REGISTERS);
1417
+ } else {
1418
+ VisitForEffect(value);
1419
+ }
1420
+ break;
1421
+ }
1422
+ // Fall through.
1423
+ case ObjectLiteral::Property::PROTOTYPE:
1424
+ __ push(Operand(rsp, 0)); // Duplicate receiver.
1425
+ VisitForStackValue(key);
1426
+ VisitForStackValue(value);
1427
+ if (property->emit_store()) {
1428
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
1429
+ __ CallRuntime(Runtime::kSetProperty, 4);
1430
+ } else {
1431
+ __ Drop(3);
1432
+ }
1433
+ break;
1434
+ case ObjectLiteral::Property::SETTER:
1435
+ case ObjectLiteral::Property::GETTER:
1436
+ __ push(Operand(rsp, 0)); // Duplicate receiver.
1437
+ VisitForStackValue(key);
1438
+ __ Push(property->kind() == ObjectLiteral::Property::SETTER ?
1439
+ Smi::FromInt(1) :
1440
+ Smi::FromInt(0));
1441
+ VisitForStackValue(value);
1442
+ __ CallRuntime(Runtime::kDefineAccessor, 4);
1443
+ break;
1444
+ }
1445
+ }
1446
+
1447
+ if (expr->has_function()) {
1448
+ ASSERT(result_saved);
1449
+ __ push(Operand(rsp, 0));
1450
+ __ CallRuntime(Runtime::kToFastProperties, 1);
1451
+ }
1452
+
1453
+ if (result_saved) {
1454
+ context()->PlugTOS();
1455
+ } else {
1456
+ context()->Plug(rax);
1457
+ }
1458
+ }
1459
+
1460
+
1461
+ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1462
+ Comment cmnt(masm_, "[ ArrayLiteral");
1463
+
1464
+ ZoneList<Expression*>* subexprs = expr->values();
1465
+ int length = subexprs->length();
1466
+
1467
+ __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1468
+ __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1469
+ __ Push(Smi::FromInt(expr->literal_index()));
1470
+ __ Push(expr->constant_elements());
1471
+ if (expr->constant_elements()->map() ==
1472
+ isolate()->heap()->fixed_cow_array_map()) {
1473
+ FastCloneShallowArrayStub stub(
1474
+ FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1475
+ __ CallStub(&stub);
1476
+ __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1477
+ } else if (expr->depth() > 1) {
1478
+ __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1479
+ } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1480
+ __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1481
+ } else {
1482
+ FastCloneShallowArrayStub stub(
1483
+ FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
1484
+ __ CallStub(&stub);
1485
+ }
1486
+
1487
+ bool result_saved = false; // Is the result saved to the stack?
1488
+
1489
+ // Emit code to evaluate all the non-constant subexpressions and to store
1490
+ // them into the newly cloned array.
1491
+ for (int i = 0; i < length; i++) {
1492
+ Expression* subexpr = subexprs->at(i);
1493
+ // If the subexpression is a literal or a simple materialized literal it
1494
+ // is already set in the cloned array.
1495
+ if (subexpr->AsLiteral() != NULL ||
1496
+ CompileTimeValue::IsCompileTimeValue(subexpr)) {
1497
+ continue;
1498
+ }
1499
+
1500
+ if (!result_saved) {
1501
+ __ push(rax);
1502
+ result_saved = true;
1503
+ }
1504
+ VisitForAccumulatorValue(subexpr);
1505
+
1506
+ // Store the subexpression value in the array's elements.
1507
+ __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
1508
+ __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1509
+ int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1510
+ __ movq(FieldOperand(rbx, offset), result_register());
1511
+
1512
+ // Update the write barrier for the array store.
1513
+ __ RecordWrite(rbx, offset, result_register(), rcx);
1514
+
1515
+ PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1516
+ }
1517
+
1518
+ if (result_saved) {
1519
+ context()->PlugTOS();
1520
+ } else {
1521
+ context()->Plug(rax);
1522
+ }
1523
+ }
1524
+
1525
+
1526
+ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1527
+ Comment cmnt(masm_, "[ Assignment");
1528
+ // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1529
+ // on the left-hand side.
1530
+ if (!expr->target()->IsValidLeftHandSide()) {
1531
+ VisitForEffect(expr->target());
1532
+ return;
1533
+ }
1534
+
1535
+ // Left-hand side can only be a property, a global or a (parameter or local)
1536
+ // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1537
+ enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1538
+ LhsKind assign_type = VARIABLE;
1539
+ Property* property = expr->target()->AsProperty();
1540
+ if (property != NULL) {
1541
+ assign_type = (property->key()->IsPropertyName())
1542
+ ? NAMED_PROPERTY
1543
+ : KEYED_PROPERTY;
1544
+ }
1545
+
1546
+ // Evaluate LHS expression.
1547
+ switch (assign_type) {
1548
+ case VARIABLE:
1549
+ // Nothing to do here.
1550
+ break;
1551
+ case NAMED_PROPERTY:
1552
+ if (expr->is_compound()) {
1553
+ // We need the receiver both on the stack and in the accumulator.
1554
+ VisitForAccumulatorValue(property->obj());
1555
+ __ push(result_register());
1556
+ } else {
1557
+ VisitForStackValue(property->obj());
1558
+ }
1559
+ break;
1560
+ case KEYED_PROPERTY: {
1561
+ if (expr->is_compound()) {
1562
+ if (property->is_arguments_access()) {
1563
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1564
+ MemOperand slot_operand =
1565
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
1566
+ __ push(slot_operand);
1567
+ __ Move(rax, property->key()->AsLiteral()->handle());
1568
+ } else {
1569
+ VisitForStackValue(property->obj());
1570
+ VisitForAccumulatorValue(property->key());
1571
+ }
1572
+ __ movq(rdx, Operand(rsp, 0));
1573
+ __ push(rax);
1574
+ } else {
1575
+ if (property->is_arguments_access()) {
1576
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1577
+ MemOperand slot_operand =
1578
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
1579
+ __ push(slot_operand);
1580
+ __ Push(property->key()->AsLiteral()->handle());
1581
+ } else {
1582
+ VisitForStackValue(property->obj());
1583
+ VisitForStackValue(property->key());
1584
+ }
1585
+ }
1586
+ break;
1587
+ }
1588
+ }
1589
+
1590
+ // For compound assignments we need another deoptimization point after the
1591
+ // variable/property load.
1592
+ if (expr->is_compound()) {
1593
+ { AccumulatorValueContext context(this);
1594
+ switch (assign_type) {
1595
+ case VARIABLE:
1596
+ EmitVariableLoad(expr->target()->AsVariableProxy()->var());
1597
+ PrepareForBailout(expr->target(), TOS_REG);
1598
+ break;
1599
+ case NAMED_PROPERTY:
1600
+ EmitNamedPropertyLoad(property);
1601
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1602
+ break;
1603
+ case KEYED_PROPERTY:
1604
+ EmitKeyedPropertyLoad(property);
1605
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1606
+ break;
1607
+ }
1608
+ }
1609
+
1610
+ Token::Value op = expr->binary_op();
1611
+ __ push(rax); // Left operand goes on the stack.
1612
+ VisitForAccumulatorValue(expr->value());
1613
+
1614
+ OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1615
+ ? OVERWRITE_RIGHT
1616
+ : NO_OVERWRITE;
1617
+ SetSourcePosition(expr->position() + 1);
1618
+ AccumulatorValueContext context(this);
1619
+ if (ShouldInlineSmiCase(op)) {
1620
+ EmitInlineSmiBinaryOp(expr->binary_operation(),
1621
+ op,
1622
+ mode,
1623
+ expr->target(),
1624
+ expr->value());
1625
+ } else {
1626
+ EmitBinaryOp(expr->binary_operation(), op, mode);
1627
+ }
1628
+ // Deoptimization point in case the binary operation may have side effects.
1629
+ PrepareForBailout(expr->binary_operation(), TOS_REG);
1630
+ } else {
1631
+ VisitForAccumulatorValue(expr->value());
1632
+ }
1633
+
1634
+ // Record source position before possible IC call.
1635
+ SetSourcePosition(expr->position());
1636
+
1637
+ // Store the value.
1638
+ switch (assign_type) {
1639
+ case VARIABLE:
1640
+ EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1641
+ expr->op());
1642
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1643
+ context()->Plug(rax);
1644
+ break;
1645
+ case NAMED_PROPERTY:
1646
+ EmitNamedPropertyAssignment(expr);
1647
+ break;
1648
+ case KEYED_PROPERTY:
1649
+ EmitKeyedPropertyAssignment(expr);
1650
+ break;
1651
+ }
1652
+ }
1653
+
1654
+
1655
+ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1656
+ SetSourcePosition(prop->position());
1657
+ Literal* key = prop->key()->AsLiteral();
1658
+ __ Move(rcx, key->handle());
1659
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1660
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1661
+ }
1662
+
1663
+
1664
+ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1665
+ SetSourcePosition(prop->position());
1666
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1667
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1668
+ }
1669
+
1670
+
1671
+ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1672
+ Token::Value op,
1673
+ OverwriteMode mode,
1674
+ Expression* left,
1675
+ Expression* right) {
1676
+ // Do combined smi check of the operands. Left operand is on the
1677
+ // stack (popped into rdx). Right operand is in rax but moved into
1678
+ // rcx to make the shifts easier.
1679
+ Label done, stub_call, smi_case;
1680
+ __ pop(rdx);
1681
+ __ movq(rcx, rax);
1682
+ __ or_(rax, rdx);
1683
+ JumpPatchSite patch_site(masm_);
1684
+ patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
1685
+
1686
+ __ bind(&stub_call);
1687
+ __ movq(rax, rcx);
1688
+ BinaryOpStub stub(op, mode);
1689
+ EmitCallIC(stub.GetCode(), &patch_site, expr->id());
1690
+ __ jmp(&done, Label::kNear);
1691
+
1692
+ __ bind(&smi_case);
1693
+ switch (op) {
1694
+ case Token::SAR:
1695
+ __ SmiShiftArithmeticRight(rax, rdx, rcx);
1696
+ break;
1697
+ case Token::SHL:
1698
+ __ SmiShiftLeft(rax, rdx, rcx);
1699
+ break;
1700
+ case Token::SHR:
1701
+ __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
1702
+ break;
1703
+ case Token::ADD:
1704
+ __ SmiAdd(rax, rdx, rcx, &stub_call);
1705
+ break;
1706
+ case Token::SUB:
1707
+ __ SmiSub(rax, rdx, rcx, &stub_call);
1708
+ break;
1709
+ case Token::MUL:
1710
+ __ SmiMul(rax, rdx, rcx, &stub_call);
1711
+ break;
1712
+ case Token::BIT_OR:
1713
+ __ SmiOr(rax, rdx, rcx);
1714
+ break;
1715
+ case Token::BIT_AND:
1716
+ __ SmiAnd(rax, rdx, rcx);
1717
+ break;
1718
+ case Token::BIT_XOR:
1719
+ __ SmiXor(rax, rdx, rcx);
1720
+ break;
1721
+ default:
1722
+ UNREACHABLE();
1723
+ break;
1724
+ }
1725
+
1726
+ __ bind(&done);
1727
+ context()->Plug(rax);
1728
+ }
1729
+
1730
+
1731
+ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1732
+ Token::Value op,
1733
+ OverwriteMode mode) {
1734
+ __ pop(rdx);
1735
+ BinaryOpStub stub(op, mode);
1736
+ // NULL signals no inlined smi code.
1737
+ EmitCallIC(stub.GetCode(), NULL, expr->id());
1738
+ context()->Plug(rax);
1739
+ }
1740
+
1741
+
1742
+ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1743
+ // Invalid left-hand sides are rewritten to have a 'throw
1744
+ // ReferenceError' on the left-hand side.
1745
+ if (!expr->IsValidLeftHandSide()) {
1746
+ VisitForEffect(expr);
1747
+ return;
1748
+ }
1749
+
1750
+ // Left-hand side can only be a property, a global or a (parameter or local)
1751
+ // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1752
+ enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1753
+ LhsKind assign_type = VARIABLE;
1754
+ Property* prop = expr->AsProperty();
1755
+ if (prop != NULL) {
1756
+ assign_type = (prop->key()->IsPropertyName())
1757
+ ? NAMED_PROPERTY
1758
+ : KEYED_PROPERTY;
1759
+ }
1760
+
1761
+ switch (assign_type) {
1762
+ case VARIABLE: {
1763
+ Variable* var = expr->AsVariableProxy()->var();
1764
+ EffectContext context(this);
1765
+ EmitVariableAssignment(var, Token::ASSIGN);
1766
+ break;
1767
+ }
1768
+ case NAMED_PROPERTY: {
1769
+ __ push(rax); // Preserve value.
1770
+ VisitForAccumulatorValue(prop->obj());
1771
+ __ movq(rdx, rax);
1772
+ __ pop(rax); // Restore value.
1773
+ __ Move(rcx, prop->key()->AsLiteral()->handle());
1774
+ Handle<Code> ic = is_strict_mode()
1775
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
1776
+ : isolate()->builtins()->StoreIC_Initialize();
1777
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
1778
+ break;
1779
+ }
1780
+ case KEYED_PROPERTY: {
1781
+ __ push(rax); // Preserve value.
1782
+ if (prop->is_synthetic()) {
1783
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
1784
+ ASSERT(prop->key()->AsLiteral() != NULL);
1785
+ { AccumulatorValueContext for_object(this);
1786
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1787
+ }
1788
+ __ movq(rdx, rax);
1789
+ __ Move(rcx, prop->key()->AsLiteral()->handle());
1790
+ } else {
1791
+ VisitForStackValue(prop->obj());
1792
+ VisitForAccumulatorValue(prop->key());
1793
+ __ movq(rcx, rax);
1794
+ __ pop(rdx);
1795
+ }
1796
+ __ pop(rax); // Restore value.
1797
+ Handle<Code> ic = is_strict_mode()
1798
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1799
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
1800
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
1801
+ break;
1802
+ }
1803
+ }
1804
+ PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1805
+ context()->Plug(rax);
1806
+ }
1807
+
1808
+
1809
+ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1810
+ Token::Value op) {
1811
+ // Left-hand sides that rewrite to explicit property accesses do not reach
1812
+ // here.
1813
+ ASSERT(var != NULL);
1814
+ ASSERT(var->is_global() || var->AsSlot() != NULL);
1815
+
1816
+ if (var->is_global()) {
1817
+ ASSERT(!var->is_this());
1818
+ // Assignment to a global variable. Use inline caching for the
1819
+ // assignment. Right-hand-side value is passed in rax, variable name in
1820
+ // rcx, and the global object on the stack.
1821
+ __ Move(rcx, var->name());
1822
+ __ movq(rdx, GlobalObjectOperand());
1823
+ Handle<Code> ic = is_strict_mode()
1824
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
1825
+ : isolate()->builtins()->StoreIC_Initialize();
1826
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
1827
+
1828
+ } else if (op == Token::INIT_CONST) {
1829
+ // Like var declarations, const declarations are hoisted to function
1830
+ // scope. However, unlike var initializers, const initializers are able
1831
+ // to drill a hole to that function context, even from inside a 'with'
1832
+ // context. We thus bypass the normal static scope lookup.
1833
+ Slot* slot = var->AsSlot();
1834
+ Label skip;
1835
+ switch (slot->type()) {
1836
+ case Slot::PARAMETER:
1837
+ // No const parameters.
1838
+ UNREACHABLE();
1839
+ break;
1840
+ case Slot::LOCAL:
1841
+ __ movq(rdx, Operand(rbp, SlotOffset(slot)));
1842
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1843
+ __ j(not_equal, &skip);
1844
+ __ movq(Operand(rbp, SlotOffset(slot)), rax);
1845
+ break;
1846
+ case Slot::CONTEXT: {
1847
+ __ movq(rcx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
1848
+ __ movq(rdx, ContextOperand(rcx, slot->index()));
1849
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1850
+ __ j(not_equal, &skip);
1851
+ __ movq(ContextOperand(rcx, slot->index()), rax);
1852
+ int offset = Context::SlotOffset(slot->index());
1853
+ __ movq(rdx, rax); // Preserve the stored value in eax.
1854
+ __ RecordWrite(rcx, offset, rdx, rbx);
1855
+ break;
1856
+ }
1857
+ case Slot::LOOKUP:
1858
+ __ push(rax);
1859
+ __ push(rsi);
1860
+ __ Push(var->name());
1861
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1862
+ break;
1863
+ }
1864
+ __ bind(&skip);
1865
+
1866
+ } else if (var->mode() != Variable::CONST) {
1867
+ // Perform the assignment for non-const variables. Const assignments
1868
+ // are simply skipped.
1869
+ Slot* slot = var->AsSlot();
1870
+ switch (slot->type()) {
1871
+ case Slot::PARAMETER:
1872
+ case Slot::LOCAL:
1873
+ // Perform the assignment.
1874
+ __ movq(Operand(rbp, SlotOffset(slot)), rax);
1875
+ break;
1876
+
1877
+ case Slot::CONTEXT: {
1878
+ MemOperand target = EmitSlotSearch(slot, rcx);
1879
+ // Perform the assignment and issue the write barrier.
1880
+ __ movq(target, rax);
1881
+ // The value of the assignment is in rax. RecordWrite clobbers its
1882
+ // register arguments.
1883
+ __ movq(rdx, rax);
1884
+ int offset = Context::SlotOffset(slot->index());
1885
+ __ RecordWrite(rcx, offset, rdx, rbx);
1886
+ break;
1887
+ }
1888
+
1889
+ case Slot::LOOKUP:
1890
+ // Call the runtime for the assignment.
1891
+ __ push(rax); // Value.
1892
+ __ push(rsi); // Context.
1893
+ __ Push(var->name());
1894
+ __ Push(Smi::FromInt(strict_mode_flag()));
1895
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
1896
+ break;
1897
+ }
1898
+ }
1899
+ }
1900
+
1901
+
1902
+ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1903
+ // Assignment to a property, using a named store IC.
1904
+ Property* prop = expr->target()->AsProperty();
1905
+ ASSERT(prop != NULL);
1906
+ ASSERT(prop->key()->AsLiteral() != NULL);
1907
+
1908
+ // If the assignment starts a block of assignments to the same object,
1909
+ // change to slow case to avoid the quadratic behavior of repeatedly
1910
+ // adding fast properties.
1911
+ if (expr->starts_initialization_block()) {
1912
+ __ push(result_register());
1913
+ __ push(Operand(rsp, kPointerSize)); // Receiver is now under value.
1914
+ __ CallRuntime(Runtime::kToSlowProperties, 1);
1915
+ __ pop(result_register());
1916
+ }
1917
+
1918
+ // Record source code position before IC call.
1919
+ SetSourcePosition(expr->position());
1920
+ __ Move(rcx, prop->key()->AsLiteral()->handle());
1921
+ if (expr->ends_initialization_block()) {
1922
+ __ movq(rdx, Operand(rsp, 0));
1923
+ } else {
1924
+ __ pop(rdx);
1925
+ }
1926
+ Handle<Code> ic = is_strict_mode()
1927
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
1928
+ : isolate()->builtins()->StoreIC_Initialize();
1929
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
1930
+
1931
+ // If the assignment ends an initialization block, revert to fast case.
1932
+ if (expr->ends_initialization_block()) {
1933
+ __ push(rax); // Result of assignment, saved even if not needed.
1934
+ __ push(Operand(rsp, kPointerSize)); // Receiver is under value.
1935
+ __ CallRuntime(Runtime::kToFastProperties, 1);
1936
+ __ pop(rax);
1937
+ __ Drop(1);
1938
+ }
1939
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1940
+ context()->Plug(rax);
1941
+ }
1942
+
1943
+
1944
+ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1945
+ // Assignment to a property, using a keyed store IC.
1946
+
1947
+ // If the assignment starts a block of assignments to the same object,
1948
+ // change to slow case to avoid the quadratic behavior of repeatedly
1949
+ // adding fast properties.
1950
+ if (expr->starts_initialization_block()) {
1951
+ __ push(result_register());
1952
+ // Receiver is now under the key and value.
1953
+ __ push(Operand(rsp, 2 * kPointerSize));
1954
+ __ CallRuntime(Runtime::kToSlowProperties, 1);
1955
+ __ pop(result_register());
1956
+ }
1957
+
1958
+ __ pop(rcx);
1959
+ if (expr->ends_initialization_block()) {
1960
+ __ movq(rdx, Operand(rsp, 0)); // Leave receiver on the stack for later.
1961
+ } else {
1962
+ __ pop(rdx);
1963
+ }
1964
+ // Record source code position before IC call.
1965
+ SetSourcePosition(expr->position());
1966
+ Handle<Code> ic = is_strict_mode()
1967
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1968
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
1969
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
1970
+
1971
+ // If the assignment ends an initialization block, revert to fast case.
1972
+ if (expr->ends_initialization_block()) {
1973
+ __ pop(rdx);
1974
+ __ push(rax); // Result of assignment, saved even if not needed.
1975
+ __ push(rdx);
1976
+ __ CallRuntime(Runtime::kToFastProperties, 1);
1977
+ __ pop(rax);
1978
+ }
1979
+
1980
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1981
+ context()->Plug(rax);
1982
+ }
1983
+
1984
+
1985
+ void FullCodeGenerator::VisitProperty(Property* expr) {
1986
+ Comment cmnt(masm_, "[ Property");
1987
+ Expression* key = expr->key();
1988
+
1989
+ if (key->IsPropertyName()) {
1990
+ VisitForAccumulatorValue(expr->obj());
1991
+ EmitNamedPropertyLoad(expr);
1992
+ context()->Plug(rax);
1993
+ } else {
1994
+ VisitForStackValue(expr->obj());
1995
+ VisitForAccumulatorValue(expr->key());
1996
+ __ pop(rdx);
1997
+ EmitKeyedPropertyLoad(expr);
1998
+ context()->Plug(rax);
1999
+ }
2000
+ }
2001
+
2002
+
2003
+ void FullCodeGenerator::EmitCallWithIC(Call* expr,
2004
+ Handle<Object> name,
2005
+ RelocInfo::Mode mode) {
2006
+ // Code common for calls using the IC.
2007
+ ZoneList<Expression*>* args = expr->arguments();
2008
+ int arg_count = args->length();
2009
+ { PreservePositionScope scope(masm()->positions_recorder());
2010
+ for (int i = 0; i < arg_count; i++) {
2011
+ VisitForStackValue(args->at(i));
2012
+ }
2013
+ __ Move(rcx, name);
2014
+ }
2015
+ // Record source position for debugger.
2016
+ SetSourcePosition(expr->position());
2017
+ // Call the IC initialization code.
2018
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2019
+ Handle<Code> ic =
2020
+ ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
2021
+ EmitCallIC(ic, mode, expr->id());
2022
+ RecordJSReturnSite(expr);
2023
+ // Restore context register.
2024
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2025
+ context()->Plug(rax);
2026
+ }
2027
+
2028
+
2029
+ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2030
+ Expression* key) {
2031
+ // Load the key.
2032
+ VisitForAccumulatorValue(key);
2033
+
2034
+ // Swap the name of the function and the receiver on the stack to follow
2035
+ // the calling convention for call ICs.
2036
+ __ pop(rcx);
2037
+ __ push(rax);
2038
+ __ push(rcx);
2039
+
2040
+ // Load the arguments.
2041
+ ZoneList<Expression*>* args = expr->arguments();
2042
+ int arg_count = args->length();
2043
+ { PreservePositionScope scope(masm()->positions_recorder());
2044
+ for (int i = 0; i < arg_count; i++) {
2045
+ VisitForStackValue(args->at(i));
2046
+ }
2047
+ }
2048
+ // Record source position for debugger.
2049
+ SetSourcePosition(expr->position());
2050
+ // Call the IC initialization code.
2051
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2052
+ Handle<Code> ic =
2053
+ ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
2054
+ __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
2055
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2056
+ RecordJSReturnSite(expr);
2057
+ // Restore context register.
2058
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2059
+ context()->DropAndPlug(1, rax); // Drop the key still on the stack.
2060
+ }
2061
+
2062
+
2063
+ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2064
+ // Code common for calls using the call stub.
2065
+ ZoneList<Expression*>* args = expr->arguments();
2066
+ int arg_count = args->length();
2067
+ { PreservePositionScope scope(masm()->positions_recorder());
2068
+ for (int i = 0; i < arg_count; i++) {
2069
+ VisitForStackValue(args->at(i));
2070
+ }
2071
+ }
2072
+ // Record source position for debugger.
2073
+ SetSourcePosition(expr->position());
2074
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2075
+ CallFunctionStub stub(arg_count, in_loop, flags);
2076
+ __ CallStub(&stub);
2077
+ RecordJSReturnSite(expr);
2078
+ // Restore context register.
2079
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2080
+ // Discard the function left on TOS.
2081
+ context()->DropAndPlug(1, rax);
2082
+ }
2083
+
2084
+
2085
+ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2086
+ int arg_count) {
2087
+ // Push copy of the first argument or undefined if it doesn't exist.
2088
+ if (arg_count > 0) {
2089
+ __ push(Operand(rsp, arg_count * kPointerSize));
2090
+ } else {
2091
+ __ PushRoot(Heap::kUndefinedValueRootIndex);
2092
+ }
2093
+
2094
+ // Push the receiver of the enclosing function and do runtime call.
2095
+ __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
2096
+
2097
+ // Push the strict mode flag.
2098
+ __ Push(Smi::FromInt(strict_mode_flag()));
2099
+
2100
+ __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2101
+ ? Runtime::kResolvePossiblyDirectEvalNoLookup
2102
+ : Runtime::kResolvePossiblyDirectEval, 4);
2103
+ }
2104
+
2105
+
2106
+ void FullCodeGenerator::VisitCall(Call* expr) {
2107
+ #ifdef DEBUG
2108
+ // We want to verify that RecordJSReturnSite gets called on all paths
2109
+ // through this function. Avoid early returns.
2110
+ expr->return_is_recorded_ = false;
2111
+ #endif
2112
+
2113
+ Comment cmnt(masm_, "[ Call");
2114
+ Expression* fun = expr->expression();
2115
+ Variable* var = fun->AsVariableProxy()->AsVariable();
2116
+
2117
+ if (var != NULL && var->is_possibly_eval()) {
2118
+ // In a call to eval, we first call %ResolvePossiblyDirectEval to
2119
+ // resolve the function we need to call and the receiver of the
2120
+ // call. Then we call the resolved function using the given
2121
+ // arguments.
2122
+ ZoneList<Expression*>* args = expr->arguments();
2123
+ int arg_count = args->length();
2124
+ { PreservePositionScope pos_scope(masm()->positions_recorder());
2125
+ VisitForStackValue(fun);
2126
+ __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2127
+
2128
+ // Push the arguments.
2129
+ for (int i = 0; i < arg_count; i++) {
2130
+ VisitForStackValue(args->at(i));
2131
+ }
2132
+
2133
+ // If we know that eval can only be shadowed by eval-introduced
2134
+ // variables we attempt to load the global eval function directly
2135
+ // in generated code. If we succeed, there is no need to perform a
2136
+ // context lookup in the runtime system.
2137
+ Label done;
2138
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
2139
+ Label slow;
2140
+ EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
2141
+ NOT_INSIDE_TYPEOF,
2142
+ &slow);
2143
+ // Push the function and resolve eval.
2144
+ __ push(rax);
2145
+ EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2146
+ __ jmp(&done);
2147
+ __ bind(&slow);
2148
+ }
2149
+
2150
+ // Push copy of the function (found below the arguments) and
2151
+ // resolve eval.
2152
+ __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
2153
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2154
+ if (done.is_linked()) {
2155
+ __ bind(&done);
2156
+ }
2157
+
2158
+ // The runtime call returns a pair of values in rax (function) and
2159
+ // rdx (receiver). Touch up the stack with the right values.
2160
+ __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2161
+ __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2162
+ }
2163
+ // Record source position for debugger.
2164
+ SetSourcePosition(expr->position());
2165
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2166
+ CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
2167
+ __ CallStub(&stub);
2168
+ RecordJSReturnSite(expr);
2169
+ // Restore context register.
2170
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2171
+ context()->DropAndPlug(1, rax);
2172
+ } else if (var != NULL && !var->is_this() && var->is_global()) {
2173
+ // Call to a global variable.
2174
+ // Push global object as receiver for the call IC lookup.
2175
+ __ push(GlobalObjectOperand());
2176
+ EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
2177
+ } else if (var != NULL && var->AsSlot() != NULL &&
2178
+ var->AsSlot()->type() == Slot::LOOKUP) {
2179
+ // Call to a lookup slot (dynamically introduced variable).
2180
+ Label slow, done;
2181
+
2182
+ { PreservePositionScope scope(masm()->positions_recorder());
2183
+ // Generate code for loading from variables potentially shadowed
2184
+ // by eval-introduced variables.
2185
+ EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
2186
+ NOT_INSIDE_TYPEOF,
2187
+ &slow,
2188
+ &done);
2189
+
2190
+ __ bind(&slow);
2191
+ }
2192
+ // Call the runtime to find the function to call (returned in rax)
2193
+ // and the object holding it (returned in rdx).
2194
+ __ push(context_register());
2195
+ __ Push(var->name());
2196
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
2197
+ __ push(rax); // Function.
2198
+ __ push(rdx); // Receiver.
2199
+
2200
+ // If fast case code has been generated, emit code to push the
2201
+ // function and receiver and have the slow path jump around this
2202
+ // code.
2203
+ if (done.is_linked()) {
2204
+ Label call;
2205
+ __ jmp(&call, Label::kNear);
2206
+ __ bind(&done);
2207
+ // Push function.
2208
+ __ push(rax);
2209
+ // Push global receiver.
2210
+ __ movq(rbx, GlobalObjectOperand());
2211
+ __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2212
+ __ bind(&call);
2213
+ }
2214
+
2215
+ // The receiver is either the global receiver or an object found
2216
+ // by LoadContextSlot. That object could be the hole if the
2217
+ // receiver is implicitly the global object.
2218
+ EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2219
+ } else if (fun->AsProperty() != NULL) {
2220
+ // Call to an object property.
2221
+ Property* prop = fun->AsProperty();
2222
+ Literal* key = prop->key()->AsLiteral();
2223
+ if (key != NULL && key->handle()->IsSymbol()) {
2224
+ // Call to a named property, use call IC.
2225
+ { PreservePositionScope scope(masm()->positions_recorder());
2226
+ VisitForStackValue(prop->obj());
2227
+ }
2228
+ EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2229
+ } else {
2230
+ // Call to a keyed property.
2231
+ // For a synthetic property use keyed load IC followed by function call,
2232
+ // for a regular property use keyed EmitCallIC.
2233
+ if (prop->is_synthetic()) {
2234
+ // Do not visit the object and key subexpressions (they are shared
2235
+ // by all occurrences of the same rewritten parameter).
2236
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
2237
+ ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2238
+ Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2239
+ MemOperand operand = EmitSlotSearch(slot, rdx);
2240
+ __ movq(rdx, operand);
2241
+
2242
+ ASSERT(prop->key()->AsLiteral() != NULL);
2243
+ ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2244
+ __ Move(rax, prop->key()->AsLiteral()->handle());
2245
+
2246
+ // Record source code position for IC call.
2247
+ SetSourcePosition(prop->position());
2248
+
2249
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2250
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
2251
+ // Push result (function).
2252
+ __ push(rax);
2253
+ // Push Global receiver.
2254
+ __ movq(rcx, GlobalObjectOperand());
2255
+ __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
2256
+ EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2257
+ } else {
2258
+ { PreservePositionScope scope(masm()->positions_recorder());
2259
+ VisitForStackValue(prop->obj());
2260
+ }
2261
+ EmitKeyedCallWithIC(expr, prop->key());
2262
+ }
2263
+ }
2264
+ } else {
2265
+ { PreservePositionScope scope(masm()->positions_recorder());
2266
+ VisitForStackValue(fun);
2267
+ }
2268
+ // Load global receiver object.
2269
+ __ movq(rbx, GlobalObjectOperand());
2270
+ __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2271
+ // Emit function call.
2272
+ EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2273
+ }
2274
+
2275
+ #ifdef DEBUG
2276
+ // RecordJSReturnSite should have been called.
2277
+ ASSERT(expr->return_is_recorded_);
2278
+ #endif
2279
+ }
2280
+
2281
+
2282
+ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2283
+ Comment cmnt(masm_, "[ CallNew");
2284
+ // According to ECMA-262, section 11.2.2, page 44, the function
2285
+ // expression in new calls must be evaluated before the
2286
+ // arguments.
2287
+
2288
+ // Push constructor on the stack. If it's not a function it's used as
2289
+ // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2290
+ // ignored.
2291
+ VisitForStackValue(expr->expression());
2292
+
2293
+ // Push the arguments ("left-to-right") on the stack.
2294
+ ZoneList<Expression*>* args = expr->arguments();
2295
+ int arg_count = args->length();
2296
+ for (int i = 0; i < arg_count; i++) {
2297
+ VisitForStackValue(args->at(i));
2298
+ }
2299
+
2300
+ // Call the construct call builtin that handles allocation and
2301
+ // constructor invocation.
2302
+ SetSourcePosition(expr->position());
2303
+
2304
+ // Load function and argument count into rdi and rax.
2305
+ __ Set(rax, arg_count);
2306
+ __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
2307
+
2308
+ Handle<Code> construct_builtin =
2309
+ isolate()->builtins()->JSConstructCall();
2310
+ __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
2311
+ context()->Plug(rax);
2312
+ }
2313
+
2314
+
2315
+ void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
2316
+ ASSERT(args->length() == 1);
2317
+
2318
+ VisitForAccumulatorValue(args->at(0));
2319
+
2320
+ Label materialize_true, materialize_false;
2321
+ Label* if_true = NULL;
2322
+ Label* if_false = NULL;
2323
+ Label* fall_through = NULL;
2324
+ context()->PrepareTest(&materialize_true, &materialize_false,
2325
+ &if_true, &if_false, &fall_through);
2326
+
2327
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2328
+ __ JumpIfSmi(rax, if_true);
2329
+ __ jmp(if_false);
2330
+
2331
+ context()->Plug(if_true, if_false);
2332
+ }
2333
+
2334
+
2335
+ void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
2336
+ ASSERT(args->length() == 1);
2337
+
2338
+ VisitForAccumulatorValue(args->at(0));
2339
+
2340
+ Label materialize_true, materialize_false;
2341
+ Label* if_true = NULL;
2342
+ Label* if_false = NULL;
2343
+ Label* fall_through = NULL;
2344
+ context()->PrepareTest(&materialize_true, &materialize_false,
2345
+ &if_true, &if_false, &fall_through);
2346
+
2347
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2348
+ Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2349
+ Split(non_negative_smi, if_true, if_false, fall_through);
2350
+
2351
+ context()->Plug(if_true, if_false);
2352
+ }
2353
+
2354
+
2355
+ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2356
+ ASSERT(args->length() == 1);
2357
+
2358
+ VisitForAccumulatorValue(args->at(0));
2359
+
2360
+ Label materialize_true, materialize_false;
2361
+ Label* if_true = NULL;
2362
+ Label* if_false = NULL;
2363
+ Label* fall_through = NULL;
2364
+ context()->PrepareTest(&materialize_true, &materialize_false,
2365
+ &if_true, &if_false, &fall_through);
2366
+
2367
+ __ JumpIfSmi(rax, if_false);
2368
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
2369
+ __ j(equal, if_true);
2370
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2371
+ // Undetectable objects behave like undefined when tested with typeof.
2372
+ __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2373
+ Immediate(1 << Map::kIsUndetectable));
2374
+ __ j(not_zero, if_false);
2375
+ __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2376
+ __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
2377
+ __ j(below, if_false);
2378
+ __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
2379
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2380
+ Split(below_equal, if_true, if_false, fall_through);
2381
+
2382
+ context()->Plug(if_true, if_false);
2383
+ }
2384
+
2385
+
2386
+ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2387
+ ASSERT(args->length() == 1);
2388
+
2389
+ VisitForAccumulatorValue(args->at(0));
2390
+
2391
+ Label materialize_true, materialize_false;
2392
+ Label* if_true = NULL;
2393
+ Label* if_false = NULL;
2394
+ Label* fall_through = NULL;
2395
+ context()->PrepareTest(&materialize_true, &materialize_false,
2396
+ &if_true, &if_false, &fall_through);
2397
+
2398
+ __ JumpIfSmi(rax, if_false);
2399
+ __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
2400
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2401
+ Split(above_equal, if_true, if_false, fall_through);
2402
+
2403
+ context()->Plug(if_true, if_false);
2404
+ }
2405
+
2406
+
2407
+ void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2408
+ ASSERT(args->length() == 1);
2409
+
2410
+ VisitForAccumulatorValue(args->at(0));
2411
+
2412
+ Label materialize_true, materialize_false;
2413
+ Label* if_true = NULL;
2414
+ Label* if_false = NULL;
2415
+ Label* fall_through = NULL;
2416
+ context()->PrepareTest(&materialize_true, &materialize_false,
2417
+ &if_true, &if_false, &fall_through);
2418
+
2419
+ __ JumpIfSmi(rax, if_false);
2420
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2421
+ __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2422
+ Immediate(1 << Map::kIsUndetectable));
2423
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2424
+ Split(not_zero, if_true, if_false, fall_through);
2425
+
2426
+ context()->Plug(if_true, if_false);
2427
+ }
2428
+
2429
+
2430
+ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2431
+ ZoneList<Expression*>* args) {
2432
+ ASSERT(args->length() == 1);
2433
+
2434
+ VisitForAccumulatorValue(args->at(0));
2435
+
2436
+ Label materialize_true, materialize_false;
2437
+ Label* if_true = NULL;
2438
+ Label* if_false = NULL;
2439
+ Label* fall_through = NULL;
2440
+ context()->PrepareTest(&materialize_true, &materialize_false,
2441
+ &if_true, &if_false, &fall_through);
2442
+
2443
+ if (FLAG_debug_code) __ AbortIfSmi(rax);
2444
+
2445
+ // Check whether this map has already been checked to be safe for default
2446
+ // valueOf.
2447
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2448
+ __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2449
+ Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2450
+ __ j(not_zero, if_true);
2451
+
2452
+ // Check for fast case object. Generate false result for slow case object.
2453
+ __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2454
+ __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2455
+ __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2456
+ __ j(equal, if_false);
2457
+
2458
+ // Look for valueOf symbol in the descriptor array, and indicate false if
2459
+ // found. The type is not checked, so if it is a transition it is a false
2460
+ // negative.
2461
+ __ LoadInstanceDescriptors(rbx, rbx);
2462
+ __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
2463
+ // rbx: descriptor array
2464
+ // rcx: length of descriptor array
2465
+ // Calculate the end of the descriptor array.
2466
+ SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
2467
+ __ lea(rcx,
2468
+ Operand(
2469
+ rbx, index.reg, index.scale, FixedArray::kHeaderSize));
2470
+ // Calculate location of the first key name.
2471
+ __ addq(rbx,
2472
+ Immediate(FixedArray::kHeaderSize +
2473
+ DescriptorArray::kFirstIndex * kPointerSize));
2474
+ // Loop through all the keys in the descriptor array. If one of these is the
2475
+ // symbol valueOf the result is false.
2476
+ Label entry, loop;
2477
+ __ jmp(&entry);
2478
+ __ bind(&loop);
2479
+ __ movq(rdx, FieldOperand(rbx, 0));
2480
+ __ Cmp(rdx, FACTORY->value_of_symbol());
2481
+ __ j(equal, if_false);
2482
+ __ addq(rbx, Immediate(kPointerSize));
2483
+ __ bind(&entry);
2484
+ __ cmpq(rbx, rcx);
2485
+ __ j(not_equal, &loop);
2486
+
2487
+ // Reload map as register rbx was used as temporary above.
2488
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2489
+
2490
+ // If a valueOf property is not found on the object check that it's
2491
+ // prototype is the un-modified String prototype. If not result is false.
2492
+ __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2493
+ __ testq(rcx, Immediate(kSmiTagMask));
2494
+ __ j(zero, if_false);
2495
+ __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2496
+ __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2497
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
2498
+ __ cmpq(rcx,
2499
+ ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2500
+ __ j(not_equal, if_false);
2501
+ // Set the bit in the map to indicate that it has been checked safe for
2502
+ // default valueOf and set true result.
2503
+ __ or_(FieldOperand(rbx, Map::kBitField2Offset),
2504
+ Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2505
+ __ jmp(if_true);
2506
+
2507
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2508
+ context()->Plug(if_true, if_false);
2509
+ }
2510
+
2511
+
2512
+ void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
2513
+ ASSERT(args->length() == 1);
2514
+
2515
+ VisitForAccumulatorValue(args->at(0));
2516
+
2517
+ Label materialize_true, materialize_false;
2518
+ Label* if_true = NULL;
2519
+ Label* if_false = NULL;
2520
+ Label* fall_through = NULL;
2521
+ context()->PrepareTest(&materialize_true, &materialize_false,
2522
+ &if_true, &if_false, &fall_through);
2523
+
2524
+ __ JumpIfSmi(rax, if_false);
2525
+ __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2526
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2527
+ Split(equal, if_true, if_false, fall_through);
2528
+
2529
+ context()->Plug(if_true, if_false);
2530
+ }
2531
+
2532
+
2533
+ void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2534
+ ASSERT(args->length() == 1);
2535
+
2536
+ VisitForAccumulatorValue(args->at(0));
2537
+
2538
+ Label materialize_true, materialize_false;
2539
+ Label* if_true = NULL;
2540
+ Label* if_false = NULL;
2541
+ Label* fall_through = NULL;
2542
+ context()->PrepareTest(&materialize_true, &materialize_false,
2543
+ &if_true, &if_false, &fall_through);
2544
+
2545
+ __ JumpIfSmi(rax, if_false);
2546
+ __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
2547
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2548
+ Split(equal, if_true, if_false, fall_through);
2549
+
2550
+ context()->Plug(if_true, if_false);
2551
+ }
2552
+
2553
+
2554
+ void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2555
+ ASSERT(args->length() == 1);
2556
+
2557
+ VisitForAccumulatorValue(args->at(0));
2558
+
2559
+ Label materialize_true, materialize_false;
2560
+ Label* if_true = NULL;
2561
+ Label* if_false = NULL;
2562
+ Label* fall_through = NULL;
2563
+ context()->PrepareTest(&materialize_true, &materialize_false,
2564
+ &if_true, &if_false, &fall_through);
2565
+
2566
+ __ JumpIfSmi(rax, if_false);
2567
+ __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
2568
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2569
+ Split(equal, if_true, if_false, fall_through);
2570
+
2571
+ context()->Plug(if_true, if_false);
2572
+ }
2573
+
2574
+
2575
+
2576
// %_IsConstructCall(): tests whether the current function was invoked as a
// constructor. Walks to the caller's frame (skipping an arguments adaptor
// frame if one is interposed) and compares its frame marker against
// StackFrame::CONSTRUCT.
void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists: adaptor frames are
  // identified by the ARGUMENTS_ADAPTOR sentinel in the context slot.
  Label check_frame_marker;
  __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &check_frame_marker);
  __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
         Smi::FromInt(StackFrame::CONSTRUCT));
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2605
+
2606
+
2607
// %_ObjectEquals(a, b): raw pointer-identity comparison of two values
// (no type coercion). Result goes through the current test context.
void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // First operand comes back off the stack; second is still in rax.
  __ pop(rbx);
  __ cmpq(rax, rbx);
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2628
+
2629
+
2630
+ void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2631
+ ASSERT(args->length() == 1);
2632
+
2633
+ // ArgumentsAccessStub expects the key in rdx and the formal
2634
+ // parameter count in rax.
2635
+ VisitForAccumulatorValue(args->at(0));
2636
+ __ movq(rdx, rax);
2637
+ __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2638
+ ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2639
+ __ CallStub(&stub);
2640
+ context()->Plug(rax);
2641
+ }
2642
+
2643
+
2644
// %_ArgumentsLength(): the number of arguments passed to the current
// function, as a smi in rax. Defaults to the formal parameter count;
// if the caller went through an arguments adaptor frame, the actual
// count is read from that frame instead.
void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Move(rax, Smi::FromInt(scope()->num_parameters()));

  // Check if the calling frame is an arguments adaptor frame.
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &exit, Label::kNear);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  // Both sources store the length as a smi; verify in debug builds.
  if (FLAG_debug_code) __ AbortIfNotSmi(rax);
  context()->Plug(rax);
}
2665
+
2666
+
2667
// %_ClassOf(value): compute the class name of a value: null for
// non-objects, 'Function' for JS functions, 'Object' for objects with a
// non-function constructor, otherwise the constructor's instance class
// name. Result string (or null) ends up in rax.
void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(rax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);  // Map is now in rax.
  __ j(below, &null);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
  __ j(equal, &function);

  // Check if the constructor in the map is a function.
  __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &non_function_constructor);

  // rax now contains the constructor function. Grab the
  // instance class name from there.
  __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ Move(rax, isolate()->factory()->function_class_symbol());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ Move(rax, isolate()->factory()->Object_symbol());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(rax, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(rax);
}
2719
+
2720
+
2721
// %_Log(type, format, args): emit a Runtime::kLog call, but only when the
// build has logging/profiling compiled in and the literal 'type' argument
// matches the active logging flags. Always leaves undefined in rax.
void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  // The decision to emit the call happens at compile time, from the
  // literal type argument.
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  context()->Plug(rax);
}
2741
+
2742
+
2743
// %_RandomHeapNumber(): allocate a HeapNumber and fill it with a random
// value in [0, 1) built from 32 random bits. Result in rax.
void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  // Try inline allocation first; fall back to the runtime on failure.
  __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ movq(rbx, rax);

  __ bind(&heapnumber_allocated);

  // Return a random uint32 number in rax.
  // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
  __ PrepareCallCFunction(1);
#ifdef _WIN64
  // Win64 passes the first argument in rcx; System V uses rdi.
  __ LoadAddress(rcx, ExternalReference::isolate_address());
#else
  __ LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

  // Convert 32 random bits in rax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  __ movl(rcx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
  __ movd(xmm1, rcx);
  __ movd(xmm0, rax);
  __ cvtss2sd(xmm1, xmm1);
  __ xorps(xmm0, xmm1);
  __ subsd(xmm0, xmm1);
  __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);

  __ movq(rax, rbx);
  context()->Plug(rax);
}
2783
+
2784
+
2785
+ void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
2786
+ // Load the arguments on the stack and call the stub.
2787
+ SubStringStub stub;
2788
+ ASSERT(args->length() == 3);
2789
+ VisitForStackValue(args->at(0));
2790
+ VisitForStackValue(args->at(1));
2791
+ VisitForStackValue(args->at(2));
2792
+ __ CallStub(&stub);
2793
+ context()->Plug(rax);
2794
+ }
2795
+
2796
+
2797
+ void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
2798
+ // Load the arguments on the stack and call the stub.
2799
+ RegExpExecStub stub;
2800
+ ASSERT(args->length() == 4);
2801
+ VisitForStackValue(args->at(0));
2802
+ VisitForStackValue(args->at(1));
2803
+ VisitForStackValue(args->at(2));
2804
+ VisitForStackValue(args->at(3));
2805
+ __ CallStub(&stub);
2806
+ context()->Plug(rax);
2807
+ }
2808
+
2809
+
2810
// %_ValueOf(obj): if obj is a JSValue wrapper, return the wrapped
// primitive; any other value (including smis) is returned unchanged.
void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(rax, &done);
  // If the object is not a value type, return the object.
  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
  __ j(not_equal, &done);
  // Unwrap: load the stored primitive value.
  __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(rax);
}
2826
+
2827
+
2828
+ void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2829
+ // Load the arguments on the stack and call the runtime function.
2830
+ ASSERT(args->length() == 2);
2831
+ VisitForStackValue(args->at(0));
2832
+ VisitForStackValue(args->at(1));
2833
+ MathPowStub stub;
2834
+ __ CallStub(&stub);
2835
+ context()->Plug(rax);
2836
+ }
2837
+
2838
+
2839
// %_SetValueOf(obj, value): store 'value' into a JSValue wrapper (with a
// write barrier) and return the value. If obj is not a JSValue the store
// is skipped and the value is returned unchanged.
void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(rbx);  // rax = value. rbx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(rbx, &done);

  // If the object is not a value type, return the value.
  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
  __ j(not_equal, &done);

  // Store the value.
  __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ movq(rdx, rax);
  __ RecordWrite(rbx, JSValue::kValueOffset, rdx, rcx);

  __ bind(&done);
  context()->Plug(rax);
}
2864
+
2865
+
2866
+ void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
2867
+ ASSERT_EQ(args->length(), 1);
2868
+
2869
+ // Load the argument on the stack and call the stub.
2870
+ VisitForStackValue(args->at(0));
2871
+
2872
+ NumberToStringStub stub;
2873
+ __ CallStub(&stub);
2874
+ context()->Plug(rax);
2875
+ }
2876
+
2877
+
2878
// %_StringCharFromCode(code): build a one-character string from a
// character code. The fast path is emitted inline; the generator's slow
// path is emitted as deferred code after the jump to 'done'. Note the
// generator produces its result in rbx, which is what gets plugged.
void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  // code in rax, result in rbx.
  StringCharFromCodeGenerator generator(rax, rbx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  // Slow path: generator calls the runtime itself; no extra helper needed.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(rbx);
}
2894
+
2895
+
2896
// %_StringCharCodeAt(string, index): load the character code at 'index'.
// Out-of-range indices yield NaN; a non-smi index yields undefined in the
// result register to trigger conversion in the generator's slow path.
// Result is produced in rdx.
void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register scratch = rcx;
  Register result = rdx;

  // String was pushed first; index is already in rax.
  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      scratch,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  // Deferred slow path for the generator.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2941
+
2942
+
2943
// %_StringCharAt(string, index): load the one-character string at
// 'index'. Out-of-range indices yield the empty string; a non-smi index
// gets smi zero placed in the result register to trigger conversion in
// the generator's slow path. Result is produced in rax.
void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register scratch1 = rcx;
  Register scratch2 = rdx;
  Register result = rax;

  // String was pushed first; index is already in rax.
  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch1,
                                  scratch2,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Smi::FromInt(0));
  __ jmp(&done);

  // Deferred slow path for the generator.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2990
+
2991
+
2992
+ void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
2993
+ ASSERT_EQ(2, args->length());
2994
+
2995
+ VisitForStackValue(args->at(0));
2996
+ VisitForStackValue(args->at(1));
2997
+
2998
+ StringAddStub stub(NO_STRING_ADD_FLAGS);
2999
+ __ CallStub(&stub);
3000
+ context()->Plug(rax);
3001
+ }
3002
+
3003
+
3004
+ void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
3005
+ ASSERT_EQ(2, args->length());
3006
+
3007
+ VisitForStackValue(args->at(0));
3008
+ VisitForStackValue(args->at(1));
3009
+
3010
+ StringCompareStub stub;
3011
+ __ CallStub(&stub);
3012
+ context()->Plug(rax);
3013
+ }
3014
+
3015
+
3016
+ void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
3017
+ // Load the argument on the stack and call the stub.
3018
+ TranscendentalCacheStub stub(TranscendentalCache::SIN,
3019
+ TranscendentalCacheStub::TAGGED);
3020
+ ASSERT(args->length() == 1);
3021
+ VisitForStackValue(args->at(0));
3022
+ __ CallStub(&stub);
3023
+ context()->Plug(rax);
3024
+ }
3025
+
3026
+
3027
+ void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
3028
+ // Load the argument on the stack and call the stub.
3029
+ TranscendentalCacheStub stub(TranscendentalCache::COS,
3030
+ TranscendentalCacheStub::TAGGED);
3031
+ ASSERT(args->length() == 1);
3032
+ VisitForStackValue(args->at(0));
3033
+ __ CallStub(&stub);
3034
+ context()->Plug(rax);
3035
+ }
3036
+
3037
+
3038
+ void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
3039
+ // Load the argument on the stack and call the stub.
3040
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
3041
+ TranscendentalCacheStub::TAGGED);
3042
+ ASSERT(args->length() == 1);
3043
+ VisitForStackValue(args->at(0));
3044
+ __ CallStub(&stub);
3045
+ context()->Plug(rax);
3046
+ }
3047
+
3048
+
3049
+ void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
3050
+ // Load the argument on the stack and call the runtime function.
3051
+ ASSERT(args->length() == 1);
3052
+ VisitForStackValue(args->at(0));
3053
+ __ CallRuntime(Runtime::kMath_sqrt, 1);
3054
+ context()->Plug(rax);
3055
+ }
3056
+
3057
+
3058
// %_CallFunction(receiver, arg1, ..., argN, function): invoke 'function'
// with the given receiver and arguments. Result in rax.
void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  // Push the receiver and all call arguments (indices 0..arg_count).
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  // InvokeFunction requires the function in rdi. Move it in there.
  __ movq(rdi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(rdi, count, CALL_FUNCTION);
  // Restore the context register from the frame after the call.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->Plug(rax);
}
3074
+
3075
+
3076
+ void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
3077
+ RegExpConstructResultStub stub;
3078
+ ASSERT(args->length() == 3);
3079
+ VisitForStackValue(args->at(0));
3080
+ VisitForStackValue(args->at(1));
3081
+ VisitForStackValue(args->at(2));
3082
+ __ CallStub(&stub);
3083
+ context()->Plug(rax);
3084
+ }
3085
+
3086
+
3087
// %_SwapElements(array, index1, index2): swap two elements of a JSArray
// in place on the fast path; bail out to Runtime::kSwapElements for
// anything unusual (non-array, security checks/interceptors, non-fast or
// non-writable elements, non-smi or out-of-range indices). Leaves
// undefined in rax.
void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  Label done;
  Label slow_case;
  Register object = rax;
  Register index_1 = rbx;
  Register index_2 = rcx;
  Register elements = rdi;
  Register temp = rdx;
  // Arguments were pushed in order, so the array is deepest on the stack.
  __ movq(object, Operand(rsp, 2 * kPointerSize));
  // Fetch the map and check if array is in fast case.
  // Check that object doesn't require security checks and
  // has no indexed interceptor.
  __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
  __ j(not_equal, &slow_case);
  __ testb(FieldOperand(temp, Map::kBitFieldOffset),
           Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
  __ j(not_zero, &slow_case);

  // Check the object's elements are in fast case and writable.
  __ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
  __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &slow_case);

  // Check that both indices are smis.
  __ movq(index_1, Operand(rsp, 1 * kPointerSize));
  __ movq(index_2, Operand(rsp, 0 * kPointerSize));
  __ JumpIfNotBothSmi(index_1, index_2, &slow_case);

  // Check that both indices are valid.
  // The JSArray length field is a smi since the array is in fast case mode.
  __ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
  __ SmiCompare(temp, index_1);
  __ j(below_equal, &slow_case);
  __ SmiCompare(temp, index_2);
  __ j(below_equal, &slow_case);

  __ SmiToInteger32(index_1, index_1);
  __ SmiToInteger32(index_2, index_2);
  // Bring addresses into index1 and index2.
  __ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
                               FixedArray::kHeaderSize));
  __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
                               FixedArray::kHeaderSize));

  // Swap elements. Use object and temp as scratch registers.
  __ movq(object, Operand(index_1, 0));
  __ movq(temp, Operand(index_2, 0));
  __ movq(Operand(index_2, 0), object);
  __ movq(Operand(index_1, 0), temp);

  // Write barriers are only needed when the elements array is not in
  // new space.
  Label new_space;
  __ InNewSpace(elements, temp, equal, &new_space);

  __ movq(object, elements);
  __ RecordWriteHelper(object, index_1, temp);
  __ RecordWriteHelper(elements, index_2, temp);

  __ bind(&new_space);
  // We are done. Drop elements from the stack, and return undefined.
  __ addq(rsp, Immediate(3 * kPointerSize));
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  __ bind(&slow_case);
  __ CallRuntime(Runtime::kSwapElements, 3);

  __ bind(&done);
  context()->Plug(rax);
}
3161
+
3162
+
3163
// %_GetFromCache(cache_id, key): look up 'key' in the JS-function result
// cache identified by the literal 'cache_id'. The finger entry is probed
// inline; on a miss, Runtime::kGetFromCache performs the full lookup.
// Result in rax.
void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  // cache_id must be a compile-time literal.
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    // Unknown cache: emit an abort and yield undefined.
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    context()->Plug(rax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = rax;
  Register cache = rbx;
  Register tmp = rcx;
  // Navigate: current context -> global object -> global context ->
  // result-cache list -> this cache.
  __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
  __ movq(cache,
          FieldOperand(cache, GlobalObject::kGlobalContextOffset));
  __ movq(cache,
          ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ movq(cache,
          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  // tmp now holds finger offset as a smi.
  SmiIndex index =
      __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
  // Probe the key slot at the finger; the value sits one slot after it.
  __ cmpq(key, FieldOperand(cache,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
  __ j(not_equal, &not_found, Label::kNear);
  __ movq(rax, FieldOperand(cache,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize + kPointerSize));
  __ jmp(&done, Label::kNear);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(rax);
}
3217
+
3218
+
3219
+ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
3220
+ ASSERT_EQ(2, args->length());
3221
+
3222
+ Register right = rax;
3223
+ Register left = rbx;
3224
+ Register tmp = rcx;
3225
+
3226
+ VisitForStackValue(args->at(0));
3227
+ VisitForAccumulatorValue(args->at(1));
3228
+ __ pop(left);
3229
+
3230
+ Label done, fail, ok;
3231
+ __ cmpq(left, right);
3232
+ __ j(equal, &ok, Label::kNear);
3233
+ // Fail if either is a non-HeapObject.
3234
+ Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
3235
+ __ j(either_smi, &fail, Label::kNear);
3236
+ __ j(zero, &fail, Label::kNear);
3237
+ __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
3238
+ __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
3239
+ Immediate(JS_REGEXP_TYPE));
3240
+ __ j(not_equal, &fail, Label::kNear);
3241
+ __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
3242
+ __ j(not_equal, &fail, Label::kNear);
3243
+ __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3244
+ __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3245
+ __ j(equal, &ok, Label::kNear);
3246
+ __ bind(&fail);
3247
+ __ Move(rax, isolate()->factory()->false_value());
3248
+ __ jmp(&done, Label::kNear);
3249
+ __ bind(&ok);
3250
+ __ Move(rax, isolate()->factory()->true_value());
3251
+ __ bind(&done);
3252
+
3253
+ context()->Plug(rax);
3254
+ }
3255
+
3256
+
3257
// %_HasCachedArrayIndex(string): tests whether the string's hash field
// has a cached array index (mask bits clear). Result goes through the
// current test context.
void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  // Zero means none of the "no cached index" bits are set.
  __ j(zero, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}
3277
+
3278
+
3279
// %_GetCachedArrayIndex(string): extract the array index cached in the
// string's hash field, as a smi in rax. The caller is responsible for
// only using this on strings that have a cached index.
void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(rax);
  }

  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  // IndexFromHash relies on the shift leaving room for the smi tag.
  ASSERT(String::kHashShift >= kSmiTagSize);
  __ IndexFromHash(rax, rax);

  context()->Plug(rax);
}
3293
+
3294
+
3295
+ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
3296
+ Label bailout, return_result, done, one_char_separator, long_separator,
3297
+ non_trivial_array, not_size_one_array, loop,
3298
+ loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3299
+ ASSERT(args->length() == 2);
3300
+ // We will leave the separator on the stack until the end of the function.
3301
+ VisitForStackValue(args->at(1));
3302
+ // Load this to rax (= array)
3303
+ VisitForAccumulatorValue(args->at(0));
3304
+ // All aliases of the same register have disjoint lifetimes.
3305
+ Register array = rax;
3306
+ Register elements = no_reg; // Will be rax.
3307
+
3308
+ Register index = rdx;
3309
+
3310
+ Register string_length = rcx;
3311
+
3312
+ Register string = rsi;
3313
+
3314
+ Register scratch = rbx;
3315
+
3316
+ Register array_length = rdi;
3317
+ Register result_pos = no_reg; // Will be rdi.
3318
+
3319
+ Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3320
+ Operand result_operand = Operand(rsp, 1 * kPointerSize);
3321
+ Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3322
+ // Separator operand is already pushed. Make room for the two
3323
+ // other stack fields, and clear the direction flag in anticipation
3324
+ // of calling CopyBytes.
3325
+ __ subq(rsp, Immediate(2 * kPointerSize));
3326
+ __ cld();
3327
+ // Check that the array is a JSArray
3328
+ __ JumpIfSmi(array, &bailout);
3329
+ __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3330
+ __ j(not_equal, &bailout);
3331
+
3332
+ // Check that the array has fast elements.
3333
+ __ testb(FieldOperand(scratch, Map::kBitField2Offset),
3334
+ Immediate(1 << Map::kHasFastElements));
3335
+ __ j(zero, &bailout);
3336
+
3337
+ // Array has fast elements, so its length must be a smi.
3338
+ // If the array has length zero, return the empty string.
3339
+ __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
3340
+ __ SmiCompare(array_length, Smi::FromInt(0));
3341
+ __ j(not_zero, &non_trivial_array);
3342
+ __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
3343
+ __ jmp(&return_result);
3344
+
3345
+ // Save the array length on the stack.
3346
+ __ bind(&non_trivial_array);
3347
+ __ SmiToInteger32(array_length, array_length);
3348
+ __ movl(array_length_operand, array_length);
3349
+
3350
+ // Save the FixedArray containing array's elements.
3351
+ // End of array's live range.
3352
+ elements = array;
3353
+ __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
3354
+ array = no_reg;
3355
+
3356
+
3357
+ // Check that all array elements are sequential ASCII strings, and
3358
+ // accumulate the sum of their lengths, as a smi-encoded value.
3359
+ __ Set(index, 0);
3360
+ __ Set(string_length, 0);
3361
+ // Loop condition: while (index < array_length).
3362
+ // Live loop registers: index(int32), array_length(int32), string(String*),
3363
+ // scratch, string_length(int32), elements(FixedArray*).
3364
+ if (FLAG_debug_code) {
3365
+ __ cmpq(index, array_length);
3366
+ __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
3367
+ }
3368
+ __ bind(&loop);
3369
+ __ movq(string, FieldOperand(elements,
3370
+ index,
3371
+ times_pointer_size,
3372
+ FixedArray::kHeaderSize));
3373
+ __ JumpIfSmi(string, &bailout);
3374
+ __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3375
+ __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3376
+ __ andb(scratch, Immediate(
3377
+ kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3378
+ __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3379
+ __ j(not_equal, &bailout);
3380
+ __ AddSmiField(string_length,
3381
+ FieldOperand(string, SeqAsciiString::kLengthOffset));
3382
+ __ j(overflow, &bailout);
3383
+ __ incl(index);
3384
+ __ cmpl(index, array_length);
3385
+ __ j(less, &loop);
3386
+
3387
+ // Live registers:
3388
+ // string_length: Sum of string lengths.
3389
+ // elements: FixedArray of strings.
3390
+ // index: Array length.
3391
+ // array_length: Array length.
3392
+
3393
+ // If array_length is 1, return elements[0], a string.
3394
+ __ cmpl(array_length, Immediate(1));
3395
+ __ j(not_equal, &not_size_one_array);
3396
+ __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3397
+ __ jmp(&return_result);
3398
+
3399
+ __ bind(&not_size_one_array);
3400
+
3401
+ // End of array_length live range.
3402
+ result_pos = array_length;
3403
+ array_length = no_reg;
3404
+
3405
+ // Live registers:
3406
+ // string_length: Sum of string lengths.
3407
+ // elements: FixedArray of strings.
3408
+ // index: Array length.
3409
+
3410
+ // Check that the separator is a sequential ASCII string.
3411
+ __ movq(string, separator_operand);
3412
+ __ JumpIfSmi(string, &bailout);
3413
+ __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3414
+ __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3415
+ __ andb(scratch, Immediate(
3416
+ kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3417
+ __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3418
+ __ j(not_equal, &bailout);
3419
+
3420
+ // Live registers:
3421
+ // string_length: Sum of string lengths.
3422
+ // elements: FixedArray of strings.
3423
+ // index: Array length.
3424
+ // string: Separator string.
3425
+
3426
+ // Add (separator length times (array_length - 1)) to string_length.
3427
+ __ SmiToInteger32(scratch,
3428
+ FieldOperand(string, SeqAsciiString::kLengthOffset));
3429
+ __ decl(index);
3430
+ __ imull(scratch, index);
3431
+ __ j(overflow, &bailout);
3432
+ __ addl(string_length, scratch);
3433
+ __ j(overflow, &bailout);
3434
+
3435
+ // Live registers and stack values:
3436
+ // string_length: Total length of result string.
3437
+ // elements: FixedArray of strings.
3438
+ __ AllocateAsciiString(result_pos, string_length, scratch,
3439
+ index, string, &bailout);
3440
+ __ movq(result_operand, result_pos);
3441
+ __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3442
+
3443
+ __ movq(string, separator_operand);
3444
+ __ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
3445
+ Smi::FromInt(1));
3446
+ __ j(equal, &one_char_separator);
3447
+ __ j(greater, &long_separator);
3448
+
3449
+
3450
+ // Empty separator case:
3451
+ __ Set(index, 0);
3452
+ __ movl(scratch, array_length_operand);
3453
+ __ jmp(&loop_1_condition);
3454
+ // Loop condition: while (index < array_length).
3455
+ __ bind(&loop_1);
3456
+ // Each iteration of the loop concatenates one string to the result.
3457
+ // Live values in registers:
3458
+ // index: which element of the elements array we are adding to the result.
3459
+ // result_pos: the position to which we are currently copying characters.
3460
+ // elements: the FixedArray of strings we are joining.
3461
+ // scratch: array length.
3462
+
3463
+ // Get string = array[index].
3464
+ __ movq(string, FieldOperand(elements, index,
3465
+ times_pointer_size,
3466
+ FixedArray::kHeaderSize));
3467
+ __ SmiToInteger32(string_length,
3468
+ FieldOperand(string, String::kLengthOffset));
3469
+ __ lea(string,
3470
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
3471
+ __ CopyBytes(result_pos, string, string_length);
3472
+ __ incl(index);
3473
+ __ bind(&loop_1_condition);
3474
+ __ cmpl(index, scratch);
3475
+ __ j(less, &loop_1); // Loop while (index < array_length).
3476
+ __ jmp(&done);
3477
+
3478
+ // Generic bailout code used from several places.
3479
+ __ bind(&bailout);
3480
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3481
+ __ jmp(&return_result);
3482
+
3483
+
3484
+ // One-character separator case
3485
+ __ bind(&one_char_separator);
3486
+ // Get the separator ascii character value.
3487
+ // Register "string" holds the separator.
3488
+ __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3489
+ __ Set(index, 0);
3490
+ // Jump into the loop after the code that copies the separator, so the first
3491
+ // element is not preceded by a separator
3492
+ __ jmp(&loop_2_entry);
3493
+ // Loop condition: while (index < length).
3494
+ __ bind(&loop_2);
3495
+ // Each iteration of the loop concatenates one string to the result.
3496
+ // Live values in registers:
3497
+ // elements: The FixedArray of strings we are joining.
3498
+ // index: which element of the elements array we are adding to the result.
3499
+ // result_pos: the position to which we are currently copying characters.
3500
+ // scratch: Separator character.
3501
+
3502
+ // Copy the separator character to the result.
3503
+ __ movb(Operand(result_pos, 0), scratch);
3504
+ __ incq(result_pos);
3505
+
3506
+ __ bind(&loop_2_entry);
3507
+ // Get string = array[index].
3508
+ __ movq(string, FieldOperand(elements, index,
3509
+ times_pointer_size,
3510
+ FixedArray::kHeaderSize));
3511
+ __ SmiToInteger32(string_length,
3512
+ FieldOperand(string, String::kLengthOffset));
3513
+ __ lea(string,
3514
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
3515
+ __ CopyBytes(result_pos, string, string_length);
3516
+ __ incl(index);
3517
+ __ cmpl(index, array_length_operand);
3518
+ __ j(less, &loop_2); // End while (index < length).
3519
+ __ jmp(&done);
3520
+
3521
+
3522
+ // Long separator case (separator is more than one character).
3523
+ __ bind(&long_separator);
3524
+
3525
+ // Make elements point to end of elements array, and index
3526
+ // count from -array_length to zero, so we don't need to maintain
3527
+ // a loop limit.
3528
+ __ movl(index, array_length_operand);
3529
+ __ lea(elements, FieldOperand(elements, index, times_pointer_size,
3530
+ FixedArray::kHeaderSize));
3531
+ __ neg(index);
3532
+
3533
+ // Replace separator string with pointer to its first character, and
3534
+ // make scratch be its length.
3535
+ __ movq(string, separator_operand);
3536
+ __ SmiToInteger32(scratch,
3537
+ FieldOperand(string, String::kLengthOffset));
3538
+ __ lea(string,
3539
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
3540
+ __ movq(separator_operand, string);
3541
+
3542
+ // Jump into the loop after the code that copies the separator, so the first
3543
+ // element is not preceded by a separator
3544
+ __ jmp(&loop_3_entry);
3545
+ // Loop condition: while (index < length).
3546
+ __ bind(&loop_3);
3547
+ // Each iteration of the loop concatenates one string to the result.
3548
+ // Live values in registers:
3549
+ // index: which element of the elements array we are adding to the result.
3550
+ // result_pos: the position to which we are currently copying characters.
3551
+ // scratch: Separator length.
3552
+ // separator_operand (rsp[0x10]): Address of first char of separator.
3553
+
3554
+ // Copy the separator to the result.
3555
+ __ movq(string, separator_operand);
3556
+ __ movl(string_length, scratch);
3557
+ __ CopyBytes(result_pos, string, string_length, 2);
3558
+
3559
+ __ bind(&loop_3_entry);
3560
+ // Get string = array[index].
3561
+ __ movq(string, Operand(elements, index, times_pointer_size, 0));
3562
+ __ SmiToInteger32(string_length,
3563
+ FieldOperand(string, String::kLengthOffset));
3564
+ __ lea(string,
3565
+ FieldOperand(string, SeqAsciiString::kHeaderSize));
3566
+ __ CopyBytes(result_pos, string, string_length);
3567
+ __ incq(index);
3568
+ __ j(not_equal, &loop_3); // Loop while (index < 0).
3569
+
3570
+ __ bind(&done);
3571
+ __ movq(rax, result_operand);
3572
+
3573
+ __ bind(&return_result);
3574
+ // Drop temp values from the stack, and restore context register.
3575
+ __ addq(rsp, Immediate(3 * kPointerSize));
3576
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3577
+ context()->Plug(rax);
3578
+ }
3579
+
3580
+
3581
+ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3582
+ Handle<String> name = expr->name();
3583
+ if (name->length() > 0 && name->Get(0) == '_') {
3584
+ Comment cmnt(masm_, "[ InlineRuntimeCall");
3585
+ EmitInlineRuntimeCall(expr);
3586
+ return;
3587
+ }
3588
+
3589
+ Comment cmnt(masm_, "[ CallRuntime");
3590
+ ZoneList<Expression*>* args = expr->arguments();
3591
+
3592
+ if (expr->is_jsruntime()) {
3593
+ // Prepare for calling JS runtime function.
3594
+ __ movq(rax, GlobalObjectOperand());
3595
+ __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
3596
+ }
3597
+
3598
+ // Push the arguments ("left-to-right").
3599
+ int arg_count = args->length();
3600
+ for (int i = 0; i < arg_count; i++) {
3601
+ VisitForStackValue(args->at(i));
3602
+ }
3603
+
3604
+ if (expr->is_jsruntime()) {
3605
+ // Call the JS runtime function using a call IC.
3606
+ __ Move(rcx, expr->name());
3607
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
3608
+ RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3609
+ Handle<Code> ic =
3610
+ ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
3611
+ EmitCallIC(ic, mode, expr->id());
3612
+ // Restore context register.
3613
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3614
+ } else {
3615
+ __ CallRuntime(expr->function(), arg_count);
3616
+ }
3617
+ context()->Plug(rax);
3618
+ }
3619
+
3620
+
3621
+ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3622
+ switch (expr->op()) {
3623
+ case Token::DELETE: {
3624
+ Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3625
+ Property* prop = expr->expression()->AsProperty();
3626
+ Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
3627
+
3628
+ if (prop != NULL) {
3629
+ if (prop->is_synthetic()) {
3630
+ // Result of deleting parameters is false, even when they rewrite
3631
+ // to accesses on the arguments object.
3632
+ context()->Plug(false);
3633
+ } else {
3634
+ VisitForStackValue(prop->obj());
3635
+ VisitForStackValue(prop->key());
3636
+ __ Push(Smi::FromInt(strict_mode_flag()));
3637
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3638
+ context()->Plug(rax);
3639
+ }
3640
+ } else if (var != NULL) {
3641
+ // Delete of an unqualified identifier is disallowed in strict mode
3642
+ // but "delete this" is.
3643
+ ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3644
+ if (var->is_global()) {
3645
+ __ push(GlobalObjectOperand());
3646
+ __ Push(var->name());
3647
+ __ Push(Smi::FromInt(kNonStrictMode));
3648
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3649
+ context()->Plug(rax);
3650
+ } else if (var->AsSlot() != NULL &&
3651
+ var->AsSlot()->type() != Slot::LOOKUP) {
3652
+ // Result of deleting non-global, non-dynamic variables is false.
3653
+ // The subexpression does not have side effects.
3654
+ context()->Plug(false);
3655
+ } else {
3656
+ // Non-global variable. Call the runtime to try to delete from the
3657
+ // context where the variable was introduced.
3658
+ __ push(context_register());
3659
+ __ Push(var->name());
3660
+ __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3661
+ context()->Plug(rax);
3662
+ }
3663
+ } else {
3664
+ // Result of deleting non-property, non-variable reference is true.
3665
+ // The subexpression may have side effects.
3666
+ VisitForEffect(expr->expression());
3667
+ context()->Plug(true);
3668
+ }
3669
+ break;
3670
+ }
3671
+
3672
+ case Token::VOID: {
3673
+ Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3674
+ VisitForEffect(expr->expression());
3675
+ context()->Plug(Heap::kUndefinedValueRootIndex);
3676
+ break;
3677
+ }
3678
+
3679
+ case Token::NOT: {
3680
+ Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3681
+ if (context()->IsEffect()) {
3682
+ // Unary NOT has no side effects so it's only necessary to visit the
3683
+ // subexpression. Match the optimizing compiler by not branching.
3684
+ VisitForEffect(expr->expression());
3685
+ } else {
3686
+ Label materialize_true, materialize_false;
3687
+ Label* if_true = NULL;
3688
+ Label* if_false = NULL;
3689
+ Label* fall_through = NULL;
3690
+ // Notice that the labels are swapped.
3691
+ context()->PrepareTest(&materialize_true, &materialize_false,
3692
+ &if_false, &if_true, &fall_through);
3693
+ if (context()->IsTest()) ForwardBailoutToChild(expr);
3694
+ VisitForControl(expr->expression(), if_true, if_false, fall_through);
3695
+ context()->Plug(if_false, if_true); // Labels swapped.
3696
+ }
3697
+ break;
3698
+ }
3699
+
3700
+ case Token::TYPEOF: {
3701
+ Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3702
+ { StackValueContext context(this);
3703
+ VisitForTypeofValue(expr->expression());
3704
+ }
3705
+ __ CallRuntime(Runtime::kTypeof, 1);
3706
+ context()->Plug(rax);
3707
+ break;
3708
+ }
3709
+
3710
+ case Token::ADD: {
3711
+ Comment cmt(masm_, "[ UnaryOperation (ADD)");
3712
+ VisitForAccumulatorValue(expr->expression());
3713
+ Label no_conversion;
3714
+ Condition is_smi = masm_->CheckSmi(result_register());
3715
+ __ j(is_smi, &no_conversion);
3716
+ ToNumberStub convert_stub;
3717
+ __ CallStub(&convert_stub);
3718
+ __ bind(&no_conversion);
3719
+ context()->Plug(result_register());
3720
+ break;
3721
+ }
3722
+
3723
+ case Token::SUB:
3724
+ EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3725
+ break;
3726
+
3727
+ case Token::BIT_NOT:
3728
+ EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3729
+ break;
3730
+
3731
+ default:
3732
+ UNREACHABLE();
3733
+ }
3734
+ }
3735
+
3736
+
3737
+ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3738
+ const char* comment) {
3739
+ // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3740
+ Comment cmt(masm_, comment);
3741
+ bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3742
+ UnaryOverwriteMode overwrite =
3743
+ can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3744
+ UnaryOpStub stub(expr->op(), overwrite);
3745
+ // UnaryOpStub expects the argument to be in the
3746
+ // accumulator register rax.
3747
+ VisitForAccumulatorValue(expr->expression());
3748
+ SetSourcePosition(expr->position());
3749
+ EmitCallIC(stub.GetCode(), NULL, expr->id());
3750
+ context()->Plug(rax);
3751
+ }
3752
+
3753
+
3754
+ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3755
+ Comment cmnt(masm_, "[ CountOperation");
3756
+ SetSourcePosition(expr->position());
3757
+
3758
+ // Invalid left-hand-sides are rewritten to have a 'throw
3759
+ // ReferenceError' as the left-hand side.
3760
+ if (!expr->expression()->IsValidLeftHandSide()) {
3761
+ VisitForEffect(expr->expression());
3762
+ return;
3763
+ }
3764
+
3765
+ // Expression can only be a property, a global or a (parameter or local)
3766
+ // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
3767
+ enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3768
+ LhsKind assign_type = VARIABLE;
3769
+ Property* prop = expr->expression()->AsProperty();
3770
+ // In case of a property we use the uninitialized expression context
3771
+ // of the key to detect a named property.
3772
+ if (prop != NULL) {
3773
+ assign_type =
3774
+ (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3775
+ }
3776
+
3777
+ // Evaluate expression and get value.
3778
+ if (assign_type == VARIABLE) {
3779
+ ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3780
+ AccumulatorValueContext context(this);
3781
+ EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
3782
+ } else {
3783
+ // Reserve space for result of postfix operation.
3784
+ if (expr->is_postfix() && !context()->IsEffect()) {
3785
+ __ Push(Smi::FromInt(0));
3786
+ }
3787
+ if (assign_type == NAMED_PROPERTY) {
3788
+ VisitForAccumulatorValue(prop->obj());
3789
+ __ push(rax); // Copy of receiver, needed for later store.
3790
+ EmitNamedPropertyLoad(prop);
3791
+ } else {
3792
+ if (prop->is_arguments_access()) {
3793
+ VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3794
+ MemOperand slot_operand =
3795
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
3796
+ __ push(slot_operand);
3797
+ __ Move(rax, prop->key()->AsLiteral()->handle());
3798
+ } else {
3799
+ VisitForStackValue(prop->obj());
3800
+ VisitForAccumulatorValue(prop->key());
3801
+ }
3802
+ __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack
3803
+ __ push(rax); // Copy of key, needed for later store.
3804
+ EmitKeyedPropertyLoad(prop);
3805
+ }
3806
+ }
3807
+
3808
+ // We need a second deoptimization point after loading the value
3809
+ // in case evaluating the property load my have a side effect.
3810
+ if (assign_type == VARIABLE) {
3811
+ PrepareForBailout(expr->expression(), TOS_REG);
3812
+ } else {
3813
+ PrepareForBailoutForId(expr->CountId(), TOS_REG);
3814
+ }
3815
+
3816
+ // Call ToNumber only if operand is not a smi.
3817
+ Label no_conversion;
3818
+ Condition is_smi;
3819
+ is_smi = masm_->CheckSmi(rax);
3820
+ __ j(is_smi, &no_conversion, Label::kNear);
3821
+ ToNumberStub convert_stub;
3822
+ __ CallStub(&convert_stub);
3823
+ __ bind(&no_conversion);
3824
+
3825
+ // Save result for postfix expressions.
3826
+ if (expr->is_postfix()) {
3827
+ if (!context()->IsEffect()) {
3828
+ // Save the result on the stack. If we have a named or keyed property
3829
+ // we store the result under the receiver that is currently on top
3830
+ // of the stack.
3831
+ switch (assign_type) {
3832
+ case VARIABLE:
3833
+ __ push(rax);
3834
+ break;
3835
+ case NAMED_PROPERTY:
3836
+ __ movq(Operand(rsp, kPointerSize), rax);
3837
+ break;
3838
+ case KEYED_PROPERTY:
3839
+ __ movq(Operand(rsp, 2 * kPointerSize), rax);
3840
+ break;
3841
+ }
3842
+ }
3843
+ }
3844
+
3845
+ // Inline smi case if we are in a loop.
3846
+ Label done, stub_call;
3847
+ JumpPatchSite patch_site(masm_);
3848
+
3849
+ if (ShouldInlineSmiCase(expr->op())) {
3850
+ if (expr->op() == Token::INC) {
3851
+ __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3852
+ } else {
3853
+ __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3854
+ }
3855
+ __ j(overflow, &stub_call, Label::kNear);
3856
+ // We could eliminate this smi check if we split the code at
3857
+ // the first smi check before calling ToNumber.
3858
+ patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
3859
+
3860
+ __ bind(&stub_call);
3861
+ // Call stub. Undo operation first.
3862
+ if (expr->op() == Token::INC) {
3863
+ __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3864
+ } else {
3865
+ __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3866
+ }
3867
+ }
3868
+
3869
+ // Record position before stub call.
3870
+ SetSourcePosition(expr->position());
3871
+
3872
+ // Call stub for +1/-1.
3873
+ BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
3874
+ if (expr->op() == Token::INC) {
3875
+ __ Move(rdx, Smi::FromInt(1));
3876
+ } else {
3877
+ __ movq(rdx, rax);
3878
+ __ Move(rax, Smi::FromInt(1));
3879
+ }
3880
+ EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
3881
+ __ bind(&done);
3882
+
3883
+ // Store the value returned in rax.
3884
+ switch (assign_type) {
3885
+ case VARIABLE:
3886
+ if (expr->is_postfix()) {
3887
+ // Perform the assignment as if via '='.
3888
+ { EffectContext context(this);
3889
+ EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3890
+ Token::ASSIGN);
3891
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3892
+ context.Plug(rax);
3893
+ }
3894
+ // For all contexts except kEffect: We have the result on
3895
+ // top of the stack.
3896
+ if (!context()->IsEffect()) {
3897
+ context()->PlugTOS();
3898
+ }
3899
+ } else {
3900
+ // Perform the assignment as if via '='.
3901
+ EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3902
+ Token::ASSIGN);
3903
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3904
+ context()->Plug(rax);
3905
+ }
3906
+ break;
3907
+ case NAMED_PROPERTY: {
3908
+ __ Move(rcx, prop->key()->AsLiteral()->handle());
3909
+ __ pop(rdx);
3910
+ Handle<Code> ic = is_strict_mode()
3911
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
3912
+ : isolate()->builtins()->StoreIC_Initialize();
3913
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
3914
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3915
+ if (expr->is_postfix()) {
3916
+ if (!context()->IsEffect()) {
3917
+ context()->PlugTOS();
3918
+ }
3919
+ } else {
3920
+ context()->Plug(rax);
3921
+ }
3922
+ break;
3923
+ }
3924
+ case KEYED_PROPERTY: {
3925
+ __ pop(rcx);
3926
+ __ pop(rdx);
3927
+ Handle<Code> ic = is_strict_mode()
3928
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3929
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
3930
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
3931
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3932
+ if (expr->is_postfix()) {
3933
+ if (!context()->IsEffect()) {
3934
+ context()->PlugTOS();
3935
+ }
3936
+ } else {
3937
+ context()->Plug(rax);
3938
+ }
3939
+ break;
3940
+ }
3941
+ }
3942
+ }
3943
+
3944
+
3945
+ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3946
+ VariableProxy* proxy = expr->AsVariableProxy();
3947
+ ASSERT(!context()->IsEffect());
3948
+ ASSERT(!context()->IsTest());
3949
+
3950
+ if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
3951
+ Comment cmnt(masm_, "Global variable");
3952
+ __ Move(rcx, proxy->name());
3953
+ __ movq(rax, GlobalObjectOperand());
3954
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3955
+ // Use a regular load, not a contextual load, to avoid a reference
3956
+ // error.
3957
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
3958
+ PrepareForBailout(expr, TOS_REG);
3959
+ context()->Plug(rax);
3960
+ } else if (proxy != NULL &&
3961
+ proxy->var()->AsSlot() != NULL &&
3962
+ proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
3963
+ Label done, slow;
3964
+
3965
+ // Generate code for loading from variables potentially shadowed
3966
+ // by eval-introduced variables.
3967
+ Slot* slot = proxy->var()->AsSlot();
3968
+ EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
3969
+
3970
+ __ bind(&slow);
3971
+ __ push(rsi);
3972
+ __ Push(proxy->name());
3973
+ __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
3974
+ PrepareForBailout(expr, TOS_REG);
3975
+ __ bind(&done);
3976
+
3977
+ context()->Plug(rax);
3978
+ } else {
3979
+ // This expression cannot throw a reference error at the top level.
3980
+ context()->HandleExpression(expr);
3981
+ }
3982
+ }
3983
+
3984
+
3985
+ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
3986
+ Expression* left,
3987
+ Expression* right,
3988
+ Label* if_true,
3989
+ Label* if_false,
3990
+ Label* fall_through) {
3991
+ if (op != Token::EQ && op != Token::EQ_STRICT) return false;
3992
+
3993
+ // Check for the pattern: typeof <expression> == <string literal>.
3994
+ Literal* right_literal = right->AsLiteral();
3995
+ if (right_literal == NULL) return false;
3996
+ Handle<Object> right_literal_value = right_literal->handle();
3997
+ if (!right_literal_value->IsString()) return false;
3998
+ UnaryOperation* left_unary = left->AsUnaryOperation();
3999
+ if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
4000
+ Handle<String> check = Handle<String>::cast(right_literal_value);
4001
+
4002
+ { AccumulatorValueContext context(this);
4003
+ VisitForTypeofValue(left_unary->expression());
4004
+ }
4005
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4006
+
4007
+ if (check->Equals(isolate()->heap()->number_symbol())) {
4008
+ __ JumpIfSmi(rax, if_true);
4009
+ __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
4010
+ __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4011
+ Split(equal, if_true, if_false, fall_through);
4012
+ } else if (check->Equals(isolate()->heap()->string_symbol())) {
4013
+ __ JumpIfSmi(rax, if_false);
4014
+ // Check for undetectable objects => false.
4015
+ __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4016
+ __ j(above_equal, if_false);
4017
+ __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4018
+ Immediate(1 << Map::kIsUndetectable));
4019
+ Split(zero, if_true, if_false, fall_through);
4020
+ } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4021
+ __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4022
+ __ j(equal, if_true);
4023
+ __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4024
+ Split(equal, if_true, if_false, fall_through);
4025
+ } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4026
+ __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4027
+ __ j(equal, if_true);
4028
+ __ JumpIfSmi(rax, if_false);
4029
+ // Check for undetectable objects => true.
4030
+ __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4031
+ __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4032
+ Immediate(1 << Map::kIsUndetectable));
4033
+ Split(not_zero, if_true, if_false, fall_through);
4034
+ } else if (check->Equals(isolate()->heap()->function_symbol())) {
4035
+ __ JumpIfSmi(rax, if_false);
4036
+ __ CmpObjectType(rax, FIRST_FUNCTION_CLASS_TYPE, rdx);
4037
+ Split(above_equal, if_true, if_false, fall_through);
4038
+ } else if (check->Equals(isolate()->heap()->object_symbol())) {
4039
+ __ JumpIfSmi(rax, if_false);
4040
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
4041
+ __ j(equal, if_true);
4042
+ __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdx);
4043
+ __ j(below, if_false);
4044
+ __ CmpInstanceType(rdx, FIRST_FUNCTION_CLASS_TYPE);
4045
+ __ j(above_equal, if_false);
4046
+ // Check for undetectable objects => false.
4047
+ __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4048
+ Immediate(1 << Map::kIsUndetectable));
4049
+ Split(zero, if_true, if_false, fall_through);
4050
+ } else {
4051
+ if (if_false != fall_through) __ jmp(if_false);
4052
+ }
4053
+
4054
+ return true;
4055
+ }
4056
+
4057
+
4058
+ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4059
+ Comment cmnt(masm_, "[ CompareOperation");
4060
+ SetSourcePosition(expr->position());
4061
+
4062
+ // Always perform the comparison for its control flow. Pack the result
4063
+ // into the expression's context after the comparison is performed.
4064
+ Label materialize_true, materialize_false;
4065
+ Label* if_true = NULL;
4066
+ Label* if_false = NULL;
4067
+ Label* fall_through = NULL;
4068
+ context()->PrepareTest(&materialize_true, &materialize_false,
4069
+ &if_true, &if_false, &fall_through);
4070
+
4071
+ // First we try a fast inlined version of the compare when one of
4072
+ // the operands is a literal.
4073
+ Token::Value op = expr->op();
4074
+ Expression* left = expr->left();
4075
+ Expression* right = expr->right();
4076
+ if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
4077
+ context()->Plug(if_true, if_false);
4078
+ return;
4079
+ }
4080
+
4081
+ VisitForStackValue(expr->left());
4082
+ switch (op) {
4083
+ case Token::IN:
4084
+ VisitForStackValue(expr->right());
4085
+ __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4086
+ PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
4087
+ __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4088
+ Split(equal, if_true, if_false, fall_through);
4089
+ break;
4090
+
4091
+ case Token::INSTANCEOF: {
4092
+ VisitForStackValue(expr->right());
4093
+ InstanceofStub stub(InstanceofStub::kNoFlags);
4094
+ __ CallStub(&stub);
4095
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4096
+ __ testq(rax, rax);
4097
+ // The stub returns 0 for true.
4098
+ Split(zero, if_true, if_false, fall_through);
4099
+ break;
4100
+ }
4101
+
4102
+ default: {
4103
+ VisitForAccumulatorValue(expr->right());
4104
+ Condition cc = no_condition;
4105
+ bool strict = false;
4106
+ switch (op) {
4107
+ case Token::EQ_STRICT:
4108
+ strict = true;
4109
+ // Fall through.
4110
+ case Token::EQ:
4111
+ cc = equal;
4112
+ __ pop(rdx);
4113
+ break;
4114
+ case Token::LT:
4115
+ cc = less;
4116
+ __ pop(rdx);
4117
+ break;
4118
+ case Token::GT:
4119
+ // Reverse left and right sizes to obtain ECMA-262 conversion order.
4120
+ cc = less;
4121
+ __ movq(rdx, result_register());
4122
+ __ pop(rax);
4123
+ break;
4124
+ case Token::LTE:
4125
+ // Reverse left and right sizes to obtain ECMA-262 conversion order.
4126
+ cc = greater_equal;
4127
+ __ movq(rdx, result_register());
4128
+ __ pop(rax);
4129
+ break;
4130
+ case Token::GTE:
4131
+ cc = greater_equal;
4132
+ __ pop(rdx);
4133
+ break;
4134
+ case Token::IN:
4135
+ case Token::INSTANCEOF:
4136
+ default:
4137
+ UNREACHABLE();
4138
+ }
4139
+
4140
+ bool inline_smi_code = ShouldInlineSmiCase(op);
4141
+ JumpPatchSite patch_site(masm_);
4142
+ if (inline_smi_code) {
4143
+ Label slow_case;
4144
+ __ movq(rcx, rdx);
4145
+ __ or_(rcx, rax);
4146
+ patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4147
+ __ cmpq(rdx, rax);
4148
+ Split(cc, if_true, if_false, NULL);
4149
+ __ bind(&slow_case);
4150
+ }
4151
+
4152
+ // Record position and call the compare IC.
4153
+ SetSourcePosition(expr->position());
4154
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
4155
+ EmitCallIC(ic, &patch_site, expr->id());
4156
+
4157
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4158
+ __ testq(rax, rax);
4159
+ Split(cc, if_true, if_false, fall_through);
4160
+ }
4161
+ }
4162
+
4163
+ // Convert the result of the comparison into one expected for this
4164
+ // expression's context.
4165
+ context()->Plug(if_true, if_false);
4166
+ }
4167
+
4168
+
4169
+ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
4170
+ Comment cmnt(masm_, "[ CompareToNull");
4171
+ Label materialize_true, materialize_false;
4172
+ Label* if_true = NULL;
4173
+ Label* if_false = NULL;
4174
+ Label* fall_through = NULL;
4175
+ context()->PrepareTest(&materialize_true, &materialize_false,
4176
+ &if_true, &if_false, &fall_through);
4177
+
4178
+ VisitForAccumulatorValue(expr->expression());
4179
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4180
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
4181
+ if (expr->is_strict()) {
4182
+ Split(equal, if_true, if_false, fall_through);
4183
+ } else {
4184
+ __ j(equal, if_true);
4185
+ __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4186
+ __ j(equal, if_true);
4187
+ Condition is_smi = masm_->CheckSmi(rax);
4188
+ __ j(is_smi, if_false);
4189
+ // It can be an undetectable object.
4190
+ __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4191
+ __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4192
+ Immediate(1 << Map::kIsUndetectable));
4193
+ Split(not_zero, if_true, if_false, fall_through);
4194
+ }
4195
+ context()->Plug(if_true, if_false);
4196
+ }
4197
+
4198
+
4199
+ void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4200
+ __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4201
+ context()->Plug(rax);
4202
+ }
4203
+
4204
+
4205
+ Register FullCodeGenerator::result_register() {
4206
+ return rax;
4207
+ }
4208
+
4209
+
4210
+ Register FullCodeGenerator::context_register() {
4211
+ return rsi;
4212
+ }
4213
+
4214
+
4215
+ void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
4216
+ RelocInfo::Mode mode,
4217
+ unsigned ast_id) {
4218
+ ASSERT(mode == RelocInfo::CODE_TARGET ||
4219
+ mode == RelocInfo::CODE_TARGET_CONTEXT);
4220
+ Counters* counters = isolate()->counters();
4221
+ switch (ic->kind()) {
4222
+ case Code::LOAD_IC:
4223
+ __ IncrementCounter(counters->named_load_full(), 1);
4224
+ break;
4225
+ case Code::KEYED_LOAD_IC:
4226
+ __ IncrementCounter(counters->keyed_load_full(), 1);
4227
+ break;
4228
+ case Code::STORE_IC:
4229
+ __ IncrementCounter(counters->named_store_full(), 1);
4230
+ break;
4231
+ case Code::KEYED_STORE_IC:
4232
+ __ IncrementCounter(counters->keyed_store_full(), 1);
4233
+ default:
4234
+ break;
4235
+ }
4236
+ __ call(ic, mode, ast_id);
4237
+ }
4238
+
4239
+
4240
+ void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
4241
+ JumpPatchSite* patch_site,
4242
+ unsigned ast_id) {
4243
+ Counters* counters = isolate()->counters();
4244
+ switch (ic->kind()) {
4245
+ case Code::LOAD_IC:
4246
+ __ IncrementCounter(counters->named_load_full(), 1);
4247
+ break;
4248
+ case Code::KEYED_LOAD_IC:
4249
+ __ IncrementCounter(counters->keyed_load_full(), 1);
4250
+ break;
4251
+ case Code::STORE_IC:
4252
+ __ IncrementCounter(counters->named_store_full(), 1);
4253
+ break;
4254
+ case Code::KEYED_STORE_IC:
4255
+ __ IncrementCounter(counters->keyed_store_full(), 1);
4256
+ default:
4257
+ break;
4258
+ }
4259
+ __ call(ic, RelocInfo::CODE_TARGET, ast_id);
4260
+ if (patch_site != NULL && patch_site->is_bound()) {
4261
+ patch_site->EmitPatchInfo();
4262
+ } else {
4263
+ __ nop(); // Signals no inlined code.
4264
+ }
4265
+ }
4266
+
4267
+
4268
+ void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4269
+ ASSERT(IsAligned(frame_offset, kPointerSize));
4270
+ __ movq(Operand(rbp, frame_offset), value);
4271
+ }
4272
+
4273
+
4274
+ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4275
+ __ movq(dst, ContextOperand(rsi, context_index));
4276
+ }
4277
+
4278
+
4279
+ // ----------------------------------------------------------------------------
4280
+ // Non-local control flow support.
4281
+
4282
+
4283
+ void FullCodeGenerator::EnterFinallyBlock() {
4284
+ ASSERT(!result_register().is(rdx));
4285
+ ASSERT(!result_register().is(rcx));
4286
+ // Cook return address on top of stack (smi encoded Code* delta)
4287
+ __ movq(rdx, Operand(rsp, 0));
4288
+ __ Move(rcx, masm_->CodeObject());
4289
+ __ subq(rdx, rcx);
4290
+ __ Integer32ToSmi(rdx, rdx);
4291
+ __ movq(Operand(rsp, 0), rdx);
4292
+ // Store result register while executing finally block.
4293
+ __ push(result_register());
4294
+ }
4295
+
4296
+
4297
+ void FullCodeGenerator::ExitFinallyBlock() {
4298
+ ASSERT(!result_register().is(rdx));
4299
+ ASSERT(!result_register().is(rcx));
4300
+ // Restore result register from stack.
4301
+ __ pop(result_register());
4302
+ // Uncook return address.
4303
+ __ movq(rdx, Operand(rsp, 0));
4304
+ __ SmiToInteger32(rdx, rdx);
4305
+ __ Move(rcx, masm_->CodeObject());
4306
+ __ addq(rdx, rcx);
4307
+ __ movq(Operand(rsp, 0), rdx);
4308
+ // And return.
4309
+ __ ret(0);
4310
+ }
4311
+
4312
+
4313
+ #undef __
4314
+
4315
+
4316
+ } } // namespace v8::internal
4317
+
4318
+ #endif // V8_TARGET_ARCH_X64