therubyracer 0.8.1.pre2 → 0.8.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (877):
  1. data/Changelog.md +2 -1
  2. data/README.md +6 -3
  3. data/ext/v8/upstream/3.1.8/.gitignore +31 -0
  4. data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
  5. data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
  6. data/ext/v8/upstream/3.1.8/LICENSE +52 -0
  7. data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
  8. data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
  9. data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
  10. data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
  11. data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
  12. data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
  13. data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
  14. data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
  15. data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
  16. data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
  17. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
  19. data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
  20. data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
  21. data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
  22. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
  23. data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
  24. data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
  25. data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
  26. data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
  27. data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
  28. data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
  29. data/ext/v8/upstream/3.1.8/src/api.h +508 -0
  30. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
  31. data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
  32. data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
  33. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
  34. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
  35. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
  36. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
  37. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
  38. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
  39. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
  40. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
  41. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
  42. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
  43. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
  44. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
  45. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
  46. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
  47. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
  48. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
  49. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
  50. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
  51. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
  52. data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
  53. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
  54. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
  55. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
  56. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
  57. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
  58. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
  59. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
  60. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
  61. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
  62. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
  63. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
  64. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
  65. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
  66. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
  67. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
  68. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
  69. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
  70. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
  71. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
  72. data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
  73. data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
  74. data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
  75. data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
  76. data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
  77. data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
  78. data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
  79. data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
  80. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
  81. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
  82. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
  83. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
  84. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
  85. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
  86. data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
  87. data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
  88. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
  89. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
  90. data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
  91. data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
  92. data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
  93. data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
  94. data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
  95. data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
  96. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
  97. data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
  98. data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
  99. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
  100. data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
  101. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
  102. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
  103. data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
  104. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
  105. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
  106. data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
  107. data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
  108. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
  109. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
  110. data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
  111. data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
  112. data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
  113. data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
  114. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
  115. data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
  116. data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
  117. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
  118. data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
  119. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
  120. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
  121. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
  122. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
  123. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
  124. data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
  125. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
  126. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
  127. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
  128. data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
  129. data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
  130. data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
  131. data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
  132. data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
  133. data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
  134. data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
  135. data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
  136. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
  137. data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
  138. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
  139. data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
  140. data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
  141. data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
  142. data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
  143. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
  144. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
  145. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
  146. data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
  147. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
  148. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
  149. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
  150. data/ext/v8/upstream/3.1.8/src/double.h +238 -0
  151. data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
  152. data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
  153. data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
  154. data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
  155. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
  156. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
  157. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
  158. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
  159. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
  160. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
  161. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
  162. data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
  163. data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
  164. data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
  165. data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
  166. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
  167. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
  168. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
  169. data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
  170. data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
  171. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
  172. data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
  173. data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
  174. data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
  175. data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
  176. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
  177. data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
  178. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
  179. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
  180. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
  181. data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
  182. data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
  183. data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
  184. data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
  185. data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
  186. data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
  187. data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
  188. data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
  189. data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
  190. data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
  191. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
  192. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
  193. data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
  194. data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
  195. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
  196. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
  197. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
  198. data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
  199. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
  200. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
  201. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
  202. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
  203. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
  204. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
  205. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
  206. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
  207. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
  208. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
  209. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
  210. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
  211. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
  212. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
  213. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
  214. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
  215. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
  216. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
  217. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
  218. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
  219. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
  220. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  221. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
  222. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
  223. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
  224. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
  225. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
  226. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
  227. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  228. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
  229. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
  230. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
  231. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
  232. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
  233. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
  234. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
  235. data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
  236. data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
  237. data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
  238. data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
  239. data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
  240. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
  241. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
  242. data/ext/v8/upstream/3.1.8/src/json.js +342 -0
  243. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
  244. data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
  245. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
  246. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
  247. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
  248. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
  249. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
  250. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
  251. data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
  252. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
  253. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
  254. data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
  255. data/ext/v8/upstream/3.1.8/src/list.h +164 -0
  256. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
  257. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
  258. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
  259. data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
  260. data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
  261. data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
  262. data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
  263. data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
  264. data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
  265. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
  266. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
  267. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
  268. data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
  269. data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
  270. data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
  271. data/ext/v8/upstream/3.1.8/src/log.h +379 -0
  272. data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
  273. data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
  274. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
  275. data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
  276. data/ext/v8/upstream/3.1.8/src/math.js +264 -0
  277. data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
  278. data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
  279. data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
  280. data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
  281. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
  282. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
  283. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
  284. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
  285. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
  286. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
  287. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
  288. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
  289. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
  290. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
  291. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
  292. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
  293. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
  294. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
  295. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
  296. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
  297. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
  298. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
  299. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
  300. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
  301. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
  302. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
  303. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
  304. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
  305. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
  306. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
  307. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
  308. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
  309. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
  310. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
  311. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
  312. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
  313. data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
  314. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
  315. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
  316. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
  317. data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
  318. data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
  319. data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
  320. data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
  321. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
  322. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
  323. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
  324. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
  325. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
  326. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
  327. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
  328. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
  329. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
  330. data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
  331. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
  332. data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
  333. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
  334. data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
  335. data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
  336. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
  337. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
  338. data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
  339. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
  340. data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
  341. data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
  342. data/ext/v8/upstream/3.1.8/src/property.h +337 -0
  343. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  344. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
  345. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
  346. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
  347. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
  348. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
  349. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
  350. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
  351. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
  352. data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
  353. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
  354. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
  355. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
  356. data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
  357. data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
  358. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
  359. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
  360. data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
  361. data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
  362. data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
  363. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
  364. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
  365. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
  366. data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
  367. data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
  368. data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
  369. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
  370. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
  371. data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
  372. data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
  373. data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
  374. data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
  375. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
  376. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
  377. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
  378. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
  379. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
  380. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
  381. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
  382. data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
  383. data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
  384. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
  385. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
  386. data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
  387. data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
  388. data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
  389. data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
  390. data/ext/v8/upstream/3.1.8/src/string.js +915 -0
  391. data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
  392. data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
  393. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
  394. data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
  395. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
  396. data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
  397. data/ext/v8/upstream/3.1.8/src/token.h +288 -0
  398. data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
  399. data/ext/v8/upstream/3.1.8/src/top.h +608 -0
  400. data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
  401. data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
  402. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
  403. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
  404. data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
  405. data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
  406. data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
  407. data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
  408. data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
  409. data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
  410. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
  411. data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
  412. data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
  413. data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
  414. data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
  415. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
  416. data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
  417. data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
  418. data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
  419. data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
  420. data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
  421. data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
  422. data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
  423. data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
  424. data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
  425. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
  426. data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
  427. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
  428. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
  429. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
  430. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
  431. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
  432. data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
  433. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
  434. data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
  435. data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
  436. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
  437. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
  438. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
  439. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
  440. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
  441. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
  442. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
  443. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
  444. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
  445. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
  446. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
  447. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
  448. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
  449. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
  450. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
  451. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
  452. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
  453. data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
  454. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
  455. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
  456. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  457. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
  458. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
  459. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
  460. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
  461. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
  462. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
  463. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
  464. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
  465. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
  466. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
  467. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
  468. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
  469. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
  470. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
  471. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
  472. data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
  473. data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
  474. data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
  475. data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
  476. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
  477. data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
  478. data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
  479. data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
  480. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
  481. data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
  482. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
  483. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
  484. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
  485. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
  486. data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
  487. data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
  488. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
  489. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
  490. data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
  491. data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
  492. data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
  493. data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
  494. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
  495. data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
  496. data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
  497. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
  498. data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
  499. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
  500. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
  501. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
  502. data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
  503. data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
  504. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
  505. data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
  506. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
  507. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
  508. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
  509. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
  510. data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
  511. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
  512. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
  513. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
  514. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
  515. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
  516. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
  517. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
  518. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
  519. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
  520. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
  521. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  522. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  523. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  524. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  525. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  526. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  527. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  528. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  529. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
  530. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
  531. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
  532. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  533. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  534. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  535. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  536. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
  537. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
  538. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
  539. data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
  540. data/ext/v8/upstream/Makefile +2 -1
  541. data/ext/v8/v8_template.cpp +2 -2
  542. data/lib/v8/version.rb +1 -1
  543. data/spec/redjs/jsapi_spec.rb +2 -2
  544. metadata +552 -490
  545. data/ext/v8/upstream/2.3.3/.gitignore +0 -26
  546. data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
  547. data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
  548. data/ext/v8/upstream/2.3.3/LICENSE +0 -55
  549. data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
  550. data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
  551. data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
  552. data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
  553. data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
  554. data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
  555. data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
  556. data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
  557. data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
  558. data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
  559. data/ext/v8/upstream/2.3.3/src/api.h +0 -485
  560. data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
  561. data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
  562. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
  563. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
  564. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
  565. data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
  566. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
  567. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
  568. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
  569. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
  570. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
  571. data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
  572. data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
  573. data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
  574. data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
  575. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
  576. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
  577. data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
  578. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
  579. data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
  580. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
  581. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
  582. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
  583. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
  584. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
  585. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
  586. data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
  587. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
  588. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
  589. data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
  590. data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
  591. data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
  592. data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
  593. data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
  594. data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
  595. data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
  596. data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
  597. data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
  598. data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
  599. data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
  600. data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
  601. data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
  602. data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
  603. data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
  604. data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
  605. data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
  606. data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
  607. data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
  608. data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
  609. data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
  610. data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
  611. data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
  612. data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
  613. data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
  614. data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
  615. data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
  616. data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
  617. data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
  618. data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
  619. data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
  620. data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
  621. data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
  622. data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
  623. data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
  624. data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
  625. data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
  626. data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
  627. data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
  628. data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
  629. data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
  630. data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
  631. data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
  632. data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
  633. data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
  634. data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
  635. data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
  636. data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
  637. data/ext/v8/upstream/2.3.3/src/double.h +0 -169
  638. data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
  639. data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
  640. data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
  641. data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
  642. data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
  643. data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
  644. data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
  645. data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
  646. data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
  647. data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
  648. data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
  649. data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
  650. data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
  651. data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
  652. data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
  653. data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
  654. data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
  655. data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
  656. data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
  657. data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
  658. data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
  659. data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
  660. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
  661. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
  662. data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
  663. data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
  664. data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
  665. data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
  666. data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
  667. data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
  668. data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
  669. data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
  670. data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
  671. data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
  672. data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
  673. data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
  674. data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
  675. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
  676. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
  677. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
  678. data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
  679. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
  680. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
  681. data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
  682. data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
  683. data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
  684. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
  685. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
  686. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
  687. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
  688. data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
  689. data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
  690. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
  691. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
  692. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
  693. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
  694. data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
  695. data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
  696. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
  697. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
  698. data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
  699. data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
  700. data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
  701. data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
  702. data/ext/v8/upstream/2.3.3/src/json.js +0 -268
  703. data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
  704. data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
  705. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
  706. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
  707. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
  708. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
  709. data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
  710. data/ext/v8/upstream/2.3.3/src/list.h +0 -159
  711. data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
  712. data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
  713. data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
  714. data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
  715. data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
  716. data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
  717. data/ext/v8/upstream/2.3.3/src/log.h +0 -384
  718. data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
  719. data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
  720. data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
  721. data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
  722. data/ext/v8/upstream/2.3.3/src/math.js +0 -264
  723. data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
  724. data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
  725. data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
  726. data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
  727. data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
  728. data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
  729. data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
  730. data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
  731. data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
  732. data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
  733. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
  734. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
  735. data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
  736. data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
  737. data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
  738. data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
  739. data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
  740. data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
  741. data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
  742. data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
  743. data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
  744. data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
  745. data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
  746. data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
  747. data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
  748. data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
  749. data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
  750. data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
  751. data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
  752. data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
  753. data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
  754. data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
  755. data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
  756. data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
  757. data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
  758. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
  759. data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
  760. data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
  761. data/ext/v8/upstream/2.3.3/src/property.h +0 -315
  762. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
  763. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
  764. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
  765. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
  766. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
  767. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
  768. data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
  769. data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
  770. data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
  771. data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
  772. data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
  773. data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
  774. data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
  775. data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
  776. data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
  777. data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
  778. data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
  779. data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
  780. data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
  781. data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
  782. data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
  783. data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
  784. data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
  785. data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
  786. data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
  787. data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
  788. data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
  789. data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
  790. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
  791. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
  792. data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
  793. data/ext/v8/upstream/2.3.3/src/token.h +0 -270
  794. data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
  795. data/ext/v8/upstream/2.3.3/src/top.h +0 -463
  796. data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
  797. data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
  798. data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
  799. data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
  800. data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
  801. data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
  802. data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
  803. data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
  804. data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
  805. data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
  806. data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
  807. data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
  808. data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
  809. data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
  810. data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
  811. data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
  812. data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
  813. data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
  814. data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
  815. data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
  816. data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
  817. data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
  818. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
  819. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
  820. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
  821. data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
  822. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
  823. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
  824. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
  825. data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
  826. data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
  827. data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
  828. data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
  829. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
  830. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
  831. data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
  832. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
  833. data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
  834. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
  835. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
  836. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
  837. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
  838. data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
  839. data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
  840. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
  841. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
  842. data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
  843. data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
  844. data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
  845. data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
  846. data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
  847. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
  848. data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
  849. data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
  850. data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
  851. data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
  852. data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
  853. data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
  854. data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
  855. data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
  856. data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
  857. data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
  858. data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
  859. data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
  860. data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
  861. data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
  862. data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
  863. data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
  864. data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
  865. data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
  866. data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
  867. data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
  868. data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
  869. data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
  870. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
  871. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
  872. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
  873. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
  874. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
  875. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
  876. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
  877. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
@@ -0,0 +1,437 @@
1
+ // Copyright 2010 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_X64)
31
+
32
+ #include "codegen-inl.h"
33
+ #include "jump-target-inl.h"
34
+ #include "register-allocator-inl.h"
35
+ #include "virtual-frame-inl.h"
36
+
37
+ namespace v8 {
38
+ namespace internal {
39
+
40
+ // -------------------------------------------------------------------------
41
+ // JumpTarget implementation.
42
+
43
+ #define __ ACCESS_MASM(cgen()->masm())
44
+
45
// Emit an unconditional jump to this target.  The current virtual
// frame is consumed: it is either merged into the target's expected
// frame (backward jump), asserted equal to a preconfigured entry
// frame (forward jump with entry frame), or recorded as a reaching
// frame whose merge code will be emitted at bind time.
void JumpTarget::DoJump() {
  ASSERT(cgen()->has_valid_frame());
  // Live non-frame registers are not allowed at unconditional jumps
  // because we have no way of invalidating the corresponding results
  // which are still live in the C++ code.
  ASSERT(cgen()->HasValidEntryRegisters());

  if (is_bound()) {
    // Backward jump.  There is an expected frame to merge to.
    ASSERT(direction_ == BIDIRECTIONAL);
    cgen()->frame()->PrepareMergeTo(entry_frame_);
    cgen()->frame()->MergeTo(entry_frame_);
    // The frame is consumed by the jump; drop it before emitting.
    cgen()->DeleteFrame();
    __ jmp(&entry_label_);
  } else if (entry_frame_ != NULL) {
    // Forward jump with a preconfigured entry frame.  Assert the
    // current frame matches the expected one and jump to the block.
    ASSERT(cgen()->frame()->Equals(entry_frame_));
    cgen()->DeleteFrame();
    __ jmp(&entry_label_);
  } else {
    // Forward jump.  Remember the current frame and emit a jump to
    // its merge code.  AddReachingFrame takes ownership of the frame
    // and appends a matching label to merge_labels_, hence the jump
    // to merge_labels_.last().
    AddReachingFrame(cgen()->frame());
    RegisterFile empty;
    cgen()->SetFrame(NULL, &empty);
    __ jmp(&merge_labels_.last());
  }
}
74
+
75
+
76
// Emit a conditional branch to this target on condition cc.  Unlike
// DoJump, the fall-through path keeps a valid frame, so for backward
// branches the work is done on a copy of the current frame and the
// original is restored afterwards.  The Hint parameter b is accepted
// for interface compatibility but is not used in this body.
void JumpTarget::DoBranch(Condition cc, Hint b) {
  ASSERT(cgen() != NULL);
  ASSERT(cgen()->has_valid_frame());

  if (is_bound()) {
    ASSERT(direction_ == BIDIRECTIONAL);
    // Backward branch.  We have an expected frame to merge to on the
    // backward edge.

    // Swap the current frame for a copy (we do the swapping to get
    // the off-frame registers off the fall through) to use for the
    // branch.
    VirtualFrame* fall_through_frame = cgen()->frame();
    VirtualFrame* branch_frame = new VirtualFrame(fall_through_frame);
    RegisterFile non_frame_registers;
    cgen()->SetFrame(branch_frame, &non_frame_registers);

    // Check if we can avoid merge code.
    cgen()->frame()->PrepareMergeTo(entry_frame_);
    if (cgen()->frame()->Equals(entry_frame_)) {
      // Branch right in to the block.
      cgen()->DeleteFrame();
      __ j(cc, &entry_label_);
      // Restore the saved fall-through frame before returning.
      cgen()->SetFrame(fall_through_frame, &non_frame_registers);
      return;
    }

    // Check if we can reuse existing merge code.
    for (int i = 0; i < reaching_frames_.length(); i++) {
      if (reaching_frames_[i] != NULL &&
          cgen()->frame()->Equals(reaching_frames_[i])) {
        // Branch to the merge code.
        cgen()->DeleteFrame();
        __ j(cc, &merge_labels_[i]);
        cgen()->SetFrame(fall_through_frame, &non_frame_registers);
        return;
      }
    }

    // To emit the merge code here, we negate the condition and branch
    // around the merge code on the fall through path.
    Label original_fall_through;
    __ j(NegateCondition(cc), &original_fall_through);
    cgen()->frame()->MergeTo(entry_frame_);
    cgen()->DeleteFrame();
    __ jmp(&entry_label_);
    cgen()->SetFrame(fall_through_frame, &non_frame_registers);
    __ bind(&original_fall_through);

  } else if (entry_frame_ != NULL) {
    // Forward branch with a preconfigured entry frame.  Assert the
    // current frame matches the expected one and branch to the block.
    ASSERT(cgen()->frame()->Equals(entry_frame_));
    // Explicitly use the macro assembler instead of __ as forward
    // branches are expected to be a fixed size (no inserted
    // coverage-checking instructions please).  This is used in
    // Reference::GetValue.
    cgen()->masm()->j(cc, &entry_label_);

  } else {
    // Forward branch.  A copy of the current frame is remembered and
    // a branch to the merge code is emitted.  Explicitly use the
    // macro assembler instead of __ as forward branches are expected
    // to be a fixed size (no inserted coverage-checking instructions
    // please).  This is used in Reference::GetValue.
    AddReachingFrame(new VirtualFrame(cgen()->frame()));
    cgen()->masm()->j(cc, &merge_labels_.last());
  }
}
145
+
146
+
147
// Emit a call to this target, pushing the return address on the
// stack.
void JumpTarget::Call() {
  // Call is used to push the address of the catch block on the stack as
  // a return address when compiling try/catch and try/finally.  We
  // fully spill the frame before making the call.  The expected frame
  // at the label (which should be the only one) is the spilled current
  // frame plus an in-memory return address.  The "fall-through" frame
  // at the return site is the spilled current frame.
  ASSERT(cgen() != NULL);
  ASSERT(cgen()->has_valid_frame());
  // There are no non-frame references across the call.
  ASSERT(cgen()->HasValidEntryRegisters());
  ASSERT(!is_linked());

  cgen()->frame()->SpillAll();
  VirtualFrame* target_frame = new VirtualFrame(cgen()->frame());
  // Account for the return address the call instruction pushes.
  target_frame->Adjust(1);
  // We do not expect a call with a preconfigured entry frame.
  ASSERT(entry_frame_ == NULL);
  // AddReachingFrame takes ownership of target_frame and appends the
  // matching merge label that the call targets.
  AddReachingFrame(target_frame);
  __ call(&merge_labels_.last());
}
168
+
169
+
170
// Bind this target at the current code position, merging all reaching
// frames (forward jumps and any fall-through) into a single entry frame.
// Several fast paths avoid the general merge when possible.
void JumpTarget::DoBind() {
  ASSERT(cgen() != NULL);
  ASSERT(!is_bound());

  // Live non-frame registers are not allowed at the start of a basic
  // block.
  ASSERT(!cgen()->has_valid_frame() || cgen()->HasValidEntryRegisters());

  // Fast case: the jump target was manually configured with an entry
  // frame to use.
  if (entry_frame_ != NULL) {
    // Assert no reaching frames to deal with.
    ASSERT(reaching_frames_.is_empty());
    ASSERT(!cgen()->has_valid_frame());

    RegisterFile empty;
    if (direction_ == BIDIRECTIONAL) {
      // Copy the entry frame so the original can be used for a
      // possible backward jump.
      cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
    } else {
      // Take ownership of the entry frame.
      cgen()->SetFrame(entry_frame_, &empty);
      entry_frame_ = NULL;
    }
    __ bind(&entry_label_);
    return;
  }

  if (!is_linked()) {
    ASSERT(cgen()->has_valid_frame());
    if (direction_ == FORWARD_ONLY) {
      // Fast case: no forward jumps and no possible backward jumps.
      // The stack pointer can be floating above the top of the
      // virtual frame before the bind.  Afterward, it should not.
      VirtualFrame* frame = cgen()->frame();
      int difference = frame->stack_pointer_ - (frame->element_count() - 1);
      if (difference > 0) {
        frame->stack_pointer_ -= difference;
        __ addq(rsp, Immediate(difference * kPointerSize));
      }
    } else {
      ASSERT(direction_ == BIDIRECTIONAL);
      // Fast case: no forward jumps, possible backward ones.  Remove
      // constants and copies above the watermark on the fall-through
      // frame and use it as the entry frame.
      cgen()->frame()->MakeMergable();
      entry_frame_ = new VirtualFrame(cgen()->frame());
    }
    __ bind(&entry_label_);
    return;
  }

  if (direction_ == FORWARD_ONLY &&
      !cgen()->has_valid_frame() &&
      reaching_frames_.length() == 1) {
    // Fast case: no fall-through, a single forward jump, and no
    // possible backward jumps.  Pick up the only reaching frame, take
    // ownership of it, and use it for the block about to be emitted.
    VirtualFrame* frame = reaching_frames_[0];
    RegisterFile empty;
    cgen()->SetFrame(frame, &empty);
    reaching_frames_[0] = NULL;
    __ bind(&merge_labels_[0]);

    // The stack pointer can be floating above the top of the
    // virtual frame before the bind.  Afterward, it should not.
    int difference = frame->stack_pointer_ - (frame->element_count() - 1);
    if (difference > 0) {
      frame->stack_pointer_ -= difference;
      __ addq(rsp, Immediate(difference * kPointerSize));
    }

    __ bind(&entry_label_);
    return;
  }

  // General case below: multiple reaching frames and/or a fall-through.

  // If there is a current frame, record it as the fall-through.  It
  // is owned by the reaching frames for now.
  bool had_fall_through = false;
  if (cgen()->has_valid_frame()) {
    had_fall_through = true;
    AddReachingFrame(cgen()->frame());  // Return value ignored.
    RegisterFile empty;
    cgen()->SetFrame(NULL, &empty);
  }

  // Compute the frame to use for entry to the block.
  ComputeEntryFrame();

  // Some moves required to merge to an expected frame require purely
  // frame state changes, and do not require any code generation.
  // Perform those first to increase the possibility of finding equal
  // frames below.
  for (int i = 0; i < reaching_frames_.length(); i++) {
    if (reaching_frames_[i] != NULL) {
      reaching_frames_[i]->PrepareMergeTo(entry_frame_);
    }
  }

  if (is_linked()) {
    // There were forward jumps.  Handle merging the reaching frames
    // to the entry frame.

    // Loop over the (non-null) reaching frames and process any that
    // need merge code.  Iterate backwards through the list to handle
    // the fall-through frame first.  Set frames that will be
    // processed after 'i' to NULL if we want to avoid processing
    // them.
    for (int i = reaching_frames_.length() - 1; i >= 0; i--) {
      VirtualFrame* frame = reaching_frames_[i];

      if (frame != NULL) {
        // Does the frame (probably) need merge code?
        if (!frame->Equals(entry_frame_)) {
          // We could have a valid frame as the fall through to the
          // binding site or as the fall through from a previous merge
          // code block.  Jump around the code we are about to
          // generate.
          if (cgen()->has_valid_frame()) {
            cgen()->DeleteFrame();
            __ jmp(&entry_label_);
          }
          // Pick up the frame for this block.  Assume ownership if
          // there cannot be backward jumps.
          RegisterFile empty;
          if (direction_ == BIDIRECTIONAL) {
            cgen()->SetFrame(new VirtualFrame(frame), &empty);
          } else {
            cgen()->SetFrame(frame, &empty);
            reaching_frames_[i] = NULL;
          }
          __ bind(&merge_labels_[i]);

          // Loop over the remaining (non-null) reaching frames,
          // looking for any that can share merge code with this one.
          for (int j = 0; j < i; j++) {
            VirtualFrame* other = reaching_frames_[j];
            if (other != NULL && other->Equals(cgen()->frame())) {
              // Set the reaching frame element to null to avoid
              // processing it later, and then bind its entry label.
              reaching_frames_[j] = NULL;
              __ bind(&merge_labels_[j]);
            }
          }

          // Emit the merge code.
          cgen()->frame()->MergeTo(entry_frame_);
        } else if (i == reaching_frames_.length() - 1 && had_fall_through) {
          // If this is the fall through frame, and it didn't need
          // merge code, we need to pick up the frame so we can jump
          // around subsequent merge blocks if necessary.
          RegisterFile empty;
          cgen()->SetFrame(frame, &empty);
          reaching_frames_[i] = NULL;
        }
      }
    }

    // The code generator may not have a current frame if there was no
    // fall through and none of the reaching frames needed merging.
    // In that case, clone the entry frame as the current frame.
    if (!cgen()->has_valid_frame()) {
      RegisterFile empty;
      cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
    }

    // There may be unprocessed reaching frames that did not need
    // merge code.  They will have unbound merge labels.  Bind their
    // merge labels to be the same as the entry label and deallocate
    // them.
    for (int i = 0; i < reaching_frames_.length(); i++) {
      if (!merge_labels_[i].is_bound()) {
        reaching_frames_[i] = NULL;
        __ bind(&merge_labels_[i]);
      }
    }

    // There are non-NULL reaching frames with bound labels for each
    // merge block, but only on backward targets.
  } else {
    // There were no forward jumps.  There must be a current frame and
    // this must be a bidirectional target.
    ASSERT(reaching_frames_.length() == 1);
    ASSERT(reaching_frames_[0] != NULL);
    ASSERT(direction_ == BIDIRECTIONAL);

    // Use a copy of the reaching frame so the original can be saved
    // for possible reuse as a backward merge block.
    RegisterFile empty;
    cgen()->SetFrame(new VirtualFrame(reaching_frames_[0]), &empty);
    __ bind(&merge_labels_[0]);
    cgen()->frame()->MergeTo(entry_frame_);
  }

  __ bind(&entry_label_);
}
367
+
368
+
369
+ void BreakTarget::Jump() {
370
+ // Drop leftover statement state from the frame before merging, without
371
+ // emitting code.
372
+ ASSERT(cgen()->has_valid_frame());
373
+ int count = cgen()->frame()->height() - expected_height_;
374
+ cgen()->frame()->ForgetElements(count);
375
+ DoJump();
376
+ }
377
+
378
+
379
+ void BreakTarget::Jump(Result* arg) {
380
+ // Drop leftover statement state from the frame before merging, without
381
+ // emitting code.
382
+ ASSERT(cgen()->has_valid_frame());
383
+ int count = cgen()->frame()->height() - expected_height_;
384
+ cgen()->frame()->ForgetElements(count);
385
+ cgen()->frame()->Push(arg);
386
+ DoJump();
387
+ }
388
+
389
+
390
// Bind this break target at the current position.  All forward jumps
// must already have adjusted their frames to the expected height.
void BreakTarget::Bind() {
#ifdef DEBUG
  // All the forward-reaching frames should have been adjusted at the
  // jumps to this target.
  for (int i = 0; i < reaching_frames_.length(); i++) {
    ASSERT(reaching_frames_[i] == NULL ||
           reaching_frames_[i]->height() == expected_height_);
  }
#endif
  // Drop leftover statement state from the frame before merging, even on
  // the fall through.  This is so we can bind the return target with state
  // on the frame.
  if (cgen()->has_valid_frame()) {
    int count = cgen()->frame()->height() - expected_height_;
    cgen()->frame()->ForgetElements(count);
  }
  DoBind();
}
408
+
409
+
410
// Bind this break target that carries one value.  On exit the value is
// popped from the merged frame back into *arg.
void BreakTarget::Bind(Result* arg) {
#ifdef DEBUG
  // All the forward-reaching frames should have been adjusted at the
  // jumps to this target.  (+1 accounts for the carried argument.)
  for (int i = 0; i < reaching_frames_.length(); i++) {
    ASSERT(reaching_frames_[i] == NULL ||
           reaching_frames_[i]->height() == expected_height_ + 1);
  }
#endif
  // Drop leftover statement state from the frame before merging, even on
  // the fall through.  This is so we can bind the return target with state
  // on the frame.
  if (cgen()->has_valid_frame()) {
    int count = cgen()->frame()->height() - expected_height_;
    cgen()->frame()->ForgetElements(count);
    cgen()->frame()->Push(arg);
  }
  DoBind();
  *arg = cgen()->frame()->Pop();
}
430
+
431
+
432
+ #undef __
433
+
434
+
435
+ } } // namespace v8::internal
436
+
437
+ #endif // V8_TARGET_ARCH_X64
@@ -0,0 +1,3639 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_X64)
31
+
32
+ #include "x64/lithium-codegen-x64.h"
33
+ #include "code-stubs.h"
34
+ #include "stub-cache.h"
35
+
36
+ namespace v8 {
37
+ namespace internal {
38
+
39
+
40
// When invoking builtins, we need to record the safepoint in the middle of
// the invoke instruction sequence generated by the macro assembler.
class SafepointGenerator : public PostCallGenerator {
 public:
  // ensure_reloc_space: when true, Generate() first emits a filler
  // comment so the reloc info has room to be patched with calls during
  // deoptimization.
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index,
                     bool ensure_reloc_space = false)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index),
        ensure_reloc_space_(ensure_reloc_space) { }
  virtual ~SafepointGenerator() { }

  // Called by the macro assembler right after the call instruction.
  virtual void Generate() {
    // Ensure that we have enough space in the reloc info to patch
    // this with calls when doing deoptimization.
    if (ensure_reloc_space_) {
      codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
    }
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;            // Owning code generator (not owned here).
  LPointerMap* pointers_;        // Pointer map recorded at the safepoint.
  int deoptimization_index_;
  bool ensure_reloc_space_;
};
69
+
70
+
71
+ #define __ masm()->
72
+
73
+ bool LCodeGen::GenerateCode() {
74
+ HPhase phase("Code generation", chunk());
75
+ ASSERT(is_unused());
76
+ status_ = GENERATING;
77
+ return GeneratePrologue() &&
78
+ GenerateBody() &&
79
+ GenerateDeferredCode() &&
80
+ GenerateJumpTable() &&
81
+ GenerateSafepointTable();
82
+ }
83
+
84
+
85
// Attach generated metadata (stack slot count, safepoint table offset,
// deoptimization data) to the finished Code object.
void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}
91
+
92
+
93
// Abort code generation with a printf-style reason.  The message is only
// printed when --trace-bailout is on; the status always becomes ABORTED.
void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
    PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}
105
+
106
+
107
// Emit a printf-style assembler comment (no-op unless --code-comments).
void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  int length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}
123
+
124
+
125
// Emit the function prologue: standard x64 JS frame setup, stack slot
// reservation, optional local context allocation, and optional call
// tracing.  Returns false if code generation has been aborted.
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  // Break into the debugger at entry when --stop-at names this function.
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      // In debug mode, fill the reserved slots with the zap value so
      // uninitialized reads are recognizable.
      __ movl(rax, Immediate(slots));
      __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
      Label loop;
      __ bind(&loop);
      __ push(kScratchRegister);
      __ decl(rax);
      __ j(not_zero, &loop);
    } else {
      __ subq(rsp, Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ movq(Operand(rsp, offset), rax);
      }
#endif
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both rax and rsi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in rsi.
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(slot->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier. This clobbers all involved
        // registers, so we have use a third register to avoid
        // clobbering rsi.
        __ movq(rcx, rsi);
        __ RecordWrite(rcx, context_offset, rax, rbx);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
212
+
213
+
214
// Compile every lithium instruction in order.  Instructions following a
// label that has been replaced (dead block) are skipped until the next
// live label is seen.
bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      // A replaced label starts an unreachable block; stop emitting.
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}
233
+
234
+
235
+ LInstruction* LCodeGen::GetNextInstruction() {
236
+ if (current_instruction_ < instructions_->length() - 1) {
237
+ return instructions_->at(current_instruction_ + 1);
238
+ } else {
239
+ return NULL;
240
+ }
241
+ }
242
+
243
+
244
// Emit the deoptimization jump table: one bound label per entry followed
// by a jump to the corresponding deoptimization entry address.
bool LCodeGen::GenerateJumpTable() {
  for (int i = 0; i < jump_table_.length(); i++) {
    JumpTableEntry* info = jump_table_[i];
    __ bind(&(info->label_));
    __ Jump(info->address_, RelocInfo::RUNTIME_ENTRY);
  }
  return !is_aborted();
}
252
+
253
+
254
// Emit all deferred code stubs after the main body.  Each stub is entered
// via its entry label and jumps back to its exit label when done.
bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}
268
+
269
+
270
// Pad the code end and emit the safepoint table.
bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  // Ensure that there is space at the end of the code to write a number
  // of jump instructions, as well as to afford writing a call near the end
  // of the code.
  // The jumps are used when there isn't room in the code stream to write
  // a long call instruction. Instead it writes a shorter call to a
  // jump instruction in the same code object.
  // The calls are used when lazy deoptimizing a function and calls to a
  // deoptimization function.
  int short_deopts = safepoints_.CountShortDeoptimizationIntervals(
      static_cast<unsigned>(MacroAssembler::kJumpInstructionLength));
  int byte_count = (short_deopts) * MacroAssembler::kJumpInstructionLength;
  // Pad with int3 so accidental execution of the filler traps.
  while (byte_count-- > 0) {
    __ int3();
  }
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}
289
+
290
+
291
// Map a register-allocator index to the corresponding general register.
Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}
294
+
295
+
296
// Map a register-allocator index to the corresponding XMM register.
XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}
299
+
300
+
301
// Resolve a register operand to its allocated general register.
Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}
305
+
306
+
307
// Resolve a double-register operand to its allocated XMM register.
XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}
311
+
312
+
313
// True when the operand is a constant with an int32 representation.
bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
      chunk_->LookupLiteralRepresentation(op).IsInteger32();
}
317
+
318
+
319
// True when the operand is a constant with a tagged representation.
bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
      chunk_->LookupLiteralRepresentation(op).IsTagged();
}
323
+
324
+
325
// Extract the int32 value of a constant operand.  Asserts the literal's
// Number() round-trips through int32_t without loss.
int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}
332
+
333
+
334
// Look up the tagged literal a constant operand refers to.
Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  Handle<Object> literal = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
  return literal;
}
339
+
340
+
341
// Translate a stack-slot operand into an rbp-relative memory operand.
// Non-negative indices are spill slots below the fixed frame part;
// negative indices are incoming parameters above the return address.
Operand LCodeGen::ToOperand(LOperand* op) const {
  // Does not handle registers. In X64 assembler, plain registers are not
  // representable as an Operand.
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return Operand(rbp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(rbp, -(index - 1) * kPointerSize);
  }
}
355
+
356
+
357
// Recursively write a deoptimization translation for this environment
// chain (outermost frame first).  Spilled register values, when present,
// are recorded as duplicates ahead of the primary value.
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  // Outer environments are emitted before this frame's values.
  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}
394
+
395
+
396
// Append one value (stack slot, register, argument, or constant) to a
// deoptimization translation, tagging it with its representation.
void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    // Arguments live above the spill slots in the frame.
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}
434
+
435
+
436
// Emit a call to a code object and register the lazy-deopt safepoint.
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ call(code, mode);
  RegisterLazyDeoptimization(instr);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}
452
+
453
+
454
// Emit a runtime call and register the lazy-deopt safepoint.
void LCodeGen::CallRuntime(Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr);
}
465
+
466
+
467
+ void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
468
+ // Create the environment to bailout to. If the call has side effects
469
+ // execution has to continue after the call otherwise execution can continue
470
+ // from a previous bailout point repeating the call.
471
+ LEnvironment* deoptimization_environment;
472
+ if (instr->HasDeoptimizationEnvironment()) {
473
+ deoptimization_environment = instr->deoptimization_environment();
474
+ } else {
475
+ deoptimization_environment = instr->environment();
476
+ }
477
+
478
+ RegisterEnvironmentForDeoptimization(deoptimization_environment);
479
+ RecordSafepoint(instr->pointer_map(),
480
+ deoptimization_environment->deoptimization_index());
481
+ }
482
+
483
+
484
// Assign this environment a deoptimization index and write its
// translation, unless it has already been registered.
void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    // Count the frames in the (inlined) environment chain.
    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}
510
+
511
+
512
// Emit a (possibly conditional) jump to the eager deoptimization entry
// for the given environment.  Conditional deopts go through a shared
// jump table so the inline branch stays short.
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  // NOTE(review): the ASSERT and the NULL check are redundant with each
  // other; the check is the release-mode fallback.
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  if (cc == no_condition) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    JumpTableEntry* jump_info = NULL;
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (jump_table_.length() > 0 &&
        jump_table_[jump_table_.length() - 1]->address_ == entry) {
      jump_info = jump_table_[jump_table_.length() - 1];
    } else {
      jump_info = new JumpTableEntry(entry);
      jump_table_.Add(jump_info);
    }
    __ j(cc, &jump_info->label_);
  }
}
539
+
540
+
541
// Build the DeoptimizationInputData array (translations, literals, OSR
// info, per-deopt entries) and attach it to the code object.  No-op when
// nothing was registered for deoptimization.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      Factory::NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}
572
+
573
+
574
+ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
575
+ int result = deoptimization_literals_.length();
576
+ for (int i = 0; i < deoptimization_literals_.length(); ++i) {
577
+ if (deoptimization_literals_[i].is_identical_to(literal)) return i;
578
+ }
579
+ deoptimization_literals_.Add(literal);
580
+ return result;
581
+ }
582
+
583
+
584
+ void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
585
+ ASSERT(deoptimization_literals_.length() == 0);
586
+
587
+ const ZoneList<Handle<JSFunction> >* inlined_closures =
588
+ chunk()->inlined_closures();
589
+
590
+ for (int i = 0, length = inlined_closures->length();
591
+ i < length;
592
+ i++) {
593
+ DefineDeoptimizationLiteral(inlined_closures->at(i));
594
+ }
595
+
596
+ inlined_function_count_ = deoptimization_literals_.length();
597
+ }
598
+
599
+
600
// Record a safepoint of the given kind, registering every pointer-valued
// stack slot (and, for register safepoints, every pointer register).
void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();

  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register rsi always contains a pointer to the context.
    safepoint.DefinePointerRegister(rsi);
  }
}
622
+
623
+
624
// Convenience wrapper: a plain safepoint with no saved registers and no
// arguments pushed on the stack.
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}
628
+
629
+
630
// Convenience wrapper: a safepoint with an empty pointer map (no tagged
// operands to record at all).
void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}
634
+
635
+
636
// Convenience wrapper: a safepoint for a call site where registers were
// saved and must be scanned as pointer roots (see the 4-argument
// RecordSafepoint for how register operands are recorded).
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}
642
+
643
+
644
+ void LCodeGen::RecordPosition(int position) {
645
+ if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
646
+ masm()->positions_recorder()->RecordPosition(position);
647
+ }
648
+
649
+
650
// Emits the entry point of a basic block: an assembly comment (visible in
// generated-code dumps), the bound label, and the block's gap moves.
void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  // Remember which block is being emitted so branch emission can detect
  // fall-through opportunities.
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}
660
+
661
+
662
// Emits the code for one parallel move by delegating to the gap resolver.
void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}
665
+
666
+
667
// Emits all parallel moves attached to a gap, one inner position at a time.
void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  // If the next instruction is a lazy bailout, its safepoint must be
  // associated with the pc after the gap moves.
  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}
682
+
683
+
684
// Parameters require no code here; presumably they are already in their
// allocated locations when the function is entered -- TODO confirm against
// the frame setup in the prologue.
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}
687
+
688
+
689
// Calls the code stub selected by the hydrogen instruction's major key.
// All of these stubs deliver their result in rax.
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCharAt: {
      StringCharAtStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::MathPow: {
      MathPowStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
742
+
743
+
744
// No code is emitted for an unknown OSR value; presumably the value is
// already in its allocated location when entering via on-stack replacement
// -- TODO confirm against the OSR entry sequence.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
747
+
748
+
749
+ void LCodeGen::DoModI(LModI* instr) {
750
+ LOperand* right = instr->InputAt(1);
751
+ ASSERT(ToRegister(instr->result()).is(rdx));
752
+ ASSERT(ToRegister(instr->InputAt(0)).is(rax));
753
+ ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
754
+ ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));
755
+
756
+ Register right_reg = ToRegister(right);
757
+
758
+ // Check for x % 0.
759
+ if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
760
+ __ testl(right_reg, right_reg);
761
+ DeoptimizeIf(zero, instr->environment());
762
+ }
763
+
764
+ // Sign extend eax to edx. (We are using only the low 32 bits of the values.)
765
+ __ cdq();
766
+
767
+ // Check for (0 % -x) that will produce negative zero.
768
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
769
+ NearLabel positive_left;
770
+ NearLabel done;
771
+ __ testl(rax, rax);
772
+ __ j(not_sign, &positive_left);
773
+ __ idivl(right_reg);
774
+
775
+ // Test the remainder for 0, because then the result would be -0.
776
+ __ testl(rdx, rdx);
777
+ __ j(not_zero, &done);
778
+
779
+ DeoptimizeIf(no_condition, instr->environment());
780
+ __ bind(&positive_left);
781
+ __ idivl(right_reg);
782
+ __ bind(&done);
783
+ } else {
784
+ __ idivl(right_reg);
785
+ }
786
+ }
787
+
788
+
789
// Computes left / right using idivl. Register constraints: the dividend and
// quotient (result) live in rax, the remainder is produced in rdx, and the
// divisor must not alias either. This instruction requires an exact
// division: a nonzero remainder deoptimizes at the end.
void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(rax));
  ASSERT(ToRegister(instr->InputAt(0)).is(rax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));

  Register left_reg = rax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ testl(right_reg, right_reg);
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel left_not_zero;
    __ testl(left_reg, left_reg);
    __ j(not_zero, &left_not_zero);
    __ testl(right_reg, right_reg);
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1): the quotient overflows int32 and idivl
  // would raise #DE, so deoptimize instead.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    NearLabel left_not_min_int;
    __ cmpl(left_reg, Immediate(kMinInt));
    __ j(not_zero, &left_not_min_int);
    __ cmpl(right_reg, Immediate(-1));
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to rdx.
  __ cdq();
  __ idivl(right_reg);

  // Deoptimize if remainder is not 0.
  __ testl(rdx, rdx);
  DeoptimizeIf(not_zero, instr->environment());
}
833
+
834
+
835
// 32-bit integer multiply; the result overwrites the left operand's
// register.
void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  // The multiply clobbers 'left'; save the original value so the
  // minus-zero check below can still inspect the operand signs.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ movl(kScratchRegister, left);
  }

  if (right->IsConstantOperand()) {
    int right_value = ToInteger32(LConstantOperand::cast(right));
    __ imull(left, left, Immediate(right_value));
  } else if (right->IsStackSlot()) {
    __ imull(left, ToOperand(right));
  } else {
    __ imull(left, ToRegister(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero: a zero
    // product is -0 when exactly one operand is negative (or the
    // constant operand is non-positive).
    NearLabel done;
    __ testl(left, left);
    __ j(not_zero, &done);
    if (right->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else if (right->IsStackSlot()) {
      // OR the saved left with right: the sign bit is set iff either
      // operand was negative.
      __ or_(kScratchRegister, ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(kScratchRegister, ToRegister(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}
876
+
877
+
878
// Bitwise AND/OR/XOR computed in place in the left operand's register; the
// right operand may be an immediate, a stack slot, or a register.
void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), Immediate(right_operand));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else if (right->IsStackSlot()) {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    ASSERT(right->IsRegister());
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), ToRegister(right));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), ToRegister(right));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), ToRegister(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
933
+
934
+
935
// Shifts the left operand in place, either by the amount in rcx (variable
// shift) or by an immediate masked to 5 bits, matching JS shift semantics.
void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(rcx));

    switch (instr->op()) {
      case Token::SAR:
        __ sarl_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shrl_cl(ToRegister(left));
        // A negative result cannot be represented as an int32 result of
        // the (unsigned) SHR, so deoptimize if the instruction needs that.
        if (instr->can_deopt()) {
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr->environment());
        }
        break;
      case Token::SHL:
        __ shll_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sarl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHR:
        // SHR by 0 leaves the value unchanged, but a negative input is
        // then out of range for the unsigned interpretation.
        if (shift_count == 0 && instr->can_deopt()) {
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr->environment());
        } else {
          __ shrl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shll(ToRegister(left), Immediate(shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
989
+
990
+
991
+ void LCodeGen::DoSubI(LSubI* instr) {
992
+ LOperand* left = instr->InputAt(0);
993
+ LOperand* right = instr->InputAt(1);
994
+ ASSERT(left->Equals(instr->result()));
995
+
996
+ if (right->IsConstantOperand()) {
997
+ __ subl(ToRegister(left),
998
+ Immediate(ToInteger32(LConstantOperand::cast(right))));
999
+ } else if (right->IsRegister()) {
1000
+ __ subl(ToRegister(left), ToRegister(right));
1001
+ } else {
1002
+ __ subl(ToRegister(left), ToOperand(right));
1003
+ }
1004
+
1005
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1006
+ DeoptimizeIf(overflow, instr->environment());
1007
+ }
1008
+ }
1009
+
1010
+
1011
// Materializes an untagged 32-bit integer constant into the result register.
void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ movl(ToRegister(instr->result()), Immediate(instr->value()));
}
1015
+
1016
+
1017
// Materializes a double constant into an XMM register.
void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  uint64_t int_val = BitCast<uint64_t, double>(v);
  // Use xor to produce +0.0 in a fast and compact way, but avoid to
  // do so if the constant is -0.0 (whose bit pattern is nonzero).
  if (int_val == 0) {
    __ xorpd(res, res);
  } else {
    // Load the raw 64-bit pattern through a temp GPR and move it into
    // the XMM register.
    Register tmp = ToRegister(instr->TempAt(0));
    __ Set(tmp, int_val);
    __ movq(res, tmp);
  }
}
1032
+
1033
+
1034
// Materializes a tagged (handle) constant into the result register.
void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Move(ToRegister(instr->result()), instr->value());
}
1038
+
1039
+
1040
+ void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1041
+ Register result = ToRegister(instr->result());
1042
+ Register array = ToRegister(instr->InputAt(0));
1043
+ __ movq(result, FieldOperand(array, JSArray::kLengthOffset));
1044
+ }
1045
+
1046
+
1047
+ void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
1048
+ Register result = ToRegister(instr->result());
1049
+ Register array = ToRegister(instr->InputAt(0));
1050
+ __ movq(result, FieldOperand(array, FixedArray::kLengthOffset));
1051
+ }
1052
+
1053
+
1054
+ void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) {
1055
+ Register result = ToRegister(instr->result());
1056
+ Register array = ToRegister(instr->InputAt(0));
1057
+ __ movq(result, FieldOperand(array, PixelArray::kLengthOffset));
1058
+ }
1059
+
1060
+
1061
// Implements Object.prototype.valueOf-style unwrapping: a JSValue wrapper
// yields its boxed value; anything else (including smis) is returned as-is.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  NearLabel done;
  // If the object is a smi return the object.
  __ JumpIfSmi(input, &done);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister);
  __ j(not_equal, &done);
  // Unwrap: replace the wrapper with its value field.
  __ movq(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1076
+
1077
+
1078
// Bitwise NOT computed in place (the input register is also the result).
void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}
1083
+
1084
+
1085
// Throws the input value by calling the runtime; control does not return.
void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToRegister(instr->InputAt(0)));
  CallRuntime(Runtime::kThrow, 1, instr);

  // Trap in debug builds if the runtime call ever falls through.
  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}
1094
+
1095
+
1096
+ void LCodeGen::DoAddI(LAddI* instr) {
1097
+ LOperand* left = instr->InputAt(0);
1098
+ LOperand* right = instr->InputAt(1);
1099
+ ASSERT(left->Equals(instr->result()));
1100
+
1101
+ if (right->IsConstantOperand()) {
1102
+ __ addl(ToRegister(left),
1103
+ Immediate(ToInteger32(LConstantOperand::cast(right))));
1104
+ } else if (right->IsRegister()) {
1105
+ __ addl(ToRegister(left), ToRegister(right));
1106
+ } else {
1107
+ __ addl(ToRegister(left), ToOperand(right));
1108
+ }
1109
+
1110
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1111
+ DeoptimizeIf(overflow, instr->environment());
1112
+ }
1113
+ }
1114
+
1115
+
1116
// Double arithmetic on XMM registers. All operations except MOD are
// computed in-place into the left operand; MOD is not implemented on x64
// in this version and aborts code generation.
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  // All operations except MOD are computed in-place.
  ASSERT(instr->op() == Token::MOD || left->Equals(instr->result()));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::SUB:
      __ subsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::MUL:
      __ mulsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::DIV:
      __ divsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::MOD:
      Abort("Unimplemented: %s", "DoArithmeticD MOD");
      break;
    default:
      UNREACHABLE();
      break;
  }
}
1142
+
1143
+
1144
// Generic (tagged-operand) binary arithmetic: delegates to the
// type-recording binary op stub with its fixed rdx/rax calling convention;
// the result comes back in rax.
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
  ASSERT(ToRegister(instr->InputAt(1)).is(rax));
  ASSERT(ToRegister(instr->result()).is(rax));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
1152
+
1153
+
1154
+ int LCodeGen::GetNextEmittedBlock(int block) {
1155
+ for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1156
+ LLabel* label = chunk_->GetLabel(i);
1157
+ if (!label->HasReplacement()) return i;
1158
+ }
1159
+ return -1;
1160
+ }
1161
+
1162
+
1163
// Emits a two-way branch on condition cc, choosing the branch shape that
// lets the next block to be emitted fall through without an explicit jump.
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    // Both edges lead to the same block: an unconditional goto suffices.
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    // Fall through to the left target; jump away on the negated condition.
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    // Fall through to the right target.
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    // Neither target falls through: conditional jump plus an
    // unconditional one (elided if cc is 'always').
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    if (cc != always) {
      __ jmp(chunk_->GetAssemblyLabel(right_block));
    }
  }
}
1181
+
1182
+
1183
// Branches on the truthiness (ECMA ToBoolean) of the input, specialized by
// the hydrogen representation and type information.
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    // Any nonzero int32 is true.
    Register reg = ToRegister(instr->InputAt(0));
    __ testl(reg, reg);
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    // Compare against +0.0; zeros and NaN set the zero flag under
    // ucomisd and therefore take the false edge.
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorpd(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->type();
    if (type.IsBoolean()) {
      __ Cmp(reg, Factory::true_value());
      EmitBranch(true_block, false_block, equal);
    } else if (type.IsSmi()) {
      __ SmiCompare(reg, Smi::FromInt(0));
      EmitBranch(true_block, false_block, not_equal);
    } else {
      // General case: test the ToBoolean special values inline, then
      // heap numbers, and fall back to the ToBoolean stub for the rest.
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
      __ j(equal, false_label);
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      __ j(equal, true_label);
      __ CompareRoot(reg, Heap::kFalseValueRootIndex);
      __ j(equal, false_label);
      __ SmiCompare(reg, Smi::FromInt(0));
      __ j(equal, false_label);
      __ JumpIfSmi(reg, true_label);

      // Test for double values. Plus/minus zero and NaN are false.
      NearLabel call_stub;
      __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
                     Heap::kHeapNumberMapRootIndex);
      __ j(not_equal, &call_stub);

      // HeapNumber => false iff +0, -0, or NaN. These three cases set the
      // zero flag when compared to zero using ucomisd.
      __ xorpd(xmm0, xmm0);
      __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
      __ j(zero, false_label);
      __ jmp(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub;
      __ Pushad();
      __ push(reg);
      __ CallStub(&stub);
      __ testq(rax, rax);
      __ Popad();
      EmitBranch(true_block, false_block, not_zero);
    }
  }
}
1247
+
1248
+
1249
// Emits an unconditional transfer to 'block', unless it is the next block
// to be emitted, in which case control simply falls through.
void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      // Fast path: stack pointer above the limit -> go straight to the
      // target; otherwise detour through the deferred stack check.
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}
1264
+
1265
+
1266
// Out-of-line stack guard: saves all registers, calls the runtime, records
// a register safepoint for the call, and restores the registers.
void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  __ Pushad();
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  __ Popad();
}
1273
+
1274
+
1275
// Emits a goto, attaching a deferred stack-guard call when the instruction
// requires a stack check (presumably on loop back edges -- confirm against
// the LGoto construction site).
void LCodeGen::DoGoto(LGoto* instr) {
  // Deferred code object that performs the runtime stack check out of line.
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}
1291
+
1292
+
1293
+ inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1294
+ Condition cond = no_condition;
1295
+ switch (op) {
1296
+ case Token::EQ:
1297
+ case Token::EQ_STRICT:
1298
+ cond = equal;
1299
+ break;
1300
+ case Token::LT:
1301
+ cond = is_unsigned ? below : less;
1302
+ break;
1303
+ case Token::GT:
1304
+ cond = is_unsigned ? above : greater;
1305
+ break;
1306
+ case Token::LTE:
1307
+ cond = is_unsigned ? below_equal : less_equal;
1308
+ break;
1309
+ case Token::GTE:
1310
+ cond = is_unsigned ? above_equal : greater_equal;
1311
+ break;
1312
+ case Token::IN:
1313
+ case Token::INSTANCEOF:
1314
+ default:
1315
+ UNREACHABLE();
1316
+ }
1317
+ return cond;
1318
+ }
1319
+
1320
+
1321
+ void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
1322
+ if (right->IsConstantOperand()) {
1323
+ int32_t value = ToInteger32(LConstantOperand::cast(right));
1324
+ if (left->IsRegister()) {
1325
+ __ cmpl(ToRegister(left), Immediate(value));
1326
+ } else {
1327
+ __ cmpl(ToOperand(left), Immediate(value));
1328
+ }
1329
+ } else if (right->IsRegister()) {
1330
+ __ cmpl(ToRegister(left), ToRegister(right));
1331
+ } else {
1332
+ __ cmpl(ToRegister(left), ToOperand(right));
1333
+ }
1334
+ }
1335
+
1336
+
1337
// Materializes the boolean result of an int32 or double comparison.
void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();

  NearLabel unordered;
  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the unordered case, which produces a false value.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, &unordered);
  } else {
    EmitCmpI(left, right);
  }

  // Load true first; fall through to false when the condition fails.
  NearLabel done;
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
  __ j(cc, &done);

  __ bind(&unordered);
  __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
  __ bind(&done);
}
1361
+
1362
+
1363
// Branch variant of CmpID: compares int32 or double operands and branches
// on the resulting condition.
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the false block.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}
1381
+
1382
+
1383
+ void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
1384
+ Register left = ToRegister(instr->InputAt(0));
1385
+ Register right = ToRegister(instr->InputAt(1));
1386
+ Register result = ToRegister(instr->result());
1387
+
1388
+ NearLabel different, done;
1389
+ __ cmpq(left, right);
1390
+ __ j(not_equal, &different);
1391
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
1392
+ __ jmp(&done);
1393
+ __ bind(&different);
1394
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
1395
+ __ bind(&done);
1396
+ }
1397
+
1398
+
1399
+ void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1400
+ Register left = ToRegister(instr->InputAt(0));
1401
+ Register right = ToRegister(instr->InputAt(1));
1402
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
1403
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
1404
+
1405
+ __ cmpq(left, right);
1406
+ EmitBranch(true_block, false_block, equal);
1407
+ }
1408
+
1409
+
1410
// Materializes true/false for a null comparison. In strict mode only null
// itself matches; otherwise undefined and undetectable objects also match.
void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  // If the expression is known to be a smi, then it's
  // definitely not null. Materialize false.
  // Consider adding other type and representation tests too.
  if (instr->hydrogen()->value()->type().IsSmi()) {
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    return;
  }

  __ CompareRoot(reg, Heap::kNullValueRootIndex);
  if (instr->is_strict()) {
    // Branch-light materialization: select the root-array index of the
    // answer, then load the boolean through the root register.
    __ movl(result, Immediate(Heap::kTrueValueRootIndex));
    NearLabel load;
    __ j(equal, &load);
    __ movl(result, Immediate(Heap::kFalseValueRootIndex));
    __ bind(&load);
    __ movq(result, Operand(kRootRegister, result, times_pointer_size, 0));
  } else {
    NearLabel true_value, false_value, done;
    __ j(equal, &true_value);
    __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
    __ j(equal, &true_value);
    __ JumpIfSmi(reg, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &true_value);
    __ bind(&false_value);
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    __ jmp(&done);
    __ bind(&true_value);
    __ LoadRoot(result, Heap::kTrueValueRootIndex);
    __ bind(&done);
  }
}
1451
+
1452
+
1453
// Branch variant of IsNull; see DoIsNull for the strict/non-strict
// matching rules.
void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));

  int false_block = chunk_->LookupDestination(instr->false_block_id());

  if (instr->hydrogen()->representation().IsSpecialization() ||
      instr->hydrogen()->type().IsSmi()) {
    // If the expression is known to be untagged or a smi, then it's
    // definitely not null, and it can't be an undetectable object.
    // Jump directly to the false block.
    EmitGoto(false_block);
    return;
  }

  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ Cmp(reg, Factory::null_value());
  if (instr->is_strict()) {
    // Strict equality: only null itself matches.
    EmitBranch(true_block, false_block, equal);
  } else {
    // Non-strict: undefined and undetectable objects also count as null.
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ j(equal, true_label);
    __ Cmp(reg, Factory::undefined_value());
    __ j(equal, true_label);
    __ JumpIfSmi(reg, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = ToRegister(instr->TempAt(0));
    __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, not_zero);
  }
}
1488
+
1489
+
1490
// Tests whether 'input' is a JS object in the IsObject sense: null counts
// as an object, undetectable objects do not. Jumps to the given labels for
// the definite cases and returns the condition that holds (flags already
// set) for the final instance-type range check.
Condition LCodeGen::EmitIsObject(Register input,
                                 Label* is_not_object,
                                 Label* is_object) {
  ASSERT(!input.is(kScratchRegister));

  __ JumpIfSmi(input, is_not_object);

  __ CompareRoot(input, Heap::kNullValueRootIndex);
  __ j(equal, is_object);

  __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, is_not_object);

  // Range check: FIRST_JS_OBJECT_TYPE <= instance type <= LAST_JS_OBJECT_TYPE.
  __ movzxbl(kScratchRegister,
             FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
  __ cmpb(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
  __ j(below, is_not_object);
  __ cmpb(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
  return below_equal;
}
1513
+
1514
+
1515
+ void LCodeGen::DoIsObject(LIsObject* instr) {
1516
+ Register reg = ToRegister(instr->InputAt(0));
1517
+ Register result = ToRegister(instr->result());
1518
+ Label is_false, is_true, done;
1519
+
1520
+ Condition true_cond = EmitIsObject(reg, &is_false, &is_true);
1521
+ __ j(true_cond, &is_true);
1522
+
1523
+ __ bind(&is_false);
1524
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
1525
+ __ jmp(&done);
1526
+
1527
+ __ bind(&is_true);
1528
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
1529
+
1530
+ __ bind(&done);
1531
+ }
1532
+
1533
+
1534
+ void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1535
+ Register reg = ToRegister(instr->InputAt(0));
1536
+
1537
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
1538
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
1539
+ Label* true_label = chunk_->GetAssemblyLabel(true_block);
1540
+ Label* false_label = chunk_->GetAssemblyLabel(false_block);
1541
+
1542
+ Condition true_cond = EmitIsObject(reg, false_label, true_label);
1543
+
1544
+ EmitBranch(true_block, false_block, true_cond);
1545
+ }
1546
+
1547
+
1548
// Materializes true/false for a smi check without branching: the indicator
// (0 for smi, 1 for non-smi) indexes directly into the root array.
void LCodeGen::DoIsSmi(LIsSmi* instr) {
  LOperand* input_operand = instr->InputAt(0);
  Register result = ToRegister(instr->result());
  if (input_operand->IsRegister()) {
    Register input = ToRegister(input_operand);
    __ CheckSmiToIndicator(result, input);
  } else {
    Operand input = ToOperand(instr->InputAt(0));
    __ CheckSmiToIndicator(result, input);
  }
  // result is zero if input is a smi, and one otherwise.
  // The trick relies on false immediately following true in the root array.
  ASSERT(Heap::kFalseValueRootIndex == Heap::kTrueValueRootIndex + 1);
  __ movq(result, Operand(kRootRegister, result, times_pointer_size,
                          Heap::kTrueValueRootIndex * kPointerSize));
}
1563
+
1564
+
1565
+ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1566
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
1567
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
1568
+
1569
+ Condition is_smi;
1570
+ if (instr->InputAt(0)->IsRegister()) {
1571
+ Register input = ToRegister(instr->InputAt(0));
1572
+ is_smi = masm()->CheckSmi(input);
1573
+ } else {
1574
+ Operand input = ToOperand(instr->InputAt(0));
1575
+ is_smi = masm()->CheckSmi(input);
1576
+ }
1577
+ EmitBranch(true_block, false_block, is_smi);
1578
+ }
1579
+
1580
+
1581
+ static InstanceType TestType(HHasInstanceType* instr) {
1582
+ InstanceType from = instr->from();
1583
+ InstanceType to = instr->to();
1584
+ if (from == FIRST_TYPE) return to;
1585
+ ASSERT(from == to || to == LAST_TYPE);
1586
+ return from;
1587
+ }
1588
+
1589
+
1590
+ static Condition BranchCondition(HHasInstanceType* instr) {
1591
+ InstanceType from = instr->from();
1592
+ InstanceType to = instr->to();
1593
+ if (from == to) return equal;
1594
+ if (to == LAST_TYPE) return above_equal;
1595
+ if (from == FIRST_TYPE) return below_equal;
1596
+ UNREACHABLE();
1597
+ return equal;
1598
+ }
1599
+
1600
+
1601
// Materializes the result of an instance-type range check as the
// true/false heap values in the result register.
void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  // Smis have no map, so they are never in the requested type range.
  __ testl(input, Immediate(kSmiTagMask));
  NearLabel done, is_false;
  __ j(zero, &is_false);
  // result is used as a scratch register for the map here; it is
  // overwritten with the boolean answer below either way.
  __ CmpObjectType(input, TestType(instr->hydrogen()), result);
  __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}
1617
+
1618
+
1619
// Branch version of DoHasInstanceType: smis go straight to the false
// block, otherwise the instance type decides the successor.
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  // Smis have no map, so they can never satisfy the type range check.
  __ JumpIfSmi(input, false_label);

  __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}
1632
+
1633
+
1634
// Materializes the result of a cached-array-index test on a string as the
// true/false heap values.  Speculatively loads true and only patches in
// false when the hash field test goes the other way.
void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ testl(FieldOperand(input, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  NearLabel done;
  // Keep true when the mask bits are set; this mirrors the not_equal
  // condition used by DoHasCachedArrayIndexAndBranch below.
  __ j(not_zero, &done);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}
1647
+
1648
+
1649
+ void LCodeGen::DoHasCachedArrayIndexAndBranch(
1650
+ LHasCachedArrayIndexAndBranch* instr) {
1651
+ Register input = ToRegister(instr->InputAt(0));
1652
+
1653
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
1654
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
1655
+
1656
+ __ testl(FieldOperand(input, String::kHashFieldOffset),
1657
+ Immediate(String::kContainsCachedArrayIndexMask));
1658
+ EmitBranch(true_block, false_block, not_equal);
1659
+ }
1660
+
1661
+
1662
// Branches to a label or falls through with the answer in the z flag.
// Trashes the temp register and possibly input (if it and temp are aliased).
// input must hold the tagged value under test; class_name is the literal
// class name being compared against.  On fall-through, "equal" means the
// object's class matches class_name.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp) {
  // Smis and non-JS-objects never match any class name.
  __ JumpIfSmi(input, is_false);
  __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
  __ j(below, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ j(equal, is_true);
  } else {
    __ j(equal, is_false);
  }

  // Check if the constructor in the map is a function.
  __ movq(temp, FieldOperand(temp, Map::kConstructorOffset));

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ j(not_equal, is_true);
  } else {
    __ j(not_equal, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ movq(temp, FieldOperand(temp,
                             SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  ASSERT(class_name->IsSymbol());
  __ Cmp(temp, class_name);
  // End with the answer in the z flag.
}
1714
+
1715
+
1716
// Materializes the result of a class-name test as the true/false heap
// values in the result register (which aliases the input register).
void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  Register temp = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();
  NearLabel done;
  Label is_true, is_false;

  EmitClassOfTest(&is_true, &is_false, class_name, input, temp);

  // EmitClassOfTest may fall through with the answer in the z flag;
  // consume it here (equal == class matches).
  __ j(not_equal, &is_false);

  __ bind(&is_true);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ jmp(&done);

  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}
1737
+
1738
+
1739
+ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1740
+ Register input = ToRegister(instr->InputAt(0));
1741
+ Register temp = ToRegister(instr->TempAt(0));
1742
+ Handle<String> class_name = instr->hydrogen()->class_name();
1743
+
1744
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
1745
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
1746
+
1747
+ Label* true_label = chunk_->GetAssemblyLabel(true_block);
1748
+ Label* false_label = chunk_->GetAssemblyLabel(false_block);
1749
+
1750
+ EmitClassOfTest(true_label, false_label, class_name, input, temp);
1751
+
1752
+ EmitBranch(true_block, false_block, equal);
1753
+ }
1754
+
1755
+
1756
+ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1757
+ Register reg = ToRegister(instr->InputAt(0));
1758
+ int true_block = instr->true_block_id();
1759
+ int false_block = instr->false_block_id();
1760
+
1761
+ __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1762
+ EmitBranch(true_block, false_block, equal);
1763
+ }
1764
+
1765
+
1766
// Implements the generic "instanceof" operator by calling InstanceofStub
// with both operands on the stack, then materializing the stub's answer
// (zero in rax means true) as the true/false heap values.
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  InstanceofStub stub(InstanceofStub::kNoFlags);
  __ push(ToRegister(instr->InputAt(0)));
  __ push(ToRegister(instr->InputAt(1)));
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  NearLabel true_value, done;
  __ testq(rax, rax);
  __ j(zero, &true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ bind(&done);
}
1780
+
1781
+
1782
+ void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1783
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
1784
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
1785
+
1786
+ InstanceofStub stub(InstanceofStub::kNoFlags);
1787
+ __ push(ToRegister(instr->InputAt(0)));
1788
+ __ push(ToRegister(instr->InputAt(1)));
1789
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1790
+ __ testq(rax, rax);
1791
+ EmitBranch(true_block, false_block, zero);
1792
+ }
1793
+
1794
+
1795
// "instanceof" against a function known at compile time.  The fast paths
// (smi, null, non-string heuristics) are handled inline; everything else
// is punted to deferred code that calls the full InstanceofStub.
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  // Deferred slow path; Generate() runs out of line and jumps back to
  // deferred->exit().
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_);
    }

   private:
    LInstanceOfKnownGlobal* instr_;
  };


  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label false_result;
  Register object = ToRegister(instr->InputAt(0));

  // A Smi is not an instance of anything.
  __ JumpIfSmi(object, &false_result);

  // Null is not an instance of anything.
  __ CompareRoot(object, Heap::kNullValueRootIndex);
  __ j(equal, &false_result);

  // String values are not instances of anything.
  // Everything that is not a string takes the deferred slow path.
  __ JumpIfNotString(object, kScratchRegister, deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);

  // The deferred code leaves its answer in the result register (rax) and
  // jumps back here.
  __ bind(deferred->exit());
}
1831
+
1832
+
1833
// Deferred slow path for DoInstanceOfKnownGlobal: calls the full
// InstanceofStub with all registers preserved via the safepoint register
// mechanism, then converts the stub's answer into a boolean in rax.
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  __ PushSafepointRegisters();

  InstanceofStub stub(InstanceofStub::kNoFlags);

  __ push(ToRegister(instr->InputAt(0)));
  __ Push(instr->function());
  // Restore the caller's context before the stub call.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  // Stash the stub result before popping the safepoint registers, which
  // would otherwise clobber rax.
  __ movq(kScratchRegister, rax);
  __ PopSafepointRegisters();
  // Zero from the stub means "is an instance" (same convention as
  // DoInstanceOf above).
  __ testq(kScratchRegister, kScratchRegister);
  Label load_false;
  Label done;
  __ j(not_zero, &load_false);
  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(&load_false);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
  __ bind(&done);
}
1854
+
1855
+
1856
// Generic (tagged) comparison: calls the compare IC and materializes the
// answer as the true/false heap values in the result register.
void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  // The compare stub expects the condition (and the operands) reversed
  // for GT and LTE; see DoCmpTAndBranch.
  Condition condition = TokenToCondition(op, false);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  NearLabel true_value, done;
  __ testq(rax, rax);
  __ j(condition, &true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ bind(&done);
}
1875
+
1876
+
1877
// Branch version of DoCmpT: call the compare IC and branch on the
// IC's result in rax instead of materializing a boolean.
void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  // The compare stub expects compare condition and the input operands
  // reversed for GT and LTE.
  Condition condition = TokenToCondition(op, false);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ testq(rax, rax);
  EmitBranch(true_block, false_block, condition);
}
1894
+
1895
+
1896
// Emits the function epilogue: tears down the frame and returns, popping
// the receiver plus the declared parameters off the caller's stack.
void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Preserve the return value on the stack and rely on the runtime
    // call to return the value in the same register.
    __ push(rax);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  __ movq(rsp, rbp);
  __ pop(rbp);
  // +1 accounts for the receiver; rcx is presumably a scratch register
  // for MacroAssembler::Ret — confirm against macro-assembler-x64.
  __ Ret((ParameterCount() + 1) * kPointerSize, rcx);
}
1907
+
1908
+
1909
// Loads a global variable from its property cell.  Deoptimizes on the
// hole value when the variable may have been deleted.
void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
  Register result = ToRegister(instr->result());
  if (result.is(rax)) {
    // Dedicated rax form loads straight from the cell's address.
    __ load_rax(instr->hydrogen()->cell().location(),
                RelocInfo::GLOBAL_PROPERTY_CELL);
  } else {
    // Otherwise: load the cell address first, then dereference it.
    __ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
    __ movq(result, Operand(result, 0));
  }
  if (instr->hydrogen()->check_hole_value()) {
    // The hole in the cell means the property was deleted.
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());
  }
}
1923
+
1924
+
1925
// Stores a global variable into its property cell, deoptimizing when the
// cell holds the hole (the variable may have been deleted).
void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(!value.is(temp));
  bool check_hole = instr->hydrogen()->check_hole_value();
  if (!check_hole && value.is(rax)) {
    // Fast path: dedicated rax store straight to the cell's address.
    __ store_rax(instr->hydrogen()->cell().location(),
                 RelocInfo::GLOBAL_PROPERTY_CELL);
    return;
  }
  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted. We deoptimize in that case.
  __ movq(temp, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
  if (check_hole) {
    __ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());
  }
  __ movq(Operand(temp, 0), value);
}
1946
+
1947
+
1948
+ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
1949
+ Register context = ToRegister(instr->context());
1950
+ Register result = ToRegister(instr->result());
1951
+ __ movq(result, ContextOperand(context, instr->slot_index()));
1952
+ }
1953
+
1954
+
1955
+ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
1956
+ Register context = ToRegister(instr->context());
1957
+ Register value = ToRegister(instr->value());
1958
+ __ movq(ContextOperand(context, instr->slot_index()), value);
1959
+ if (instr->needs_write_barrier()) {
1960
+ int offset = Context::SlotOffset(instr->slot_index());
1961
+ __ RecordWrite(context, offset, value, kScratchRegister);
1962
+ }
1963
+ }
1964
+
1965
+
1966
+ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
1967
+ Register object = ToRegister(instr->InputAt(0));
1968
+ Register result = ToRegister(instr->result());
1969
+ if (instr->hydrogen()->is_in_object()) {
1970
+ __ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
1971
+ } else {
1972
+ __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
1973
+ __ movq(result, FieldOperand(result, instr->hydrogen()->offset()));
1974
+ }
1975
+ }
1976
+
1977
+
1978
+ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
1979
+ ASSERT(ToRegister(instr->object()).is(rax));
1980
+ ASSERT(ToRegister(instr->result()).is(rax));
1981
+
1982
+ __ Move(rcx, instr->name());
1983
+ Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
1984
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
1985
+ }
1986
+
1987
+
1988
// Loads the prototype of a function, deoptimizing if the input is not a
// function or has no prototype yet (the hole).
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function.
  // Note: CmpObjectType leaves the function's map in result.
  __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
  DeoptimizeIf(not_equal, instr->environment());

  // Check whether the function has an instance prototype.
  NearLabel non_instance;
  __ testb(FieldOperand(result, Map::kBitFieldOffset),
           Immediate(1 << Map::kHasNonInstancePrototype));
  __ j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  __ movq(result,
          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(equal, instr->environment());

  // If the function does not have an initial map, we're done.
  NearLabel done;
  __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
  __ j(not_equal, &done);

  // Get the prototype from the initial map.
  __ movq(result, FieldOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in the function's map.
  __ bind(&non_instance);
  __ movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
2027
+
2028
+
2029
// Loads the elements backing store of a JSObject.  In debug builds,
// verifies that the elements are one of the expected fast kinds
// (fixed array, pixel array, or copy-on-write fixed array).
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    NearLabel done;
    __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
           Factory::fixed_array_map());
    __ j(equal, &done);
    __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
           Factory::pixel_array_map());
    __ j(equal, &done);
    __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
           Factory::fixed_cow_array_map());
    __ Check(equal, "Check for fast elements failed.");
    __ bind(&done);
  }
}
2047
+
2048
+
2049
+ void LCodeGen::DoLoadPixelArrayExternalPointer(
2050
+ LLoadPixelArrayExternalPointer* instr) {
2051
+ Register result = ToRegister(instr->result());
2052
+ Register input = ToRegister(instr->InputAt(0));
2053
+ __ movq(result, FieldOperand(input, PixelArray::kExternalPointerOffset));
2054
+ }
2055
+
2056
+
2057
// Loads an argument from the (possibly adapted) arguments area, addressed
// relative to the frame pointer in `arguments`.  Deoptimizes when
// index >= length.
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register result = ToRegister(instr->result());

  // length is clobbered here: it becomes length - index.
  if (instr->index()->IsRegister()) {
    __ subl(length, ToRegister(instr->index()));
  } else {
    __ subl(length, ToOperand(instr->index()));
  }
  // length - index <= 0 means the index is out of range.
  DeoptimizeIf(below_equal, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them add one more.
  __ movq(result, Operand(arguments, length, times_pointer_size, kPointerSize));
}
2073
+
2074
+
2075
// Loads an element from a fast-elements fixed array.  Deoptimizes if the
// slot holds the hole (i.e. the element is absent).
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = ToRegister(instr->key());
  Register result = ToRegister(instr->result());
  // result aliases elements, so the load below clobbers the array pointer.
  ASSERT(result.is(elements));

  // Load the result.
  __ movq(result, FieldOperand(elements,
                               key,
                               times_pointer_size,
                               FixedArray::kHeaderSize));

  // Check for the hole value.
  __ Cmp(result, Factory::the_hole_value());
  DeoptimizeIf(equal, instr->environment());
}
2091
+
2092
+
2093
+ void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) {
2094
+ Register external_elements = ToRegister(instr->external_pointer());
2095
+ Register key = ToRegister(instr->key());
2096
+ Register result = ToRegister(instr->result());
2097
+ ASSERT(result.is(external_elements));
2098
+
2099
+ // Load the result.
2100
+ __ movzxbq(result, Operand(external_elements, key, times_1, 0));
2101
+ }
2102
+
2103
+
2104
+ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2105
+ ASSERT(ToRegister(instr->object()).is(rdx));
2106
+ ASSERT(ToRegister(instr->key()).is(rax));
2107
+
2108
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2109
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
2110
+ }
2111
+
2112
+
2113
// Computes the frame pointer of the frame that holds the actual
// arguments: either the current frame, or the frame below the arguments
// adaptor frame when one is present.
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register result = ToRegister(instr->result());

  // Check for arguments adapter frame.
  NearLabel done, adapted;
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(result, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adapted);

  // No arguments adaptor frame.
  __ movq(result, rbp);
  __ jmp(&done);

  // Arguments adaptor frame present.
  __ bind(&adapted);
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ bind(&done);
}
2135
+
2136
+
2137
// Computes the number of actual arguments as an untagged integer.  The
// input is the frame pointer produced by DoArgumentsElements; when it is
// the current rbp there is no adaptor frame and the static parameter
// count applies.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register result = ToRegister(instr->result());

  NearLabel done;

  // If no arguments adaptor frame the number of arguments is fixed.
  if (instr->InputAt(0)->IsRegister()) {
    __ cmpq(rbp, ToRegister(instr->InputAt(0)));
  } else {
    __ cmpq(rbp, ToOperand(instr->InputAt(0)));
  }
  __ movq(result, Immediate(scope()->num_parameters()));
  __ j(equal, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movq(result, Operand(result,
                          ArgumentsAdaptorFrameConstants::kLengthOffset));
  // The length is stored as a smi; untag it.
  __ SmiToInteger32(result, result);

  // Argument length is in result register.
  __ bind(&done);
}
2160
+
2161
+
2162
// Implements Function.prototype.apply with an arguments object: fixes up
// the receiver, copies the arguments from the (possibly adapted) frame
// onto the stack, and invokes the function.  Deoptimizes for non-object
// receivers and for argument counts above kArgumentsLimit.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  ASSERT(receiver.is(rax));  // Used for parameter count.
  ASSERT(function.is(rdi));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(rax));

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver.
  NearLabel global_object, receiver_ok;
  __ CompareRoot(receiver, Heap::kNullValueRootIndex);
  __ j(equal, &global_object);
  __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
  __ j(equal, &global_object);

  // The receiver should be a JS object.
  Condition is_smi = __ CheckSmi(receiver);
  DeoptimizeIf(is_smi, instr->environment());
  __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, kScratchRegister);
  DeoptimizeIf(below, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  // TODO(kmillikin): We have a hydrogen value for the global object.  See
  // if it's better to use it than to explicitly fetch it from the context
  // here.
  __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmpq(length, Immediate(kArgumentsLimit));
  DeoptimizeIf(above, instr->environment());

  // receiver doubles as the actual-argument count (rax) from here on.
  __ push(receiver);
  __ movq(receiver, length);

  // Loop through the arguments pushing them onto the execution
  // stack.
  NearLabel invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ testl(length, length);
  __ j(zero, &invoke);
  // Push arguments from highest index down; length counts down to zero.
  __ bind(&loop);
  __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
  __ decl(length);
  __ j(not_zero, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index(),
                                         true);
  v8::internal::ParameterCount actual(rax);
  __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
}
2228
+
2229
+
2230
+ void LCodeGen::DoPushArgument(LPushArgument* instr) {
2231
+ LOperand* argument = instr->InputAt(0);
2232
+ if (argument->IsConstantOperand()) {
2233
+ EmitPushConstantOperand(argument);
2234
+ } else if (argument->IsRegister()) {
2235
+ __ push(ToRegister(argument));
2236
+ } else {
2237
+ ASSERT(!argument->IsDoubleRegister());
2238
+ __ push(ToOperand(argument));
2239
+ }
2240
+ }
2241
+
2242
+
2243
+ void LCodeGen::DoContext(LContext* instr) {
2244
+ Register result = ToRegister(instr->result());
2245
+ __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset));
2246
+ }
2247
+
2248
+
2249
+ void LCodeGen::DoOuterContext(LOuterContext* instr) {
2250
+ Register context = ToRegister(instr->context());
2251
+ Register result = ToRegister(instr->result());
2252
+ __ movq(result,
2253
+ Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2254
+ __ movq(result, FieldOperand(result, JSFunction::kContextOffset));
2255
+ }
2256
+
2257
+
2258
+ void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2259
+ Register result = ToRegister(instr->result());
2260
+ __ movq(result, GlobalObjectOperand());
2261
+ }
2262
+
2263
+
2264
+ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
2265
+ Register result = ToRegister(instr->result());
2266
+ __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2267
+ __ movq(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset));
2268
+ }
2269
+
2270
+
2271
// Emits a direct call to a function known at compile time.  Expects the
// function object in rdi (see DoCallConstantFunction).  Switches to the
// callee's context when needed, sets up the argument count, records the
// lazy deoptimization point, and restores the caller's context afterward.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr) {
  // Change context if needed.
  bool change_context =
      (graph()->info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  }

  // Set rax to arguments count if adaption is not needed. Assumes that rax
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ Set(rax, arity);
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function.
  // Recursive self-calls use a dedicated form; otherwise call through the
  // function's code entry field.
  if (*function == *graph()->info()->closure()) {
    __ CallSelf();
  } else {
    __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  }

  // Setup deoptimization.
  RegisterLazyDeoptimization(instr);

  // Restore context.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
2305
+
2306
+
2307
+ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2308
+ ASSERT(ToRegister(instr->result()).is(rax));
2309
+ __ Move(rdi, instr->function());
2310
+ CallKnownFunction(instr->function(), instr->arity(), instr);
2311
+ }
2312
+
2313
+
2314
// Deferred slow path for Math.abs on a tagged (non-smi) input: verifies
// the input is a heap number and, for negative values, allocates a fresh
// heap number holding the value with the sign bit cleared.  Input and
// result share a register; the answer is communicated through the
// safepoint register slot.
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  DeoptimizeIf(not_equal, instr->environment());

  Label done;
  // Pick two scratch registers distinct from input_reg and each other.
  Register tmp = input_reg.is(rax) ? rcx : rax;
  Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx;

  // Preserve the value of all registers.
  __ PushSafepointRegisters();

  Label negative;
  __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| will be restored
  // unchanged by popping safepoint registers.
  __ testl(tmp, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative);
  __ jmp(&done);

  __ bind(&negative);

  Label allocated, slow;
  __ AllocateHeapNumber(tmp, tmp2, &slow);
  __ jmp(&allocated);

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  // Set the pointer to the new heap number in tmp.
  if (!tmp.is(rax)) {
    __ movq(tmp, rax);
  }

  // Restore input_reg after call to runtime.
  __ LoadFromSafepointRegisterSlot(input_reg, input_reg);

  __ bind(&allocated);
  __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
  // Clear the sign bit of the double's bit pattern by shifting it out
  // (shl) and back in (shr).
  __ shl(tmp2, Immediate(1));
  __ shr(tmp2, Immediate(1));
  __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
  // Publish the new heap number as the result via the safepoint slot.
  __ StoreToSafepointRegisterSlot(input_reg, tmp);

  __ bind(&done);
  __ PopSafepointRegisters();
}
2367
+
2368
+
2369
// In-place absolute value of an untagged 32-bit integer.  Deoptimizes on
// overflow: negl leaves a negative result only for kMinInt, whose
// absolute value is not representable.
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  __ testl(input_reg, input_reg);
  Label is_positive;
  __ j(not_sign, &is_positive);
  __ negl(input_reg);  // Sets flags.
  DeoptimizeIf(negative, instr->environment());
  __ bind(&is_positive);
}
2378
+
2379
+
2380
// Math.abs, dispatched on the value's representation: a branch-free sign
// clear for doubles, an inline negate for integers, and a smi fast path
// with a deferred heap-number slow path for tagged values.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();

  if (r.IsDouble()) {
    XMMRegister scratch = xmm0;
    XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
    // -x has the same bits as x except the sign, so x & -x is |x|.
    __ xorpd(scratch, scratch);
    __ subsd(scratch, input_reg);
    __ andpd(input_reg, scratch);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {  // Tagged case.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input_reg = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input_reg, deferred->entry());
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
2415
+
2416
+
2417
+ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2418
+ XMMRegister xmm_scratch = xmm0;
2419
+ Register output_reg = ToRegister(instr->result());
2420
+ XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2421
+ __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2422
+ __ ucomisd(input_reg, xmm_scratch);
2423
+
2424
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2425
+ DeoptimizeIf(below_equal, instr->environment());
2426
+ } else {
2427
+ DeoptimizeIf(below, instr->environment());
2428
+ }
2429
+
2430
+ // Use truncating instruction (OK because input is positive).
2431
+ __ cvttsd2si(output_reg, input_reg);
2432
+
2433
+ // Overflow is signalled with minint.
2434
+ __ cmpl(output_reg, Immediate(0x80000000));
2435
+ DeoptimizeIf(equal, instr->environment());
2436
+ }
2437
+
2438
+
2439
+ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2440
+ const XMMRegister xmm_scratch = xmm0;
2441
+ Register output_reg = ToRegister(instr->result());
2442
+ XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2443
+
2444
+ // xmm_scratch = 0.5
2445
+ __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
2446
+ __ movq(xmm_scratch, kScratchRegister);
2447
+
2448
+ // input = input + 0.5
2449
+ __ addsd(input_reg, xmm_scratch);
2450
+
2451
+ // We need to return -0 for the input range [-0.5, 0[, otherwise
2452
+ // compute Math.floor(value + 0.5).
2453
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2454
+ __ ucomisd(input_reg, xmm_scratch);
2455
+ DeoptimizeIf(below_equal, instr->environment());
2456
+ } else {
2457
+ // If we don't need to bailout on -0, we check only bailout
2458
+ // on negative inputs.
2459
+ __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2460
+ __ ucomisd(input_reg, xmm_scratch);
2461
+ DeoptimizeIf(below, instr->environment());
2462
+ }
2463
+
2464
+ // Compute Math.floor(value + 0.5).
2465
+ // Use truncating instruction (OK because input is positive).
2466
+ __ cvttsd2si(output_reg, input_reg);
2467
+
2468
+ // Overflow is signalled with minint.
2469
+ __ cmpl(output_reg, Immediate(0x80000000));
2470
+ DeoptimizeIf(equal, instr->environment());
2471
+ }
2472
+
2473
+
2474
+ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
2475
+ XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2476
+ ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2477
+ __ sqrtsd(input_reg, input_reg);
2478
+ }
2479
+
2480
+
2481
+ void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2482
+ XMMRegister xmm_scratch = xmm0;
2483
+ XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2484
+ ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2485
+ __ xorpd(xmm_scratch, xmm_scratch);
2486
+ __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
2487
+ __ sqrtsd(input_reg, input_reg);
2488
+ }
2489
+
2490
+
2491
+ void LCodeGen::DoPower(LPower* instr) {
2492
+ LOperand* left = instr->InputAt(0);
2493
+ XMMRegister left_reg = ToDoubleRegister(left);
2494
+ ASSERT(!left_reg.is(xmm1));
2495
+ LOperand* right = instr->InputAt(1);
2496
+ XMMRegister result_reg = ToDoubleRegister(instr->result());
2497
+ Representation exponent_type = instr->hydrogen()->right()->representation();
2498
+ if (exponent_type.IsDouble()) {
2499
+ __ PrepareCallCFunction(2);
2500
+ // Move arguments to correct registers
2501
+ __ movsd(xmm0, left_reg);
2502
+ ASSERT(ToDoubleRegister(right).is(xmm1));
2503
+ __ CallCFunction(ExternalReference::power_double_double_function(), 2);
2504
+ } else if (exponent_type.IsInteger32()) {
2505
+ __ PrepareCallCFunction(2);
2506
+ // Move arguments to correct registers: xmm0 and edi (not rdi).
2507
+ // On Windows, the registers are xmm0 and edx.
2508
+ __ movsd(xmm0, left_reg);
2509
+ #ifdef _WIN64
2510
+ ASSERT(ToRegister(right).is(rdx));
2511
+ #else
2512
+ ASSERT(ToRegister(right).is(rdi));
2513
+ #endif
2514
+ __ CallCFunction(ExternalReference::power_double_int_function(), 2);
2515
+ } else {
2516
+ ASSERT(exponent_type.IsTagged());
2517
+ CpuFeatures::Scope scope(SSE2);
2518
+ Register right_reg = ToRegister(right);
2519
+
2520
+ Label non_smi, call;
2521
+ __ JumpIfNotSmi(right_reg, &non_smi);
2522
+ __ SmiToInteger32(right_reg, right_reg);
2523
+ __ cvtlsi2sd(xmm1, right_reg);
2524
+ __ jmp(&call);
2525
+
2526
+ __ bind(&non_smi);
2527
+ __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , kScratchRegister);
2528
+ DeoptimizeIf(not_equal, instr->environment());
2529
+ __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset));
2530
+
2531
+ __ bind(&call);
2532
+ __ PrepareCallCFunction(2);
2533
+ // Move arguments to correct registers xmm0 and xmm1.
2534
+ __ movsd(xmm0, left_reg);
2535
+ // Right argument is already in xmm1.
2536
+ __ CallCFunction(ExternalReference::power_double_double_function(), 2);
2537
+ }
2538
+ // Return value is in xmm0.
2539
+ __ movsd(result_reg, xmm0);
2540
+ }
2541
+
2542
+
2543
+ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2544
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2545
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
2546
+ TranscendentalCacheStub::UNTAGGED);
2547
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2548
+ }
2549
+
2550
+
2551
+ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2552
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2553
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
2554
+ TranscendentalCacheStub::UNTAGGED);
2555
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2556
+ }
2557
+
2558
+
2559
+ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2560
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2561
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
2562
+ TranscendentalCacheStub::UNTAGGED);
2563
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2564
+ }
2565
+
2566
+
2567
+ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2568
+ switch (instr->op()) {
2569
+ case kMathAbs:
2570
+ DoMathAbs(instr);
2571
+ break;
2572
+ case kMathFloor:
2573
+ DoMathFloor(instr);
2574
+ break;
2575
+ case kMathRound:
2576
+ DoMathRound(instr);
2577
+ break;
2578
+ case kMathSqrt:
2579
+ DoMathSqrt(instr);
2580
+ break;
2581
+ case kMathPowHalf:
2582
+ DoMathPowHalf(instr);
2583
+ break;
2584
+ case kMathCos:
2585
+ DoMathCos(instr);
2586
+ break;
2587
+ case kMathSin:
2588
+ DoMathSin(instr);
2589
+ break;
2590
+ case kMathLog:
2591
+ DoMathLog(instr);
2592
+ break;
2593
+
2594
+ default:
2595
+ UNREACHABLE();
2596
+ }
2597
+ }
2598
+
2599
+
2600
+ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2601
+ ASSERT(ToRegister(instr->key()).is(rcx));
2602
+ ASSERT(ToRegister(instr->result()).is(rax));
2603
+
2604
+ int arity = instr->arity();
2605
+ Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2606
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
2607
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2608
+ }
2609
+
2610
+
2611
+ void LCodeGen::DoCallNamed(LCallNamed* instr) {
2612
+ ASSERT(ToRegister(instr->result()).is(rax));
2613
+
2614
+ int arity = instr->arity();
2615
+ Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2616
+ __ Move(rcx, instr->name());
2617
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
2618
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2619
+ }
2620
+
2621
+
2622
+ void LCodeGen::DoCallFunction(LCallFunction* instr) {
2623
+ ASSERT(ToRegister(instr->result()).is(rax));
2624
+
2625
+ int arity = instr->arity();
2626
+ CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2627
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2628
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2629
+ __ Drop(1);
2630
+ }
2631
+
2632
+
2633
+ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2634
+ ASSERT(ToRegister(instr->result()).is(rax));
2635
+ int arity = instr->arity();
2636
+ Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2637
+ __ Move(rcx, instr->name());
2638
+ CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2639
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2640
+ }
2641
+
2642
+
2643
+ void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2644
+ ASSERT(ToRegister(instr->result()).is(rax));
2645
+ __ Move(rdi, instr->target());
2646
+ CallKnownFunction(instr->target(), instr->arity(), instr);
2647
+ }
2648
+
2649
+
2650
+ void LCodeGen::DoCallNew(LCallNew* instr) {
2651
+ ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
2652
+ ASSERT(ToRegister(instr->result()).is(rax));
2653
+
2654
+ Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2655
+ __ Set(rax, instr->arity());
2656
+ CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2657
+ }
2658
+
2659
+
2660
+ void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2661
+ CallRuntime(instr->function(), instr->arity(), instr);
2662
+ }
2663
+
2664
+
2665
+ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2666
+ Register object = ToRegister(instr->object());
2667
+ Register value = ToRegister(instr->value());
2668
+ int offset = instr->offset();
2669
+
2670
+ if (!instr->transition().is_null()) {
2671
+ __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
2672
+ }
2673
+
2674
+ // Do the store.
2675
+ if (instr->is_in_object()) {
2676
+ __ movq(FieldOperand(object, offset), value);
2677
+ if (instr->needs_write_barrier()) {
2678
+ Register temp = ToRegister(instr->TempAt(0));
2679
+ // Update the write barrier for the object for in-object properties.
2680
+ __ RecordWrite(object, offset, value, temp);
2681
+ }
2682
+ } else {
2683
+ Register temp = ToRegister(instr->TempAt(0));
2684
+ __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset));
2685
+ __ movq(FieldOperand(temp, offset), value);
2686
+ if (instr->needs_write_barrier()) {
2687
+ // Update the write barrier for the properties array.
2688
+ // object is used as a scratch register.
2689
+ __ RecordWrite(temp, offset, value, object);
2690
+ }
2691
+ }
2692
+ }
2693
+
2694
+
2695
+ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2696
+ ASSERT(ToRegister(instr->object()).is(rdx));
2697
+ ASSERT(ToRegister(instr->value()).is(rax));
2698
+
2699
+ __ Move(rcx, instr->hydrogen()->name());
2700
+ Handle<Code> ic(Builtins::builtin(
2701
+ info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
2702
+ : Builtins::StoreIC_Initialize));
2703
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
2704
+ }
2705
+
2706
+
2707
+ void LCodeGen::DoStorePixelArrayElement(LStorePixelArrayElement* instr) {
2708
+ Register external_pointer = ToRegister(instr->external_pointer());
2709
+ Register key = ToRegister(instr->key());
2710
+ Register value = ToRegister(instr->value());
2711
+
2712
+ { // Clamp the value to [0..255].
2713
+ NearLabel done;
2714
+ __ testl(value, Immediate(0xFFFFFF00));
2715
+ __ j(zero, &done);
2716
+ __ setcc(negative, value); // 1 if negative, 0 if positive.
2717
+ __ decb(value); // 0 if negative, 255 if positive.
2718
+ __ bind(&done);
2719
+ }
2720
+
2721
+ __ movb(Operand(external_pointer, key, times_1, 0), value);
2722
+ }
2723
+
2724
+
2725
+ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2726
+ if (instr->length()->IsRegister()) {
2727
+ __ cmpq(ToRegister(instr->index()), ToRegister(instr->length()));
2728
+ } else {
2729
+ __ cmpq(ToRegister(instr->index()), ToOperand(instr->length()));
2730
+ }
2731
+ DeoptimizeIf(above_equal, instr->environment());
2732
+ }
2733
+
2734
+
2735
+ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
2736
+ Register value = ToRegister(instr->value());
2737
+ Register elements = ToRegister(instr->object());
2738
+ Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
2739
+
2740
+ // Do the store.
2741
+ if (instr->key()->IsConstantOperand()) {
2742
+ ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
2743
+ LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
2744
+ int offset =
2745
+ ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
2746
+ __ movq(FieldOperand(elements, offset), value);
2747
+ } else {
2748
+ __ movq(FieldOperand(elements,
2749
+ key,
2750
+ times_pointer_size,
2751
+ FixedArray::kHeaderSize),
2752
+ value);
2753
+ }
2754
+
2755
+ if (instr->hydrogen()->NeedsWriteBarrier()) {
2756
+ // Compute address of modified element and store it into key register.
2757
+ __ lea(key, FieldOperand(elements,
2758
+ key,
2759
+ times_pointer_size,
2760
+ FixedArray::kHeaderSize));
2761
+ __ RecordWrite(elements, key, value);
2762
+ }
2763
+ }
2764
+
2765
+
2766
+ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2767
+ ASSERT(ToRegister(instr->object()).is(rdx));
2768
+ ASSERT(ToRegister(instr->key()).is(rcx));
2769
+ ASSERT(ToRegister(instr->value()).is(rax));
2770
+
2771
+ Handle<Code> ic(Builtins::builtin(
2772
+ info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
2773
+ : Builtins::KeyedStoreIC_Initialize));
2774
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
2775
+ }
2776
+
2777
+
2778
+ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
2779
+ class DeferredStringCharCodeAt: public LDeferredCode {
2780
+ public:
2781
+ DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
2782
+ : LDeferredCode(codegen), instr_(instr) { }
2783
+ virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
2784
+ private:
2785
+ LStringCharCodeAt* instr_;
2786
+ };
2787
+
2788
+ Register string = ToRegister(instr->string());
2789
+ Register index = no_reg;
2790
+ int const_index = -1;
2791
+ if (instr->index()->IsConstantOperand()) {
2792
+ const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2793
+ STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2794
+ if (!Smi::IsValid(const_index)) {
2795
+ // Guaranteed to be out of bounds because of the assert above.
2796
+ // So the bounds check that must dominate this instruction must
2797
+ // have deoptimized already.
2798
+ if (FLAG_debug_code) {
2799
+ __ Abort("StringCharCodeAt: out of bounds index.");
2800
+ }
2801
+ // No code needs to be generated.
2802
+ return;
2803
+ }
2804
+ } else {
2805
+ index = ToRegister(instr->index());
2806
+ }
2807
+ Register result = ToRegister(instr->result());
2808
+
2809
+ DeferredStringCharCodeAt* deferred =
2810
+ new DeferredStringCharCodeAt(this, instr);
2811
+
2812
+ NearLabel flat_string, ascii_string, done;
2813
+
2814
+ // Fetch the instance type of the receiver into result register.
2815
+ __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
2816
+ __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
2817
+
2818
+ // We need special handling for non-sequential strings.
2819
+ STATIC_ASSERT(kSeqStringTag == 0);
2820
+ __ testb(result, Immediate(kStringRepresentationMask));
2821
+ __ j(zero, &flat_string);
2822
+
2823
+ // Handle cons strings and go to deferred code for the rest.
2824
+ __ testb(result, Immediate(kIsConsStringMask));
2825
+ __ j(zero, deferred->entry());
2826
+
2827
+ // ConsString.
2828
+ // Check whether the right hand side is the empty string (i.e. if
2829
+ // this is really a flat string in a cons string). If that is not
2830
+ // the case we would rather go to the runtime system now to flatten
2831
+ // the string.
2832
+ __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
2833
+ Heap::kEmptyStringRootIndex);
2834
+ __ j(not_equal, deferred->entry());
2835
+ // Get the first of the two strings and load its instance type.
2836
+ __ movq(string, FieldOperand(string, ConsString::kFirstOffset));
2837
+ __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
2838
+ __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
2839
+ // If the first cons component is also non-flat, then go to runtime.
2840
+ STATIC_ASSERT(kSeqStringTag == 0);
2841
+ __ testb(result, Immediate(kStringRepresentationMask));
2842
+ __ j(not_zero, deferred->entry());
2843
+
2844
+ // Check for ASCII or two-byte string.
2845
+ __ bind(&flat_string);
2846
+ STATIC_ASSERT(kAsciiStringTag != 0);
2847
+ __ testb(result, Immediate(kStringEncodingMask));
2848
+ __ j(not_zero, &ascii_string);
2849
+
2850
+ // Two-byte string.
2851
+ // Load the two-byte character code into the result register.
2852
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
2853
+ if (instr->index()->IsConstantOperand()) {
2854
+ __ movzxwl(result,
2855
+ FieldOperand(string,
2856
+ SeqTwoByteString::kHeaderSize +
2857
+ (kUC16Size * const_index)));
2858
+ } else {
2859
+ __ movzxwl(result, FieldOperand(string,
2860
+ index,
2861
+ times_2,
2862
+ SeqTwoByteString::kHeaderSize));
2863
+ }
2864
+ __ jmp(&done);
2865
+
2866
+ // ASCII string.
2867
+ // Load the byte into the result register.
2868
+ __ bind(&ascii_string);
2869
+ if (instr->index()->IsConstantOperand()) {
2870
+ __ movzxbl(result, FieldOperand(string,
2871
+ SeqAsciiString::kHeaderSize + const_index));
2872
+ } else {
2873
+ __ movzxbl(result, FieldOperand(string,
2874
+ index,
2875
+ times_1,
2876
+ SeqAsciiString::kHeaderSize));
2877
+ }
2878
+ __ bind(&done);
2879
+ __ bind(deferred->exit());
2880
+ }
2881
+
2882
+
2883
+ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
2884
+ Register string = ToRegister(instr->string());
2885
+ Register result = ToRegister(instr->result());
2886
+
2887
+ // TODO(3095996): Get rid of this. For now, we need to make the
2888
+ // result register contain a valid pointer because it is already
2889
+ // contained in the register pointer map.
2890
+ __ Set(result, 0);
2891
+
2892
+ __ PushSafepointRegisters();
2893
+ __ push(string);
2894
+ // Push the index as a smi. This is safe because of the checks in
2895
+ // DoStringCharCodeAt above.
2896
+ STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2897
+ if (instr->index()->IsConstantOperand()) {
2898
+ int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2899
+ __ Push(Smi::FromInt(const_index));
2900
+ } else {
2901
+ Register index = ToRegister(instr->index());
2902
+ __ Integer32ToSmi(index, index);
2903
+ __ push(index);
2904
+ }
2905
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2906
+ __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
2907
+ RecordSafepointWithRegisters(
2908
+ instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
2909
+ if (FLAG_debug_code) {
2910
+ __ AbortIfNotSmi(rax);
2911
+ }
2912
+ __ SmiToInteger32(rax, rax);
2913
+ __ StoreToSafepointRegisterSlot(result, rax);
2914
+ __ PopSafepointRegisters();
2915
+ }
2916
+
2917
+
2918
+ void LCodeGen::DoStringLength(LStringLength* instr) {
2919
+ Register string = ToRegister(instr->string());
2920
+ Register result = ToRegister(instr->result());
2921
+ __ movq(result, FieldOperand(string, String::kLengthOffset));
2922
+ }
2923
+
2924
+
2925
+ void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
2926
+ LOperand* input = instr->InputAt(0);
2927
+ ASSERT(input->IsRegister() || input->IsStackSlot());
2928
+ LOperand* output = instr->result();
2929
+ ASSERT(output->IsDoubleRegister());
2930
+ if (input->IsRegister()) {
2931
+ __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
2932
+ } else {
2933
+ __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
2934
+ }
2935
+ }
2936
+
2937
+
2938
+ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
2939
+ LOperand* input = instr->InputAt(0);
2940
+ ASSERT(input->IsRegister() && input->Equals(instr->result()));
2941
+ Register reg = ToRegister(input);
2942
+
2943
+ __ Integer32ToSmi(reg, reg);
2944
+ }
2945
+
2946
+
2947
+ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
2948
+ class DeferredNumberTagD: public LDeferredCode {
2949
+ public:
2950
+ DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
2951
+ : LDeferredCode(codegen), instr_(instr) { }
2952
+ virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
2953
+ private:
2954
+ LNumberTagD* instr_;
2955
+ };
2956
+
2957
+ XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2958
+ Register reg = ToRegister(instr->result());
2959
+ Register tmp = ToRegister(instr->TempAt(0));
2960
+
2961
+ DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
2962
+ if (FLAG_inline_new) {
2963
+ __ AllocateHeapNumber(reg, tmp, deferred->entry());
2964
+ } else {
2965
+ __ jmp(deferred->entry());
2966
+ }
2967
+ __ bind(deferred->exit());
2968
+ __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
2969
+ }
2970
+
2971
+
2972
+ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
2973
+ // TODO(3095996): Get rid of this. For now, we need to make the
2974
+ // result register contain a valid pointer because it is already
2975
+ // contained in the register pointer map.
2976
+ Register reg = ToRegister(instr->result());
2977
+ __ Move(reg, Smi::FromInt(0));
2978
+
2979
+ __ PushSafepointRegisters();
2980
+ __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2981
+ RecordSafepointWithRegisters(
2982
+ instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2983
+ // Ensure that value in rax survives popping registers.
2984
+ __ movq(kScratchRegister, rax);
2985
+ __ PopSafepointRegisters();
2986
+ __ movq(reg, kScratchRegister);
2987
+ }
2988
+
2989
+
2990
+ void LCodeGen::DoSmiTag(LSmiTag* instr) {
2991
+ ASSERT(instr->InputAt(0)->Equals(instr->result()));
2992
+ Register input = ToRegister(instr->InputAt(0));
2993
+ ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
2994
+ __ Integer32ToSmi(input, input);
2995
+ }
2996
+
2997
+
2998
+ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
2999
+ ASSERT(instr->InputAt(0)->Equals(instr->result()));
3000
+ Register input = ToRegister(instr->InputAt(0));
3001
+ if (instr->needs_check()) {
3002
+ Condition is_smi = __ CheckSmi(input);
3003
+ DeoptimizeIf(NegateCondition(is_smi), instr->environment());
3004
+ }
3005
+ __ SmiToInteger32(input, input);
3006
+ }
3007
+
3008
+
3009
+ void LCodeGen::EmitNumberUntagD(Register input_reg,
3010
+ XMMRegister result_reg,
3011
+ LEnvironment* env) {
3012
+ NearLabel load_smi, heap_number, done;
3013
+
3014
+ // Smi check.
3015
+ __ JumpIfSmi(input_reg, &load_smi);
3016
+
3017
+ // Heap number map check.
3018
+ __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
3019
+ Heap::kHeapNumberMapRootIndex);
3020
+ __ j(equal, &heap_number);
3021
+
3022
+ __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3023
+ DeoptimizeIf(not_equal, env);
3024
+
3025
+ // Convert undefined to NaN. Compute NaN as 0/0.
3026
+ __ xorpd(result_reg, result_reg);
3027
+ __ divsd(result_reg, result_reg);
3028
+ __ jmp(&done);
3029
+
3030
+ // Heap number to XMM conversion.
3031
+ __ bind(&heap_number);
3032
+ __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3033
+ __ jmp(&done);
3034
+
3035
+ // Smi to XMM conversion
3036
+ __ bind(&load_smi);
3037
+ __ SmiToInteger32(kScratchRegister, input_reg);
3038
+ __ cvtlsi2sd(result_reg, kScratchRegister);
3039
+ __ bind(&done);
3040
+ }
3041
+
3042
+
3043
+ class DeferredTaggedToI: public LDeferredCode {
3044
+ public:
3045
+ DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3046
+ : LDeferredCode(codegen), instr_(instr) { }
3047
+ virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3048
+ private:
3049
+ LTaggedToI* instr_;
3050
+ };
3051
+
3052
+
3053
+ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3054
+ NearLabel done, heap_number;
3055
+ Register input_reg = ToRegister(instr->InputAt(0));
3056
+
3057
+ // Heap number map check.
3058
+ __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
3059
+ Heap::kHeapNumberMapRootIndex);
3060
+
3061
+ if (instr->truncating()) {
3062
+ __ j(equal, &heap_number);
3063
+ // Check for undefined. Undefined is converted to zero for truncating
3064
+ // conversions.
3065
+ __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3066
+ DeoptimizeIf(not_equal, instr->environment());
3067
+ __ movl(input_reg, Immediate(0));
3068
+ __ jmp(&done);
3069
+
3070
+ __ bind(&heap_number);
3071
+
3072
+ __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3073
+ __ cvttsd2siq(input_reg, xmm0);
3074
+ __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
3075
+ __ cmpl(input_reg, kScratchRegister);
3076
+ DeoptimizeIf(equal, instr->environment());
3077
+ } else {
3078
+ // Deoptimize if we don't have a heap number.
3079
+ DeoptimizeIf(not_equal, instr->environment());
3080
+
3081
+ XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
3082
+ __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3083
+ __ cvttsd2si(input_reg, xmm0);
3084
+ __ cvtlsi2sd(xmm_temp, input_reg);
3085
+ __ ucomisd(xmm0, xmm_temp);
3086
+ DeoptimizeIf(not_equal, instr->environment());
3087
+ DeoptimizeIf(parity_even, instr->environment()); // NaN.
3088
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3089
+ __ testl(input_reg, input_reg);
3090
+ __ j(not_zero, &done);
3091
+ __ movmskpd(input_reg, xmm0);
3092
+ __ andl(input_reg, Immediate(1));
3093
+ DeoptimizeIf(not_zero, instr->environment());
3094
+ }
3095
+ }
3096
+ __ bind(&done);
3097
+ }
3098
+
3099
+
3100
+ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3101
+ LOperand* input = instr->InputAt(0);
3102
+ ASSERT(input->IsRegister());
3103
+ ASSERT(input->Equals(instr->result()));
3104
+
3105
+ Register input_reg = ToRegister(input);
3106
+ DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3107
+ __ JumpIfNotSmi(input_reg, deferred->entry());
3108
+ __ SmiToInteger32(input_reg, input_reg);
3109
+ __ bind(deferred->exit());
3110
+ }
3111
+
3112
+
3113
+ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3114
+ LOperand* input = instr->InputAt(0);
3115
+ ASSERT(input->IsRegister());
3116
+ LOperand* result = instr->result();
3117
+ ASSERT(result->IsDoubleRegister());
3118
+
3119
+ Register input_reg = ToRegister(input);
3120
+ XMMRegister result_reg = ToDoubleRegister(result);
3121
+
3122
+ EmitNumberUntagD(input_reg, result_reg, instr->environment());
3123
+ }
3124
+
3125
+
3126
+ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3127
+ LOperand* input = instr->InputAt(0);
3128
+ ASSERT(input->IsDoubleRegister());
3129
+ LOperand* result = instr->result();
3130
+ ASSERT(result->IsRegister());
3131
+
3132
+ XMMRegister input_reg = ToDoubleRegister(input);
3133
+ Register result_reg = ToRegister(result);
3134
+
3135
+ if (instr->truncating()) {
3136
+ // Performs a truncating conversion of a floating point number as used by
3137
+ // the JS bitwise operations.
3138
+ __ cvttsd2siq(result_reg, input_reg);
3139
+ __ movq(kScratchRegister, V8_INT64_C(0x8000000000000000), RelocInfo::NONE);
3140
+ __ cmpl(result_reg, kScratchRegister);
3141
+ DeoptimizeIf(equal, instr->environment());
3142
+ } else {
3143
+ __ cvttsd2si(result_reg, input_reg);
3144
+ __ cvtlsi2sd(xmm0, result_reg);
3145
+ __ ucomisd(xmm0, input_reg);
3146
+ DeoptimizeIf(not_equal, instr->environment());
3147
+ DeoptimizeIf(parity_even, instr->environment()); // NaN.
3148
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3149
+ NearLabel done;
3150
+ // The integer converted back is equal to the original. We
3151
+ // only have to test if we got -0 as an input.
3152
+ __ testl(result_reg, result_reg);
3153
+ __ j(not_zero, &done);
3154
+ __ movmskpd(result_reg, input_reg);
3155
+ // Bit 0 contains the sign of the double in input_reg.
3156
+ // If input was positive, we are ok and return 0, otherwise
3157
+ // deoptimize.
3158
+ __ andl(result_reg, Immediate(1));
3159
+ DeoptimizeIf(not_zero, instr->environment());
3160
+ __ bind(&done);
3161
+ }
3162
+ }
3163
+ }
3164
+
3165
+
3166
+ void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
3167
+ LOperand* input = instr->InputAt(0);
3168
+ ASSERT(input->IsRegister());
3169
+ Condition cc = masm()->CheckSmi(ToRegister(input));
3170
+ if (instr->condition() != equal) {
3171
+ cc = NegateCondition(cc);
3172
+ }
3173
+ DeoptimizeIf(cc, instr->environment());
3174
+ }
3175
+
3176
+
3177
+ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
3178
+ Register input = ToRegister(instr->InputAt(0));
3179
+ InstanceType first = instr->hydrogen()->first();
3180
+ InstanceType last = instr->hydrogen()->last();
3181
+
3182
+ __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
3183
+
3184
+ // If there is only one type in the interval check for equality.
3185
+ if (first == last) {
3186
+ __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
3187
+ Immediate(static_cast<int8_t>(first)));
3188
+ DeoptimizeIf(not_equal, instr->environment());
3189
+ } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
3190
+ // String has a dedicated bit in instance type.
3191
+ __ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
3192
+ Immediate(kIsNotStringMask));
3193
+ DeoptimizeIf(not_zero, instr->environment());
3194
+ } else {
3195
+ __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
3196
+ Immediate(static_cast<int8_t>(first)));
3197
+ DeoptimizeIf(below, instr->environment());
3198
+ // Omit check for the last type.
3199
+ if (last != LAST_TYPE) {
3200
+ __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
3201
+ Immediate(static_cast<int8_t>(last)));
3202
+ DeoptimizeIf(above, instr->environment());
3203
+ }
3204
+ }
3205
+ }
3206
+
3207
+
3208
+ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
3209
+ ASSERT(instr->InputAt(0)->IsRegister());
3210
+ Register reg = ToRegister(instr->InputAt(0));
3211
+ __ Cmp(reg, instr->hydrogen()->target());
3212
+ DeoptimizeIf(not_equal, instr->environment());
3213
+ }
3214
+
3215
+
3216
+ void LCodeGen::DoCheckMap(LCheckMap* instr) {
3217
+ LOperand* input = instr->InputAt(0);
3218
+ ASSERT(input->IsRegister());
3219
+ Register reg = ToRegister(input);
3220
+ __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
3221
+ instr->hydrogen()->map());
3222
+ DeoptimizeIf(not_equal, instr->environment());
3223
+ }
3224
+
3225
+
3226
+ void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
3227
+ if (Heap::InNewSpace(*object)) {
3228
+ Handle<JSGlobalPropertyCell> cell =
3229
+ Factory::NewJSGlobalPropertyCell(object);
3230
+ __ movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
3231
+ __ movq(result, Operand(result, 0));
3232
+ } else {
3233
+ __ Move(result, object);
3234
+ }
3235
+ }
3236
+
3237
+
3238
+ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
3239
+ Register reg = ToRegister(instr->TempAt(0));
3240
+
3241
+ Handle<JSObject> holder = instr->holder();
3242
+ Handle<JSObject> current_prototype = instr->prototype();
3243
+
3244
+ // Load prototype object.
3245
+ LoadHeapObject(reg, current_prototype);
3246
+
3247
+ // Check prototype maps up to the holder.
3248
+ while (!current_prototype.is_identical_to(holder)) {
3249
+ __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
3250
+ Handle<Map>(current_prototype->map()));
3251
+ DeoptimizeIf(not_equal, instr->environment());
3252
+ current_prototype =
3253
+ Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
3254
+ // Load next prototype object.
3255
+ LoadHeapObject(reg, current_prototype);
3256
+ }
3257
+
3258
+ // Check the holder map.
3259
+ __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
3260
+ Handle<Map>(current_prototype->map()));
3261
+ DeoptimizeIf(not_equal, instr->environment());
3262
+ }
3263
+
3264
+
3265
// Create an array literal.  Shallow literals of bounded length go
// through FastCloneShallowArrayStub; nested or oversized literals fall
// back to a runtime call.  The literals array, literal index and
// constant elements are passed on the stack.
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  // Setup the parameters to the stub/runtime call.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->constant_elements());

  // Pick the right runtime function or stub to call.  NOTE: the order
  // of these tests is significant (COW before depth before length).
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    // Copy-on-write arrays share their elements, so they are always
    // shallow by construction.
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    // Nested literals need the full runtime path.
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    // Too long for the stub's inline cloning.
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
3291
+
3292
+
3293
+ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3294
+ // Setup the parameters to the stub/runtime call.
3295
+ __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
3296
+ __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
3297
+ __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
3298
+ __ Push(instr->hydrogen()->constant_properties());
3299
+ __ Push(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0));
3300
+
3301
+ // Pick the right runtime function to call.
3302
+ if (instr->hydrogen()->depth() > 1) {
3303
+ CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3304
+ } else {
3305
+ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3306
+ }
3307
+ }
3308
+
3309
+
3310
// Materialize a regexp literal.  If the slot in the literals array is
// still undefined, the boilerplate is created via the runtime; the
// boilerplate (rbx) is then cloned word-by-word into a freshly
// allocated JSRegExp object (rax).
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  NearLabel materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ movq(rbx, FieldOperand(rcx, literal_offset));
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized);

  // Create regexp literal using runtime function
  // Result will be in rax.
  __ push(rcx);
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->pattern());
  __ Push(instr->hydrogen()->flags());
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ movq(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Inline allocation failed: allocate through the runtime, preserving
  // the boilerplate (rbx) across the call.
  __ bind(&runtime_allocate);
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movq(rdx, FieldOperand(rbx, i));
    __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movq(FieldOperand(rax, i), rdx);
    __ movq(FieldOperand(rax, i + kPointerSize), rcx);
  }
  // Copy the trailing word when the object size is an odd number of
  // pointers.
  if ((size % (2 * kPointerSize)) != 0) {
    __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movq(FieldOperand(rax, size - kPointerSize), rdx);
  }
}
3360
+
3361
+
3362
+ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3363
+ // Use the fast case closure allocation code that allocates in new
3364
+ // space for nested functions that don't need literals cloning.
3365
+ Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3366
+ bool pretenure = instr->hydrogen()->pretenure();
3367
+ if (shared_info->num_literals() == 0 && !pretenure) {
3368
+ FastNewClosureStub stub;
3369
+ __ Push(shared_info);
3370
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3371
+ } else {
3372
+ __ push(rsi);
3373
+ __ Push(shared_info);
3374
+ __ Push(pretenure ? Factory::true_value() : Factory::false_value());
3375
+ CallRuntime(Runtime::kNewClosure, 3, instr);
3376
+ }
3377
+ }
3378
+
3379
+
3380
+ void LCodeGen::DoTypeof(LTypeof* instr) {
3381
+ LOperand* input = instr->InputAt(0);
3382
+ if (input->IsConstantOperand()) {
3383
+ __ Push(ToHandle(LConstantOperand::cast(input)));
3384
+ } else if (input->IsRegister()) {
3385
+ __ push(ToRegister(input));
3386
+ } else {
3387
+ ASSERT(input->IsStackSlot());
3388
+ __ push(ToOperand(input));
3389
+ }
3390
+ CallRuntime(Runtime::kTypeof, 1, instr);
3391
+ }
3392
+
3393
+
3394
+ void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
3395
+ Register input = ToRegister(instr->InputAt(0));
3396
+ Register result = ToRegister(instr->result());
3397
+ Label true_label;
3398
+ Label false_label;
3399
+ NearLabel done;
3400
+
3401
+ Condition final_branch_condition = EmitTypeofIs(&true_label,
3402
+ &false_label,
3403
+ input,
3404
+ instr->type_literal());
3405
+ __ j(final_branch_condition, &true_label);
3406
+ __ bind(&false_label);
3407
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
3408
+ __ jmp(&done);
3409
+
3410
+ __ bind(&true_label);
3411
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
3412
+
3413
+ __ bind(&done);
3414
+ }
3415
+
3416
+
3417
+ void LCodeGen::EmitPushConstantOperand(LOperand* operand) {
3418
+ ASSERT(operand->IsConstantOperand());
3419
+ LConstantOperand* const_op = LConstantOperand::cast(operand);
3420
+ Handle<Object> literal = chunk_->LookupLiteral(const_op);
3421
+ Representation r = chunk_->LookupLiteralRepresentation(const_op);
3422
+ if (r.IsInteger32()) {
3423
+ ASSERT(literal->IsNumber());
3424
+ __ push(Immediate(static_cast<int32_t>(literal->Number())));
3425
+ } else if (r.IsDouble()) {
3426
+ Abort("unsupported double immediate");
3427
+ } else {
3428
+ ASSERT(r.IsTagged());
3429
+ __ Push(literal);
3430
+ }
3431
+ }
3432
+
3433
+
3434
+ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
3435
+ Register input = ToRegister(instr->InputAt(0));
3436
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
3437
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
3438
+ Label* true_label = chunk_->GetAssemblyLabel(true_block);
3439
+ Label* false_label = chunk_->GetAssemblyLabel(false_block);
3440
+
3441
+ Condition final_branch_condition = EmitTypeofIs(true_label,
3442
+ false_label,
3443
+ input,
3444
+ instr->type_literal());
3445
+
3446
+ EmitBranch(true_block, false_block, final_branch_condition);
3447
+ }
3448
+
3449
+
3450
// Emit the comparison for typeof(input) against |type_name|.  Cases
// decided early jump straight to |true_label|/|false_label|; otherwise
// the flags are left set and the condition under which the test
// succeeds is returned (|never| for unrecognized type names).  Note
// that |input| is clobbered in several branches (its map is loaded
// into it).
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  if (type_name->Equals(Heap::number_symbol())) {
    // Smis and heap numbers are both "number".
    __ JumpIfSmi(input, true_label);
    __ Cmp(FieldOperand(input, HeapObject::kMapOffset),
           Factory::heap_number_map());
    final_branch_condition = equal;

  } else if (type_name->Equals(Heap::string_symbol())) {
    __ JumpIfSmi(input, false_label);
    // Undetectable objects are never "string".  |input| now holds the
    // map for the instance-type check below.
    __ movq(input, FieldOperand(input, HeapObject::kMapOffset));
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, false_label);
    __ CmpInstanceType(input, FIRST_NONSTRING_TYPE);
    final_branch_condition = below;

  } else if (type_name->Equals(Heap::boolean_symbol())) {
    // Only the true and false oddballs are "boolean".
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ j(equal, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = equal;

  } else if (type_name->Equals(Heap::undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ j(equal, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ movq(input, FieldOperand(input, HeapObject::kMapOffset));
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = not_zero;

  } else if (type_name->Equals(Heap::function_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, FIRST_FUNCTION_CLASS_TYPE, input);
    final_branch_condition = above_equal;

  } else if (type_name->Equals(Heap::object_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ Cmp(input, Factory::null_value());
    __ j(equal, true_label);
    // Check for undetectable objects => false.
    // NOTE(review): this testb and the CmpInstanceType calls below read
    // through |input| as if it held a map, but no map load precedes
    // them in this branch (contrast the "string"/"undefined" branches).
    // Looks suspicious — confirm against upstream V8 before relying on
    // this path.
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, false_label);
    // Check for JS objects that are not RegExp or Function => true.
    __ CmpInstanceType(input, FIRST_JS_OBJECT_TYPE);
    __ j(below, false_label);
    __ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
    final_branch_condition = below_equal;

  } else {
    // Unknown type literal: can never match.
    final_branch_condition = never;
    __ jmp(false_label);
  }

  return final_branch_condition;
}
3512
+
3513
+
3514
+ void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
3515
+ Register result = ToRegister(instr->result());
3516
+ NearLabel true_label;
3517
+ NearLabel false_label;
3518
+ NearLabel done;
3519
+
3520
+ EmitIsConstructCall(result);
3521
+ __ j(equal, &true_label);
3522
+
3523
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
3524
+ __ jmp(&done);
3525
+
3526
+ __ bind(&true_label);
3527
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
3528
+
3529
+
3530
+ __ bind(&done);
3531
+ }
3532
+
3533
+
3534
+ void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
3535
+ Register temp = ToRegister(instr->TempAt(0));
3536
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
3537
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
3538
+
3539
+ EmitIsConstructCall(temp);
3540
+ EmitBranch(true_block, false_block, equal);
3541
+ }
3542
+
3543
+
3544
+ void LCodeGen::EmitIsConstructCall(Register temp) {
3545
+ // Get the frame pointer for the calling frame.
3546
+ __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3547
+
3548
+ // Skip the arguments adaptor frame if it exists.
3549
+ NearLabel check_frame_marker;
3550
+ __ SmiCompare(Operand(temp, StandardFrameConstants::kContextOffset),
3551
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3552
+ __ j(not_equal, &check_frame_marker);
3553
+ __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3554
+
3555
+ // Check the marker in the calling frame.
3556
+ __ bind(&check_frame_marker);
3557
+ __ SmiCompare(Operand(temp, StandardFrameConstants::kMarkerOffset),
3558
+ Smi::FromInt(StackFrame::CONSTRUCT));
3559
+ }
3560
+
3561
+
3562
// No code for lazy bailout instruction. Used to capture environment after a
// call for populating the safepoint data with deoptimization data.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // Intentionally empty: the instruction only exists so the register
  // allocator records an environment at this position.
}
3566
+
3567
+
3568
// Unconditional deoptimization: always bail out to the unoptimized
// code using this instruction's environment.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}
3571
+
3572
+
3573
// Implement 'delete obj[key]' by invoking the DELETE builtin.  Object,
// key and the strict-mode flag are passed on the stack.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  // Push object.
  if (obj->IsRegister()) {
    __ push(ToRegister(obj));
  } else {
    __ push(ToOperand(obj));
  }
  // Push key.
  if (key->IsConstantOperand()) {
    EmitPushConstantOperand(key);
  } else if (key->IsRegister()) {
    __ push(ToRegister(key));
  } else {
    __ push(ToOperand(key));
  }
  // The builtin can deoptimize lazily, so this instruction must carry
  // both a pointer map and a deoptimization environment.
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  // Create safepoint generator that will also ensure enough space in the
  // reloc info for patching in deoptimization (since this is invoking a
  // builtin)
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index(),
                                         true);
  __ Push(Smi::FromInt(strict_mode_flag()));
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
}
3605
+
3606
+
3607
+ void LCodeGen::DoStackCheck(LStackCheck* instr) {
3608
+ // Perform stack overflow check.
3609
+ NearLabel done;
3610
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
3611
+ __ j(above_equal, &done);
3612
+
3613
+ StackCheckStub stub;
3614
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3615
+ __ bind(&done);
3616
+ }
3617
+
3618
+
3619
// On-stack-replacement entry point.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  // Exactly one OSR entry per compiled function.
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
3634
+
3635
+ #undef __
3636
+
3637
+ } } // namespace v8::internal
3638
+
3639
+ #endif // V8_TARGET_ARCH_X64