therubyracer 0.9.0beta2 → 0.9.0beta3

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.

This version of therubyracer might be problematic; see the advisory details for more information.

Files changed (920)
  1. data/.gitmodules +3 -0
  2. data/ext/v8/upstream/Makefile +1 -2
  3. data/ext/v8/upstream/v8/.gitignore +33 -0
  4. data/ext/v8/upstream/v8/AUTHORS +42 -0
  5. data/ext/v8/upstream/v8/ChangeLog +2663 -0
  6. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE +0 -0
  7. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.strongtalk +0 -0
  8. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.v8 +0 -0
  9. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.valgrind +0 -0
  10. data/ext/v8/upstream/v8/SConstruct +1473 -0
  11. data/ext/v8/upstream/{3.1.8 → v8}/build/README.txt +0 -0
  12. data/ext/v8/upstream/{3.1.8 → v8}/build/all.gyp +0 -0
  13. data/ext/v8/upstream/{3.1.8 → v8}/build/armu.gypi +0 -0
  14. data/ext/v8/upstream/{3.1.8 → v8}/build/common.gypi +0 -0
  15. data/ext/v8/upstream/{3.1.8 → v8}/build/gyp_v8 +0 -0
  16. data/ext/v8/upstream/v8/include/v8-debug.h +394 -0
  17. data/ext/v8/upstream/v8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/v8/include/v8-profiler.h +505 -0
  19. data/ext/v8/upstream/v8/include/v8-testing.h +104 -0
  20. data/ext/v8/upstream/v8/include/v8.h +4000 -0
  21. data/ext/v8/upstream/{3.1.8 → v8}/include/v8stdint.h +0 -0
  22. data/ext/v8/upstream/v8/preparser/SConscript +38 -0
  23. data/ext/v8/upstream/v8/preparser/preparser-process.cc +169 -0
  24. data/ext/v8/upstream/v8/src/SConscript +380 -0
  25. data/ext/v8/upstream/v8/src/accessors.cc +766 -0
  26. data/ext/v8/upstream/{3.1.8 → v8}/src/accessors.h +0 -0
  27. data/ext/v8/upstream/v8/src/allocation-inl.h +49 -0
  28. data/ext/v8/upstream/v8/src/allocation.cc +122 -0
  29. data/ext/v8/upstream/v8/src/allocation.h +143 -0
  30. data/ext/v8/upstream/v8/src/api.cc +5678 -0
  31. data/ext/v8/upstream/v8/src/api.h +572 -0
  32. data/ext/v8/upstream/{3.1.8 → v8}/src/apinatives.js +0 -0
  33. data/ext/v8/upstream/v8/src/apiutils.h +73 -0
  34. data/ext/v8/upstream/v8/src/arguments.h +116 -0
  35. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +353 -0
  36. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +2877 -0
  37. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +1382 -0
  38. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +1634 -0
  39. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +6917 -0
  40. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +623 -0
  41. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/codegen-arm-inl.h +0 -0
  42. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +7437 -0
  43. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +595 -0
  44. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/constants-arm.cc +0 -0
  45. data/ext/v8/upstream/v8/src/arm/constants-arm.h +778 -0
  46. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +149 -0
  47. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +317 -0
  48. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +737 -0
  49. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +1503 -0
  50. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/frames-arm.cc +0 -0
  51. data/ext/v8/upstream/v8/src/arm/frames-arm.h +168 -0
  52. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +4374 -0
  53. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +1793 -0
  54. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/jump-target-arm.cc +0 -0
  55. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +2120 -0
  56. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +2179 -0
  57. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +4132 -0
  58. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +329 -0
  59. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  60. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/lithium-gap-resolver-arm.h +0 -0
  61. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +2939 -0
  62. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +1071 -0
  63. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  64. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  65. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm-inl.h +0 -0
  66. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.cc +0 -0
  67. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.h +0 -0
  68. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +3288 -0
  69. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +413 -0
  70. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +4034 -0
  71. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/virtual-frame-arm-inl.h +0 -0
  72. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +843 -0
  73. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +523 -0
  74. data/ext/v8/upstream/v8/src/array.js +1249 -0
  75. data/ext/v8/upstream/v8/src/assembler.cc +1067 -0
  76. data/ext/v8/upstream/v8/src/assembler.h +823 -0
  77. data/ext/v8/upstream/v8/src/ast-inl.h +112 -0
  78. data/ext/v8/upstream/v8/src/ast.cc +1078 -0
  79. data/ext/v8/upstream/v8/src/ast.h +2234 -0
  80. data/ext/v8/upstream/v8/src/atomicops.h +167 -0
  81. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_arm_gcc.h +0 -0
  82. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.cc +0 -0
  84. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.h +0 -0
  85. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_macosx.h +0 -0
  86. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_msvc.h +0 -0
  87. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.cc +0 -0
  88. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.h +0 -0
  89. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.cc +0 -0
  90. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.h +0 -0
  91. data/ext/v8/upstream/v8/src/bootstrapper.cc +2138 -0
  92. data/ext/v8/upstream/v8/src/bootstrapper.h +185 -0
  93. data/ext/v8/upstream/v8/src/builtins.cc +1708 -0
  94. data/ext/v8/upstream/v8/src/builtins.h +368 -0
  95. data/ext/v8/upstream/{3.1.8 → v8}/src/bytecodes-irregexp.h +0 -0
  96. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.cc +0 -0
  97. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.h +0 -0
  98. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates-inl.h +0 -0
  99. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates.h +0 -0
  100. data/ext/v8/upstream/v8/src/checks.cc +110 -0
  101. data/ext/v8/upstream/v8/src/checks.h +296 -0
  102. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue-inl.h +0 -0
  103. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.cc +0 -0
  104. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.h +0 -0
  105. data/ext/v8/upstream/v8/src/code-stubs.cc +240 -0
  106. data/ext/v8/upstream/v8/src/code-stubs.h +971 -0
  107. data/ext/v8/upstream/{3.1.8 → v8}/src/code.h +0 -0
  108. data/ext/v8/upstream/v8/src/codegen-inl.h +68 -0
  109. data/ext/v8/upstream/v8/src/codegen.cc +505 -0
  110. data/ext/v8/upstream/v8/src/codegen.h +245 -0
  111. data/ext/v8/upstream/v8/src/compilation-cache.cc +540 -0
  112. data/ext/v8/upstream/v8/src/compilation-cache.h +287 -0
  113. data/ext/v8/upstream/v8/src/compiler.cc +792 -0
  114. data/ext/v8/upstream/v8/src/compiler.h +307 -0
  115. data/ext/v8/upstream/v8/src/contexts.cc +327 -0
  116. data/ext/v8/upstream/v8/src/contexts.h +382 -0
  117. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions-inl.h +0 -0
  118. data/ext/v8/upstream/v8/src/conversions.cc +1125 -0
  119. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions.h +0 -0
  120. data/ext/v8/upstream/v8/src/counters.cc +93 -0
  121. data/ext/v8/upstream/v8/src/counters.h +254 -0
  122. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +101 -0
  123. data/ext/v8/upstream/v8/src/cpu-profiler.cc +606 -0
  124. data/ext/v8/upstream/v8/src/cpu-profiler.h +305 -0
  125. data/ext/v8/upstream/v8/src/cpu.h +67 -0
  126. data/ext/v8/upstream/v8/src/d8-debug.cc +367 -0
  127. data/ext/v8/upstream/v8/src/d8-debug.h +158 -0
  128. data/ext/v8/upstream/v8/src/d8-posix.cc +695 -0
  129. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-readline.cc +0 -0
  130. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-windows.cc +0 -0
  131. data/ext/v8/upstream/v8/src/d8.cc +796 -0
  132. data/ext/v8/upstream/v8/src/d8.gyp +88 -0
  133. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.h +0 -0
  134. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.js +0 -0
  135. data/ext/v8/upstream/{3.1.8 → v8}/src/data-flow.cc +0 -0
  136. data/ext/v8/upstream/v8/src/data-flow.h +379 -0
  137. data/ext/v8/upstream/{3.1.8 → v8}/src/date.js +0 -0
  138. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser-inl.h +0 -0
  139. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser.cc +0 -0
  140. data/ext/v8/upstream/v8/src/dateparser.h +265 -0
  141. data/ext/v8/upstream/v8/src/debug-agent.cc +447 -0
  142. data/ext/v8/upstream/v8/src/debug-agent.h +129 -0
  143. data/ext/v8/upstream/{3.1.8 → v8}/src/debug-debugger.js +0 -0
  144. data/ext/v8/upstream/v8/src/debug.cc +3188 -0
  145. data/ext/v8/upstream/v8/src/debug.h +1055 -0
  146. data/ext/v8/upstream/v8/src/deoptimizer.cc +1296 -0
  147. data/ext/v8/upstream/v8/src/deoptimizer.h +629 -0
  148. data/ext/v8/upstream/v8/src/disasm.h +80 -0
  149. data/ext/v8/upstream/v8/src/disassembler.cc +339 -0
  150. data/ext/v8/upstream/{3.1.8 → v8}/src/disassembler.h +0 -0
  151. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.cc +0 -0
  152. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.h +0 -0
  153. data/ext/v8/upstream/{3.1.8 → v8}/src/double.h +0 -0
  154. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.cc +0 -0
  155. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.h +0 -0
  156. data/ext/v8/upstream/v8/src/execution.cc +791 -0
  157. data/ext/v8/upstream/v8/src/execution.h +291 -0
  158. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +250 -0
  159. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +89 -0
  160. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +55 -0
  161. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +284 -0
  162. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/experimental/i18n-extension.h +0 -0
  163. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +141 -0
  164. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/externalize-string-extension.h +0 -0
  165. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +58 -0
  166. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/gc-extension.h +0 -0
  167. data/ext/v8/upstream/v8/src/factory.cc +1194 -0
  168. data/ext/v8/upstream/v8/src/factory.h +436 -0
  169. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.cc +0 -0
  170. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.h +0 -0
  171. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.cc +0 -0
  172. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.h +0 -0
  173. data/ext/v8/upstream/v8/src/flag-definitions.h +556 -0
  174. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.cc +0 -0
  175. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.h +0 -0
  176. data/ext/v8/upstream/v8/src/frame-element.cc +37 -0
  177. data/ext/v8/upstream/v8/src/frame-element.h +269 -0
  178. data/ext/v8/upstream/v8/src/frames-inl.h +236 -0
  179. data/ext/v8/upstream/v8/src/frames.cc +1273 -0
  180. data/ext/v8/upstream/v8/src/frames.h +854 -0
  181. data/ext/v8/upstream/v8/src/full-codegen.cc +1385 -0
  182. data/ext/v8/upstream/v8/src/full-codegen.h +753 -0
  183. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +91 -0
  184. data/ext/v8/upstream/v8/src/func-name-inferrer.h +111 -0
  185. data/ext/v8/upstream/v8/src/gdb-jit.cc +1548 -0
  186. data/ext/v8/upstream/{3.1.8 → v8}/src/gdb-jit.h +0 -0
  187. data/ext/v8/upstream/v8/src/global-handles.cc +596 -0
  188. data/ext/v8/upstream/v8/src/global-handles.h +239 -0
  189. data/ext/v8/upstream/v8/src/globals.h +325 -0
  190. data/ext/v8/upstream/v8/src/handles-inl.h +177 -0
  191. data/ext/v8/upstream/v8/src/handles.cc +965 -0
  192. data/ext/v8/upstream/v8/src/handles.h +372 -0
  193. data/ext/v8/upstream/{3.1.8 → v8}/src/hashmap.cc +0 -0
  194. data/ext/v8/upstream/v8/src/hashmap.h +121 -0
  195. data/ext/v8/upstream/v8/src/heap-inl.h +703 -0
  196. data/ext/v8/upstream/v8/src/heap-profiler.cc +1173 -0
  197. data/ext/v8/upstream/v8/src/heap-profiler.h +396 -0
  198. data/ext/v8/upstream/v8/src/heap.cc +5856 -0
  199. data/ext/v8/upstream/v8/src/heap.h +2264 -0
  200. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +1639 -0
  201. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +3657 -0
  202. data/ext/v8/upstream/v8/src/hydrogen.cc +6011 -0
  203. data/ext/v8/upstream/v8/src/hydrogen.h +1137 -0
  204. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +430 -0
  205. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +2846 -0
  206. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +1159 -0
  207. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +1596 -0
  208. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +6549 -0
  209. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +495 -0
  210. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/codegen-ia32-inl.h +0 -0
  211. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +10385 -0
  212. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +801 -0
  213. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +88 -0
  214. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +312 -0
  215. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  216. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +1620 -0
  217. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/frames-ia32.cc +0 -0
  218. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +140 -0
  219. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +4357 -0
  220. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +1779 -0
  221. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/jump-target-ia32.cc +0 -0
  222. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +4158 -0
  223. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +318 -0
  224. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +466 -0
  225. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/lithium-gap-resolver-ia32.h +0 -0
  226. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +2181 -0
  227. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +2235 -0
  228. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +2056 -0
  229. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +807 -0
  230. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +1264 -0
  231. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  232. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  233. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +157 -0
  234. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32.h +0 -0
  235. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/simulator-ia32.cc +0 -0
  236. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +72 -0
  237. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +3711 -0
  238. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +1366 -0
  239. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +650 -0
  240. data/ext/v8/upstream/v8/src/ic-inl.h +130 -0
  241. data/ext/v8/upstream/v8/src/ic.cc +2389 -0
  242. data/ext/v8/upstream/v8/src/ic.h +675 -0
  243. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.cc +0 -0
  244. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.h +0 -0
  245. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +659 -0
  246. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +49 -0
  247. data/ext/v8/upstream/v8/src/isolate.cc +883 -0
  248. data/ext/v8/upstream/v8/src/isolate.h +1306 -0
  249. data/ext/v8/upstream/v8/src/json.js +342 -0
  250. data/ext/v8/upstream/v8/src/jsregexp.cc +5371 -0
  251. data/ext/v8/upstream/v8/src/jsregexp.h +1483 -0
  252. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-heavy-inl.h +0 -0
  253. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +427 -0
  254. data/ext/v8/upstream/v8/src/jump-target-heavy.h +238 -0
  255. data/ext/v8/upstream/v8/src/jump-target-inl.h +48 -0
  256. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light-inl.h +0 -0
  257. data/ext/v8/upstream/v8/src/jump-target-light.cc +111 -0
  258. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light.h +0 -0
  259. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.cc +0 -0
  260. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.h +0 -0
  261. data/ext/v8/upstream/{3.1.8 → v8}/src/list-inl.h +0 -0
  262. data/ext/v8/upstream/{3.1.8 → v8}/src/list.h +0 -0
  263. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +142 -0
  264. data/ext/v8/upstream/v8/src/lithium-allocator.cc +2105 -0
  265. data/ext/v8/upstream/v8/src/lithium-allocator.h +630 -0
  266. data/ext/v8/upstream/v8/src/lithium.cc +169 -0
  267. data/ext/v8/upstream/{3.1.8 → v8}/src/lithium.h +0 -0
  268. data/ext/v8/upstream/{3.1.8 → v8}/src/liveedit-debugger.js +0 -0
  269. data/ext/v8/upstream/v8/src/liveedit.cc +1693 -0
  270. data/ext/v8/upstream/v8/src/liveedit.h +179 -0
  271. data/ext/v8/upstream/{3.1.8 → v8}/src/liveobjectlist-inl.h +0 -0
  272. data/ext/v8/upstream/v8/src/liveobjectlist.cc +2589 -0
  273. data/ext/v8/upstream/v8/src/liveobjectlist.h +322 -0
  274. data/ext/v8/upstream/{3.1.8 → v8}/src/log-inl.h +0 -0
  275. data/ext/v8/upstream/v8/src/log-utils.cc +423 -0
  276. data/ext/v8/upstream/v8/src/log-utils.h +229 -0
  277. data/ext/v8/upstream/v8/src/log.cc +1666 -0
  278. data/ext/v8/upstream/v8/src/log.h +446 -0
  279. data/ext/v8/upstream/{3.1.8 → v8}/src/macro-assembler.h +0 -0
  280. data/ext/v8/upstream/{3.1.8 → v8}/src/macros.py +0 -0
  281. data/ext/v8/upstream/v8/src/mark-compact.cc +3092 -0
  282. data/ext/v8/upstream/v8/src/mark-compact.h +506 -0
  283. data/ext/v8/upstream/{3.1.8 → v8}/src/math.js +0 -0
  284. data/ext/v8/upstream/v8/src/messages.cc +166 -0
  285. data/ext/v8/upstream/{3.1.8 → v8}/src/messages.h +0 -0
  286. data/ext/v8/upstream/v8/src/messages.js +1090 -0
  287. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +335 -0
  288. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +2093 -0
  289. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +1066 -0
  290. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +148 -0
  291. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +752 -0
  292. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +511 -0
  293. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +64 -0
  294. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +1213 -0
  295. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +633 -0
  296. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +352 -0
  297. data/ext/v8/upstream/v8/src/mips/constants-mips.h +723 -0
  298. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +90 -0
  299. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +155 -0
  300. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +91 -0
  301. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +1023 -0
  302. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +48 -0
  303. data/ext/v8/upstream/v8/src/mips/frames-mips.h +179 -0
  304. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +727 -0
  305. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +244 -0
  306. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +80 -0
  307. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +65 -0
  308. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +304 -0
  309. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +3327 -0
  310. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +1058 -0
  311. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  312. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  313. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +134 -0
  314. data/ext/v8/upstream/{3.1.8 → v8}/src/mips/register-allocator-mips.cc +0 -0
  315. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +47 -0
  316. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +2438 -0
  317. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +394 -0
  318. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +601 -0
  319. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +58 -0
  320. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +307 -0
  321. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +530 -0
  322. data/ext/v8/upstream/v8/src/mirror-debugger.js +2381 -0
  323. data/ext/v8/upstream/v8/src/mksnapshot.cc +256 -0
  324. data/ext/v8/upstream/{3.1.8 → v8}/src/natives.h +0 -0
  325. data/ext/v8/upstream/v8/src/objects-debug.cc +722 -0
  326. data/ext/v8/upstream/v8/src/objects-inl.h +4166 -0
  327. data/ext/v8/upstream/v8/src/objects-printer.cc +801 -0
  328. data/ext/v8/upstream/v8/src/objects-visiting.cc +142 -0
  329. data/ext/v8/upstream/v8/src/objects-visiting.h +422 -0
  330. data/ext/v8/upstream/v8/src/objects.cc +10296 -0
  331. data/ext/v8/upstream/v8/src/objects.h +6662 -0
  332. data/ext/v8/upstream/v8/src/parser.cc +5168 -0
  333. data/ext/v8/upstream/v8/src/parser.h +823 -0
  334. data/ext/v8/upstream/v8/src/platform-cygwin.cc +811 -0
  335. data/ext/v8/upstream/v8/src/platform-freebsd.cc +854 -0
  336. data/ext/v8/upstream/v8/src/platform-linux.cc +1120 -0
  337. data/ext/v8/upstream/v8/src/platform-macos.cc +865 -0
  338. data/ext/v8/upstream/v8/src/platform-nullos.cc +504 -0
  339. data/ext/v8/upstream/v8/src/platform-openbsd.cc +672 -0
  340. data/ext/v8/upstream/v8/src/platform-posix.cc +424 -0
  341. data/ext/v8/upstream/v8/src/platform-solaris.cc +796 -0
  342. data/ext/v8/upstream/v8/src/platform-tls-mac.h +62 -0
  343. data/ext/v8/upstream/v8/src/platform-tls-win32.h +62 -0
  344. data/ext/v8/upstream/v8/src/platform-tls.h +50 -0
  345. data/ext/v8/upstream/v8/src/platform-win32.cc +2072 -0
  346. data/ext/v8/upstream/v8/src/platform.h +693 -0
  347. data/ext/v8/upstream/v8/src/preparse-data.cc +185 -0
  348. data/ext/v8/upstream/{3.1.8 → v8}/src/preparse-data.h +0 -0
  349. data/ext/v8/upstream/v8/src/preparser-api.cc +219 -0
  350. data/ext/v8/upstream/v8/src/preparser.cc +1205 -0
  351. data/ext/v8/upstream/{3.1.8 → v8}/src/preparser.h +0 -0
  352. data/ext/v8/upstream/v8/src/prettyprinter.cc +1530 -0
  353. data/ext/v8/upstream/v8/src/prettyprinter.h +223 -0
  354. data/ext/v8/upstream/{3.1.8 → v8}/src/profile-generator-inl.h +0 -0
  355. data/ext/v8/upstream/v8/src/profile-generator.cc +3095 -0
  356. data/ext/v8/upstream/v8/src/profile-generator.h +1125 -0
  357. data/ext/v8/upstream/v8/src/property.cc +102 -0
  358. data/ext/v8/upstream/v8/src/property.h +348 -0
  359. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  360. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +470 -0
  361. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp.h +0 -0
  362. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.cc +0 -0
  363. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.h +0 -0
  364. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +266 -0
  365. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +236 -0
  366. data/ext/v8/upstream/v8/src/regexp-stack.cc +111 -0
  367. data/ext/v8/upstream/v8/src/regexp-stack.h +147 -0
  368. data/ext/v8/upstream/v8/src/regexp.js +483 -0
  369. data/ext/v8/upstream/v8/src/register-allocator-inl.h +141 -0
  370. data/ext/v8/upstream/v8/src/register-allocator.cc +98 -0
  371. data/ext/v8/upstream/v8/src/register-allocator.h +310 -0
  372. data/ext/v8/upstream/v8/src/rewriter.cc +1024 -0
  373. data/ext/v8/upstream/{3.1.8 → v8}/src/rewriter.h +0 -0
  374. data/ext/v8/upstream/v8/src/runtime-profiler.cc +478 -0
  375. data/ext/v8/upstream/v8/src/runtime-profiler.h +192 -0
  376. data/ext/v8/upstream/v8/src/runtime.cc +11949 -0
  377. data/ext/v8/upstream/v8/src/runtime.h +643 -0
  378. data/ext/v8/upstream/{3.1.8 → v8}/src/runtime.js +0 -0
  379. data/ext/v8/upstream/v8/src/safepoint-table.cc +256 -0
  380. data/ext/v8/upstream/v8/src/safepoint-table.h +269 -0
  381. data/ext/v8/upstream/v8/src/scanner-base.cc +964 -0
  382. data/ext/v8/upstream/v8/src/scanner-base.h +664 -0
  383. data/ext/v8/upstream/v8/src/scanner.cc +584 -0
  384. data/ext/v8/upstream/v8/src/scanner.h +196 -0
  385. data/ext/v8/upstream/v8/src/scopeinfo.cc +631 -0
  386. data/ext/v8/upstream/v8/src/scopeinfo.h +249 -0
  387. data/ext/v8/upstream/v8/src/scopes.cc +1093 -0
  388. data/ext/v8/upstream/v8/src/scopes.h +508 -0
  389. data/ext/v8/upstream/v8/src/serialize.cc +1574 -0
  390. data/ext/v8/upstream/v8/src/serialize.h +589 -0
  391. data/ext/v8/upstream/{3.1.8 → v8}/src/shell.h +0 -0
  392. data/ext/v8/upstream/{3.1.8 → v8}/src/simulator.h +0 -0
  393. data/ext/v8/upstream/v8/src/small-pointer-list.h +163 -0
  394. data/ext/v8/upstream/{3.1.8 → v8}/src/smart-pointer.h +0 -0
  395. data/ext/v8/upstream/v8/src/snapshot-common.cc +82 -0
  396. data/ext/v8/upstream/{3.1.8 → v8}/src/snapshot-empty.cc +0 -0
  397. data/ext/v8/upstream/v8/src/snapshot.h +73 -0
  398. data/ext/v8/upstream/v8/src/spaces-inl.h +529 -0
  399. data/ext/v8/upstream/v8/src/spaces.cc +3147 -0
  400. data/ext/v8/upstream/v8/src/spaces.h +2368 -0
  401. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree-inl.h +0 -0
  402. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree.h +0 -0
  403. data/ext/v8/upstream/v8/src/string-search.cc +41 -0
  404. data/ext/v8/upstream/v8/src/string-search.h +568 -0
  405. data/ext/v8/upstream/v8/src/string-stream.cc +592 -0
  406. data/ext/v8/upstream/{3.1.8 → v8}/src/string-stream.h +0 -0
  407. data/ext/v8/upstream/v8/src/string.js +915 -0
  408. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.cc +0 -0
  409. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.h +0 -0
  410. data/ext/v8/upstream/v8/src/stub-cache.cc +1940 -0
  411. data/ext/v8/upstream/v8/src/stub-cache.h +866 -0
  412. data/ext/v8/upstream/{3.1.8 → v8}/src/third_party/valgrind/valgrind.h +0 -0
  413. data/ext/v8/upstream/v8/src/token.cc +63 -0
  414. data/ext/v8/upstream/v8/src/token.h +288 -0
  415. data/ext/v8/upstream/v8/src/top.cc +983 -0
  416. data/ext/v8/upstream/v8/src/type-info.cc +472 -0
  417. data/ext/v8/upstream/v8/src/type-info.h +290 -0
  418. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue-inl.h +0 -0
  419. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue.h +0 -0
  420. data/ext/v8/upstream/{3.1.8 → v8}/src/unicode-inl.h +0 -0
  421. data/ext/v8/upstream/v8/src/unicode.cc +1624 -0
  422. data/ext/v8/upstream/v8/src/unicode.h +280 -0
  423. data/ext/v8/upstream/{3.1.8 → v8}/src/uri.js +0 -0
  424. data/ext/v8/upstream/{3.1.8 → v8}/src/utils.cc +0 -0
  425. data/ext/v8/upstream/v8/src/utils.h +796 -0
  426. data/ext/v8/upstream/v8/src/v8-counters.cc +62 -0
  427. data/ext/v8/upstream/v8/src/v8-counters.h +311 -0
  428. data/ext/v8/upstream/v8/src/v8.cc +215 -0
  429. data/ext/v8/upstream/v8/src/v8.h +130 -0
  430. data/ext/v8/upstream/{3.1.8 → v8}/src/v8checks.h +0 -0
  431. data/ext/v8/upstream/{3.1.8 → v8}/src/v8dll-main.cc +0 -0
  432. data/ext/v8/upstream/v8/src/v8globals.h +486 -0
  433. data/ext/v8/upstream/{3.1.8/src/memory.h → v8/src/v8memory.h} +0 -0
  434. data/ext/v8/upstream/v8/src/v8natives.js +1293 -0
  435. data/ext/v8/upstream/{3.1.8 → v8}/src/v8preparserdll-main.cc +0 -0
  436. data/ext/v8/upstream/v8/src/v8threads.cc +453 -0
  437. data/ext/v8/upstream/v8/src/v8threads.h +164 -0
  438. data/ext/v8/upstream/v8/src/v8utils.h +317 -0
  439. data/ext/v8/upstream/{3.1.8 → v8}/src/variables.cc +0 -0
  440. data/ext/v8/upstream/v8/src/variables.h +212 -0
  441. data/ext/v8/upstream/v8/src/version.cc +116 -0
  442. data/ext/v8/upstream/v8/src/version.h +68 -0
  443. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy-inl.h +0 -0
  444. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy.cc +0 -0
  445. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-inl.h +0 -0
  446. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +171 -0
  447. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-light.cc +0 -0
  448. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.cc +0 -0
  449. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.h +0 -0
  450. data/ext/v8/upstream/v8/src/vm-state-inl.h +138 -0
  451. data/ext/v8/upstream/v8/src/vm-state.h +70 -0
  452. data/ext/v8/upstream/v8/src/win32-headers.h +96 -0
  453. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +456 -0
  454. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +2954 -0
  455. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +1630 -0
  456. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +1493 -0
  457. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +5132 -0
  458. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +477 -0
  459. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/codegen-x64-inl.h +0 -0
  460. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +8843 -0
  461. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +753 -0
  462. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +88 -0
  463. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +318 -0
  464. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +815 -0
  465. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +1752 -0
  466. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/frames-x64.cc +0 -0
  467. data/ext/v8/upstream/v8/src/x64/frames-x64.h +130 -0
  468. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +4339 -0
  469. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +1752 -0
  470. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/jump-target-x64.cc +0 -0
  471. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +3970 -0
  472. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +318 -0
  473. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.cc +0 -0
  474. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.h +0 -0
  475. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +2115 -0
  476. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +2161 -0
  477. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +2911 -0
  478. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +1984 -0
  479. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  480. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  481. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +87 -0
  482. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +95 -0
  483. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/register-allocator-x64.h +0 -0
  484. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/simulator-x64.cc +0 -0
  485. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +71 -0
  486. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +3460 -0
  487. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +1296 -0
  488. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +597 -0
  489. data/ext/v8/upstream/v8/src/zone-inl.h +129 -0
  490. data/ext/v8/upstream/v8/src/zone.cc +196 -0
  491. data/ext/v8/upstream/v8/src/zone.h +236 -0
  492. data/ext/v8/upstream/{3.1.8 → v8}/tools/codemap.js +0 -0
  493. data/ext/v8/upstream/{3.1.8 → v8}/tools/consarray.js +0 -0
  494. data/ext/v8/upstream/{3.1.8 → v8}/tools/csvparser.js +0 -0
  495. data/ext/v8/upstream/{3.1.8 → v8}/tools/disasm.py +0 -0
  496. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +10 -0
  497. data/ext/v8/upstream/{3.1.8 → v8}/tools/gc-nvp-trace-processor.py +0 -0
  498. data/ext/v8/upstream/{3.1.8 → v8}/tools/generate-ten-powers.scm +0 -0
  499. data/ext/v8/upstream/{3.1.8 → v8}/tools/grokdump.py +0 -0
  500. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +844 -0
  501. data/ext/v8/upstream/{3.1.8 → v8}/tools/js2c.py +0 -0
  502. data/ext/v8/upstream/{3.1.8 → v8}/tools/jsmin.py +0 -0
  503. data/ext/v8/upstream/v8/tools/linux-tick-processor +35 -0
  504. data/ext/v8/upstream/{3.1.8 → v8}/tools/ll_prof.py +0 -0
  505. data/ext/v8/upstream/{3.1.8 → v8}/tools/logreader.js +0 -0
  506. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-nm +0 -0
  507. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-tick-processor +0 -0
  508. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/README +0 -0
  509. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/SConstruct +0 -0
  510. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/oom_dump.cc +0 -0
  511. data/ext/v8/upstream/{3.1.8 → v8}/tools/presubmit.py +0 -0
  512. data/ext/v8/upstream/{3.1.8 → v8}/tools/process-heap-prof.py +0 -0
  513. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile.js +0 -0
  514. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile_view.js +0 -0
  515. data/ext/v8/upstream/{3.1.8 → v8}/tools/run-valgrind.py +0 -0
  516. data/ext/v8/upstream/{3.1.8 → v8}/tools/splaytree.js +0 -0
  517. data/ext/v8/upstream/{3.1.8 → v8}/tools/stats-viewer.py +0 -0
  518. data/ext/v8/upstream/v8/tools/test.py +1490 -0
  519. data/ext/v8/upstream/{3.1.8 → v8}/tools/tickprocessor-driver.js +0 -0
  520. data/ext/v8/upstream/v8/tools/tickprocessor.js +877 -0
  521. data/ext/v8/upstream/{3.1.8 → v8}/tools/utils.py +0 -0
  522. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/README.txt +0 -0
  523. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/arm.vsprops +0 -0
  524. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/common.vsprops +0 -0
  525. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8.vcproj +0 -0
  526. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_arm.vcproj +0 -0
  527. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_x64.vcproj +0 -0
  528. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8js2c.cmd +0 -0
  529. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/debug.vsprops +0 -0
  530. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/ia32.vsprops +0 -0
  531. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/js2c.cmd +0 -0
  532. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/release.vsprops +0 -0
  533. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.sln +0 -0
  534. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.vcproj +0 -0
  535. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.sln +0 -0
  536. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.vcproj +0 -0
  537. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +1308 -0
  538. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +1238 -0
  539. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +1300 -0
  540. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  541. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  542. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  543. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  544. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  545. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  546. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  547. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  548. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  549. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  550. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  551. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  552. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  553. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  554. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  555. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.sln +0 -0
  556. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.vcproj +0 -0
  557. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +18 -0
  558. data/ext/v8/upstream/{3.1.8 → v8}/tools/windows-tick-processor.bat +0 -0
  559. data/ext/v8/v8_callbacks.cpp +52 -92
  560. data/ext/v8/v8_date.cpp +2 -3
  561. data/ext/v8/v8_object.cpp +4 -0
  562. data/ext/v8/v8_template.cpp +2 -2
  563. data/ext/v8/v8_try_catch.cpp +8 -38
  564. data/lib/v8/version.rb +1 -1
  565. data/spec/ext/ext_spec_helper.rb +2 -20
  566. data/spec/ext/object_spec.rb +0 -12
  567. data/spec/ext/try_catch_spec.rb +29 -1
  568. data/spec/spec_helper.rb +1 -0
  569. data/spec/v8/portal/proxies_spec.rb +1 -84
  570. data/specmem/handle_memspec.rb +41 -0
  571. data/specmem/object_memspec.rb +16 -0
  572. data/specmem/proxies_memspec.rb +86 -0
  573. data/specmem/spec_helper.rb +24 -0
  574. data/therubyracer.gemspec +7 -2
  575. metadata +564 -541
  576. data/ext/v8/upstream/3.1.8/.gitignore +0 -31
  577. data/ext/v8/upstream/3.1.8/AUTHORS +0 -40
  578. data/ext/v8/upstream/3.1.8/ChangeLog +0 -2566
  579. data/ext/v8/upstream/3.1.8/SConstruct +0 -1192
  580. data/ext/v8/upstream/3.1.8/include/v8-debug.h +0 -384
  581. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +0 -116
  582. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +0 -426
  583. data/ext/v8/upstream/3.1.8/include/v8-testing.h +0 -99
  584. data/ext/v8/upstream/3.1.8/include/v8.h +0 -3846
  585. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +0 -206
  586. data/ext/v8/upstream/3.1.8/src/SConscript +0 -356
  587. data/ext/v8/upstream/3.1.8/src/accessors.cc +0 -907
  588. data/ext/v8/upstream/3.1.8/src/allocation.cc +0 -204
  589. data/ext/v8/upstream/3.1.8/src/allocation.h +0 -176
  590. data/ext/v8/upstream/3.1.8/src/api.cc +0 -5191
  591. data/ext/v8/upstream/3.1.8/src/api.h +0 -508
  592. data/ext/v8/upstream/3.1.8/src/apiutils.h +0 -80
  593. data/ext/v8/upstream/3.1.8/src/arguments.h +0 -105
  594. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +0 -352
  595. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +0 -2756
  596. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +0 -1294
  597. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +0 -1628
  598. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +0 -6783
  599. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +0 -657
  600. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +0 -7403
  601. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +0 -595
  602. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +0 -769
  603. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +0 -147
  604. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +0 -315
  605. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +0 -700
  606. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +0 -1439
  607. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +0 -168
  608. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +0 -4230
  609. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +0 -1799
  610. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +0 -2041
  611. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +0 -2046
  612. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +0 -3822
  613. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +0 -312
  614. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +0 -303
  615. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +0 -2701
  616. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +0 -1015
  617. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +0 -1280
  618. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +0 -252
  619. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +0 -3165
  620. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +0 -402
  621. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +0 -4077
  622. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +0 -843
  623. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +0 -520
  624. data/ext/v8/upstream/3.1.8/src/array.js +0 -1231
  625. data/ext/v8/upstream/3.1.8/src/assembler.cc +0 -973
  626. data/ext/v8/upstream/3.1.8/src/assembler.h +0 -787
  627. data/ext/v8/upstream/3.1.8/src/ast-inl.h +0 -107
  628. data/ext/v8/upstream/3.1.8/src/ast.cc +0 -1067
  629. data/ext/v8/upstream/3.1.8/src/ast.h +0 -2177
  630. data/ext/v8/upstream/3.1.8/src/atomicops.h +0 -165
  631. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +0 -1888
  632. data/ext/v8/upstream/3.1.8/src/bootstrapper.h +0 -118
  633. data/ext/v8/upstream/3.1.8/src/builtins.cc +0 -1586
  634. data/ext/v8/upstream/3.1.8/src/builtins.h +0 -339
  635. data/ext/v8/upstream/3.1.8/src/checks.cc +0 -110
  636. data/ext/v8/upstream/3.1.8/src/checks.h +0 -292
  637. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +0 -230
  638. data/ext/v8/upstream/3.1.8/src/code-stubs.h +0 -950
  639. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +0 -64
  640. data/ext/v8/upstream/3.1.8/src/codegen.cc +0 -495
  641. data/ext/v8/upstream/3.1.8/src/codegen.h +0 -245
  642. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +0 -654
  643. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +0 -112
  644. data/ext/v8/upstream/3.1.8/src/compiler.cc +0 -806
  645. data/ext/v8/upstream/3.1.8/src/compiler.h +0 -290
  646. data/ext/v8/upstream/3.1.8/src/contexts.cc +0 -320
  647. data/ext/v8/upstream/3.1.8/src/contexts.h +0 -376
  648. data/ext/v8/upstream/3.1.8/src/conversions.cc +0 -1069
  649. data/ext/v8/upstream/3.1.8/src/counters.cc +0 -78
  650. data/ext/v8/upstream/3.1.8/src/counters.h +0 -242
  651. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +0 -100
  652. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +0 -554
  653. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +0 -291
  654. data/ext/v8/upstream/3.1.8/src/cpu.h +0 -65
  655. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +0 -367
  656. data/ext/v8/upstream/3.1.8/src/d8-debug.h +0 -157
  657. data/ext/v8/upstream/3.1.8/src/d8-posix.cc +0 -693
  658. data/ext/v8/upstream/3.1.8/src/d8.cc +0 -792
  659. data/ext/v8/upstream/3.1.8/src/d8.gyp +0 -85
  660. data/ext/v8/upstream/3.1.8/src/data-flow.h +0 -379
  661. data/ext/v8/upstream/3.1.8/src/dateparser.h +0 -263
  662. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +0 -446
  663. data/ext/v8/upstream/3.1.8/src/debug-agent.h +0 -131
  664. data/ext/v8/upstream/3.1.8/src/debug.cc +0 -3085
  665. data/ext/v8/upstream/3.1.8/src/debug.h +0 -1025
  666. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +0 -1185
  667. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +0 -529
  668. data/ext/v8/upstream/3.1.8/src/disasm.h +0 -77
  669. data/ext/v8/upstream/3.1.8/src/disassembler.cc +0 -338
  670. data/ext/v8/upstream/3.1.8/src/execution.cc +0 -735
  671. data/ext/v8/upstream/3.1.8/src/execution.h +0 -322
  672. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +0 -53
  673. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +0 -264
  674. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +0 -141
  675. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +0 -58
  676. data/ext/v8/upstream/3.1.8/src/factory.cc +0 -1087
  677. data/ext/v8/upstream/3.1.8/src/factory.h +0 -432
  678. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +0 -552
  679. data/ext/v8/upstream/3.1.8/src/frame-element.cc +0 -42
  680. data/ext/v8/upstream/3.1.8/src/frame-element.h +0 -277
  681. data/ext/v8/upstream/3.1.8/src/frames-inl.h +0 -210
  682. data/ext/v8/upstream/3.1.8/src/frames.cc +0 -1232
  683. data/ext/v8/upstream/3.1.8/src/frames.h +0 -826
  684. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +0 -1382
  685. data/ext/v8/upstream/3.1.8/src/full-codegen.h +0 -751
  686. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +0 -90
  687. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +0 -111
  688. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +0 -1547
  689. data/ext/v8/upstream/3.1.8/src/global-handles.cc +0 -534
  690. data/ext/v8/upstream/3.1.8/src/global-handles.h +0 -181
  691. data/ext/v8/upstream/3.1.8/src/globals.h +0 -325
  692. data/ext/v8/upstream/3.1.8/src/handles-inl.h +0 -80
  693. data/ext/v8/upstream/3.1.8/src/handles.cc +0 -910
  694. data/ext/v8/upstream/3.1.8/src/handles.h +0 -424
  695. data/ext/v8/upstream/3.1.8/src/hashmap.h +0 -121
  696. data/ext/v8/upstream/3.1.8/src/heap-inl.h +0 -587
  697. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +0 -1128
  698. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +0 -381
  699. data/ext/v8/upstream/3.1.8/src/heap.cc +0 -5610
  700. data/ext/v8/upstream/3.1.8/src/heap.h +0 -2218
  701. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +0 -1490
  702. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +0 -3493
  703. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +0 -6056
  704. data/ext/v8/upstream/3.1.8/src/hydrogen.h +0 -1091
  705. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +0 -429
  706. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +0 -2800
  707. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +0 -1093
  708. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +0 -1590
  709. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +0 -6624
  710. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +0 -536
  711. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +0 -10354
  712. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +0 -798
  713. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +0 -87
  714. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +0 -309
  715. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +0 -664
  716. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +0 -1597
  717. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +0 -140
  718. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +0 -4278
  719. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +0 -1786
  720. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +0 -3880
  721. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +0 -309
  722. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +0 -460
  723. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +0 -2095
  724. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +0 -2127
  725. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +0 -2031
  726. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +0 -798
  727. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1253
  728. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +0 -215
  729. data/ext/v8/upstream/3.1.8/src/ia32/register-allocator-ia32.cc +0 -157
  730. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +0 -72
  731. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +0 -3732
  732. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +0 -1360
  733. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +0 -646
  734. data/ext/v8/upstream/3.1.8/src/ic-inl.h +0 -129
  735. data/ext/v8/upstream/3.1.8/src/ic.cc +0 -2333
  736. data/ext/v8/upstream/3.1.8/src/ic.h +0 -639
  737. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +0 -655
  738. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.h +0 -48
  739. data/ext/v8/upstream/3.1.8/src/json.js +0 -342
  740. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +0 -5340
  741. data/ext/v8/upstream/3.1.8/src/jsregexp.h +0 -1484
  742. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +0 -430
  743. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +0 -244
  744. data/ext/v8/upstream/3.1.8/src/jump-target-inl.h +0 -48
  745. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +0 -111
  746. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +0 -140
  747. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +0 -2093
  748. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +0 -644
  749. data/ext/v8/upstream/3.1.8/src/lithium.cc +0 -168
  750. data/ext/v8/upstream/3.1.8/src/liveedit.cc +0 -1650
  751. data/ext/v8/upstream/3.1.8/src/liveedit.h +0 -174
  752. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +0 -2527
  753. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +0 -322
  754. data/ext/v8/upstream/3.1.8/src/log-utils.cc +0 -336
  755. data/ext/v8/upstream/3.1.8/src/log-utils.h +0 -232
  756. data/ext/v8/upstream/3.1.8/src/log.cc +0 -1608
  757. data/ext/v8/upstream/3.1.8/src/log.h +0 -379
  758. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +0 -2957
  759. data/ext/v8/upstream/3.1.8/src/mark-compact.h +0 -433
  760. data/ext/v8/upstream/3.1.8/src/messages.cc +0 -164
  761. data/ext/v8/upstream/3.1.8/src/messages.js +0 -1071
  762. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips-inl.h +0 -215
  763. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.cc +0 -1219
  764. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +0 -667
  765. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +0 -205
  766. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips-inl.h +0 -70
  767. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.cc +0 -1437
  768. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +0 -431
  769. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.cc +0 -328
  770. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.h +0 -525
  771. data/ext/v8/upstream/3.1.8/src/mips/cpu-mips.cc +0 -73
  772. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +0 -127
  773. data/ext/v8/upstream/3.1.8/src/mips/disasm-mips.cc +0 -787
  774. data/ext/v8/upstream/3.1.8/src/mips/fast-codegen-mips.cc +0 -77
  775. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +0 -96
  776. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.h +0 -164
  777. data/ext/v8/upstream/3.1.8/src/mips/full-codegen-mips.cc +0 -277
  778. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +0 -208
  779. data/ext/v8/upstream/3.1.8/src/mips/jump-target-mips.cc +0 -175
  780. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.cc +0 -1326
  781. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.h +0 -461
  782. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips-inl.h +0 -137
  783. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips.h +0 -46
  784. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +0 -1650
  785. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +0 -311
  786. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +0 -418
  787. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.cc +0 -319
  788. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.h +0 -548
  789. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +0 -2380
  790. data/ext/v8/upstream/3.1.8/src/mksnapshot.cc +0 -256
  791. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +0 -722
  792. data/ext/v8/upstream/3.1.8/src/objects-inl.h +0 -3946
  793. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +0 -801
  794. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +0 -142
  795. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +0 -401
  796. data/ext/v8/upstream/3.1.8/src/objects.cc +0 -10044
  797. data/ext/v8/upstream/3.1.8/src/objects.h +0 -6571
  798. data/ext/v8/upstream/3.1.8/src/parser.cc +0 -5165
  799. data/ext/v8/upstream/3.1.8/src/parser.h +0 -802
  800. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +0 -745
  801. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +0 -702
  802. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +0 -981
  803. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +0 -732
  804. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +0 -498
  805. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +0 -657
  806. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +0 -399
  807. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +0 -714
  808. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +0 -1974
  809. data/ext/v8/upstream/3.1.8/src/platform.h +0 -636
  810. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +0 -183
  811. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +0 -213
  812. data/ext/v8/upstream/3.1.8/src/preparser.cc +0 -1205
  813. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +0 -1539
  814. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +0 -223
  815. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +0 -2899
  816. data/ext/v8/upstream/3.1.8/src/profile-generator.h +0 -1151
  817. data/ext/v8/upstream/3.1.8/src/property.cc +0 -96
  818. data/ext/v8/upstream/3.1.8/src/property.h +0 -337
  819. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +0 -470
  820. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +0 -257
  821. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +0 -231
  822. data/ext/v8/upstream/3.1.8/src/regexp-stack.cc +0 -103
  823. data/ext/v8/upstream/3.1.8/src/regexp-stack.h +0 -123
  824. data/ext/v8/upstream/3.1.8/src/regexp.js +0 -483
  825. data/ext/v8/upstream/3.1.8/src/register-allocator-inl.h +0 -141
  826. data/ext/v8/upstream/3.1.8/src/register-allocator.cc +0 -104
  827. data/ext/v8/upstream/3.1.8/src/register-allocator.h +0 -320
  828. data/ext/v8/upstream/3.1.8/src/rewriter.cc +0 -1023
  829. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +0 -443
  830. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +0 -77
  831. data/ext/v8/upstream/3.1.8/src/runtime.cc +0 -11592
  832. data/ext/v8/upstream/3.1.8/src/runtime.h +0 -582
  833. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +0 -253
  834. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +0 -263
  835. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +0 -971
  836. data/ext/v8/upstream/3.1.8/src/scanner-base.h +0 -653
  837. data/ext/v8/upstream/3.1.8/src/scanner.cc +0 -586
  838. data/ext/v8/upstream/3.1.8/src/scanner.h +0 -194
  839. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +0 -636
  840. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +0 -238
  841. data/ext/v8/upstream/3.1.8/src/scopes.cc +0 -1063
  842. data/ext/v8/upstream/3.1.8/src/scopes.h +0 -494
  843. data/ext/v8/upstream/3.1.8/src/serialize.cc +0 -1535
  844. data/ext/v8/upstream/3.1.8/src/serialize.h +0 -584
  845. data/ext/v8/upstream/3.1.8/src/snapshot-common.cc +0 -82
  846. data/ext/v8/upstream/3.1.8/src/snapshot.h +0 -71
  847. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +0 -524
  848. data/ext/v8/upstream/3.1.8/src/spaces.cc +0 -3254
  849. data/ext/v8/upstream/3.1.8/src/spaces.h +0 -2362
  850. data/ext/v8/upstream/3.1.8/src/string-search.cc +0 -40
  851. data/ext/v8/upstream/3.1.8/src/string-search.h +0 -567
  852. data/ext/v8/upstream/3.1.8/src/string-stream.cc +0 -584
  853. data/ext/v8/upstream/3.1.8/src/string.js +0 -915
  854. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +0 -1878
  855. data/ext/v8/upstream/3.1.8/src/stub-cache.h +0 -849
  856. data/ext/v8/upstream/3.1.8/src/token.cc +0 -63
  857. data/ext/v8/upstream/3.1.8/src/token.h +0 -288
  858. data/ext/v8/upstream/3.1.8/src/top.cc +0 -1152
  859. data/ext/v8/upstream/3.1.8/src/top.h +0 -608
  860. data/ext/v8/upstream/3.1.8/src/type-info.cc +0 -406
  861. data/ext/v8/upstream/3.1.8/src/type-info.h +0 -283
  862. data/ext/v8/upstream/3.1.8/src/unicode.cc +0 -1624
  863. data/ext/v8/upstream/3.1.8/src/unicode.h +0 -280
  864. data/ext/v8/upstream/3.1.8/src/utils.h +0 -793
  865. data/ext/v8/upstream/3.1.8/src/v8-counters.cc +0 -55
  866. data/ext/v8/upstream/3.1.8/src/v8-counters.h +0 -290
  867. data/ext/v8/upstream/3.1.8/src/v8.cc +0 -270
  868. data/ext/v8/upstream/3.1.8/src/v8.h +0 -127
  869. data/ext/v8/upstream/3.1.8/src/v8globals.h +0 -480
  870. data/ext/v8/upstream/3.1.8/src/v8natives.js +0 -1252
  871. data/ext/v8/upstream/3.1.8/src/v8threads.cc +0 -440
  872. data/ext/v8/upstream/3.1.8/src/v8threads.h +0 -157
  873. data/ext/v8/upstream/3.1.8/src/v8utils.h +0 -354
  874. data/ext/v8/upstream/3.1.8/src/variables.h +0 -212
  875. data/ext/v8/upstream/3.1.8/src/version.cc +0 -95
  876. data/ext/v8/upstream/3.1.8/src/version.h +0 -64
  877. data/ext/v8/upstream/3.1.8/src/virtual-frame-light-inl.h +0 -170
  878. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +0 -134
  879. data/ext/v8/upstream/3.1.8/src/vm-state.h +0 -68
  880. data/ext/v8/upstream/3.1.8/src/win32-headers.h +0 -95
  881. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +0 -455
  882. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +0 -3162
  883. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +0 -1584
  884. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +0 -1492
  885. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +0 -5150
  886. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +0 -519
  887. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +0 -8835
  888. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +0 -750
  889. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +0 -86
  890. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +0 -316
  891. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +0 -781
  892. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +0 -1737
  893. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +0 -130
  894. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +0 -3984
  895. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +0 -1761
  896. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +0 -3639
  897. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +0 -305
  898. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +0 -2044
  899. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +0 -2052
  900. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +0 -2660
  901. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +0 -1852
  902. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +0 -1382
  903. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +0 -278
  904. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64-inl.h +0 -87
  905. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64.cc +0 -91
  906. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +0 -71
  907. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +0 -3509
  908. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +0 -1292
  909. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +0 -593
  910. data/ext/v8/upstream/3.1.8/src/zone-inl.h +0 -83
  911. data/ext/v8/upstream/3.1.8/src/zone.cc +0 -195
  912. data/ext/v8/upstream/3.1.8/src/zone.h +0 -233
  913. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +0 -869
  914. data/ext/v8/upstream/3.1.8/tools/linux-tick-processor +0 -33
  915. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +0 -863
  916. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +0 -1296
  917. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +0 -1234
  918. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +0 -1296
  919. data/ext/v8/upstream/3.1.8/tools/visual_studio/x64.vsprops +0 -17
  920. data/spec/ext/mem_spec.rb +0 -42
@@ -1,3880 +0,0 @@
1
- // Copyright 2011 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #if defined(V8_TARGET_ARCH_IA32)
31
-
32
- #include "ia32/lithium-codegen-ia32.h"
33
- #include "code-stubs.h"
34
- #include "stub-cache.h"
35
-
36
- namespace v8 {
37
- namespace internal {
38
-
39
-
40
- // When invoking builtins, we need to record the safepoint in the middle of
41
- // the invoke instruction sequence generated by the macro assembler.
42
- class SafepointGenerator : public PostCallGenerator {
43
- public:
44
- SafepointGenerator(LCodeGen* codegen,
45
- LPointerMap* pointers,
46
- int deoptimization_index,
47
- bool ensure_reloc_space = false)
48
- : codegen_(codegen),
49
- pointers_(pointers),
50
- deoptimization_index_(deoptimization_index),
51
- ensure_reloc_space_(ensure_reloc_space) { }
52
- virtual ~SafepointGenerator() { }
53
-
54
- virtual void Generate() {
55
- // Ensure that we have enough space in the reloc info to patch
56
- // this with calls when doing deoptimization.
57
- if (ensure_reloc_space_) {
58
- codegen_->EnsureRelocSpaceForDeoptimization();
59
- }
60
- codegen_->RecordSafepoint(pointers_, deoptimization_index_);
61
- }
62
-
63
- private:
64
- LCodeGen* codegen_;
65
- LPointerMap* pointers_;
66
- int deoptimization_index_;
67
- bool ensure_reloc_space_;
68
- };
69
-
70
-
71
- #define __ masm()->
72
-
73
- bool LCodeGen::GenerateCode() {
74
- HPhase phase("Code generation", chunk());
75
- ASSERT(is_unused());
76
- status_ = GENERATING;
77
- CpuFeatures::Scope scope(SSE2);
78
- return GeneratePrologue() &&
79
- GenerateBody() &&
80
- GenerateDeferredCode() &&
81
- GenerateRelocPadding() &&
82
- GenerateSafepointTable();
83
- }
84
-
85
-
86
- void LCodeGen::FinishCode(Handle<Code> code) {
87
- ASSERT(is_done());
88
- code->set_stack_slots(StackSlotCount());
89
- code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
90
- PopulateDeoptimizationData(code);
91
- }
92
-
93
-
94
- void LCodeGen::Abort(const char* format, ...) {
95
- if (FLAG_trace_bailout) {
96
- SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
97
- PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
98
- va_list arguments;
99
- va_start(arguments, format);
100
- OS::VPrint(format, arguments);
101
- va_end(arguments);
102
- PrintF("\n");
103
- }
104
- status_ = ABORTED;
105
- }
106
-
107
-
108
- void LCodeGen::Comment(const char* format, ...) {
109
- if (!FLAG_code_comments) return;
110
- char buffer[4 * KB];
111
- StringBuilder builder(buffer, ARRAY_SIZE(buffer));
112
- va_list arguments;
113
- va_start(arguments, format);
114
- builder.AddFormattedList(format, arguments);
115
- va_end(arguments);
116
-
117
- // Copy the string before recording it in the assembler to avoid
118
- // issues when the stack allocated buffer goes out of scope.
119
- size_t length = builder.position();
120
- Vector<char> copy = Vector<char>::New(length + 1);
121
- memcpy(copy.start(), builder.Finalize(), copy.length());
122
- masm()->RecordComment(copy.start());
123
- }
124
-
125
-
126
- bool LCodeGen::GenerateRelocPadding() {
127
- int reloc_size = masm()->relocation_writer_size();
128
- while (reloc_size < deoptimization_reloc_size.min_size) {
129
- __ RecordComment(RelocInfo::kFillerCommentString, true);
130
- reloc_size += RelocInfo::kMinRelocCommentSize;
131
- }
132
- return !is_aborted();
133
- }
134
-
135
-
136
- bool LCodeGen::GeneratePrologue() {
137
- ASSERT(is_generating());
138
-
139
- #ifdef DEBUG
140
- if (strlen(FLAG_stop_at) > 0 &&
141
- info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
142
- __ int3();
143
- }
144
- #endif
145
-
146
- __ push(ebp); // Caller's frame pointer.
147
- __ mov(ebp, esp);
148
- __ push(esi); // Callee's context.
149
- __ push(edi); // Callee's JS function.
150
-
151
- // Reserve space for the stack slots needed by the code.
152
- int slots = StackSlotCount();
153
- if (slots > 0) {
154
- if (FLAG_debug_code) {
155
- __ mov(Operand(eax), Immediate(slots));
156
- Label loop;
157
- __ bind(&loop);
158
- __ push(Immediate(kSlotsZapValue));
159
- __ dec(eax);
160
- __ j(not_zero, &loop);
161
- } else {
162
- __ sub(Operand(esp), Immediate(slots * kPointerSize));
163
- #ifdef _MSC_VER
164
- // On windows, you may not access the stack more than one page below
165
- // the most recently mapped page. To make the allocated area randomly
166
- // accessible, we write to each page in turn (the value is irrelevant).
167
- const int kPageSize = 4 * KB;
168
- for (int offset = slots * kPointerSize - kPageSize;
169
- offset > 0;
170
- offset -= kPageSize) {
171
- __ mov(Operand(esp, offset), eax);
172
- }
173
- #endif
174
- }
175
- }
176
-
177
- // Possibly allocate a local context.
178
- int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
179
- if (heap_slots > 0) {
180
- Comment(";;; Allocate local context");
181
- // Argument to NewContext is the function, which is still in edi.
182
- __ push(edi);
183
- if (heap_slots <= FastNewContextStub::kMaximumSlots) {
184
- FastNewContextStub stub(heap_slots);
185
- __ CallStub(&stub);
186
- } else {
187
- __ CallRuntime(Runtime::kNewContext, 1);
188
- }
189
- RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
190
- // Context is returned in both eax and esi. It replaces the context
191
- // passed to us. It's saved in the stack and kept live in esi.
192
- __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
193
-
194
- // Copy parameters into context if necessary.
195
- int num_parameters = scope()->num_parameters();
196
- for (int i = 0; i < num_parameters; i++) {
197
- Slot* slot = scope()->parameter(i)->AsSlot();
198
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
199
- int parameter_offset = StandardFrameConstants::kCallerSPOffset +
200
- (num_parameters - 1 - i) * kPointerSize;
201
- // Load parameter from stack.
202
- __ mov(eax, Operand(ebp, parameter_offset));
203
- // Store it in the context.
204
- int context_offset = Context::SlotOffset(slot->index());
205
- __ mov(Operand(esi, context_offset), eax);
206
- // Update the write barrier. This clobbers all involved
207
- // registers, so we have to use a third register to avoid
208
- // clobbering esi.
209
- __ mov(ecx, esi);
210
- __ RecordWrite(ecx, context_offset, eax, ebx);
211
- }
212
- }
213
- Comment(";;; End allocate local context");
214
- }
215
-
216
- // Trace the call.
217
- if (FLAG_trace) {
218
- // We have not executed any compiled code yet, so esi still holds the
219
- // incoming context.
220
- __ CallRuntime(Runtime::kTraceEnter, 0);
221
- }
222
- return !is_aborted();
223
- }
224
-
225
-
226
- bool LCodeGen::GenerateBody() {
227
- ASSERT(is_generating());
228
- bool emit_instructions = true;
229
- for (current_instruction_ = 0;
230
- !is_aborted() && current_instruction_ < instructions_->length();
231
- current_instruction_++) {
232
- LInstruction* instr = instructions_->at(current_instruction_);
233
- if (instr->IsLabel()) {
234
- LLabel* label = LLabel::cast(instr);
235
- emit_instructions = !label->HasReplacement();
236
- }
237
-
238
- if (emit_instructions) {
239
- Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
240
- instr->CompileToNative(this);
241
- }
242
- }
243
- return !is_aborted();
244
- }
245
-
246
-
247
- LInstruction* LCodeGen::GetNextInstruction() {
248
- if (current_instruction_ < instructions_->length() - 1) {
249
- return instructions_->at(current_instruction_ + 1);
250
- } else {
251
- return NULL;
252
- }
253
- }
254
-
255
-
256
- bool LCodeGen::GenerateDeferredCode() {
257
- ASSERT(is_generating());
258
- for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
259
- LDeferredCode* code = deferred_[i];
260
- __ bind(code->entry());
261
- code->Generate();
262
- __ jmp(code->exit());
263
- }
264
-
265
- // Deferred code is the last part of the instruction sequence. Mark
266
- // the generated code as done unless we bailed out.
267
- if (!is_aborted()) status_ = DONE;
268
- return !is_aborted();
269
- }
270
-
271
-
272
- bool LCodeGen::GenerateSafepointTable() {
273
- ASSERT(is_done());
274
- safepoints_.Emit(masm(), StackSlotCount());
275
- return !is_aborted();
276
- }
277
-
278
-
279
- Register LCodeGen::ToRegister(int index) const {
280
- return Register::FromAllocationIndex(index);
281
- }
282
-
283
-
284
- XMMRegister LCodeGen::ToDoubleRegister(int index) const {
285
- return XMMRegister::FromAllocationIndex(index);
286
- }
287
-
288
-
289
- Register LCodeGen::ToRegister(LOperand* op) const {
290
- ASSERT(op->IsRegister());
291
- return ToRegister(op->index());
292
- }
293
-
294
-
295
- XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
296
- ASSERT(op->IsDoubleRegister());
297
- return ToDoubleRegister(op->index());
298
- }
299
-
300
-
301
- int LCodeGen::ToInteger32(LConstantOperand* op) const {
302
- Handle<Object> value = chunk_->LookupLiteral(op);
303
- ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
304
- ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
305
- value->Number());
306
- return static_cast<int32_t>(value->Number());
307
- }
308
-
309
-
310
- Immediate LCodeGen::ToImmediate(LOperand* op) {
311
- LConstantOperand* const_op = LConstantOperand::cast(op);
312
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
313
- Representation r = chunk_->LookupLiteralRepresentation(const_op);
314
- if (r.IsInteger32()) {
315
- ASSERT(literal->IsNumber());
316
- return Immediate(static_cast<int32_t>(literal->Number()));
317
- } else if (r.IsDouble()) {
318
- Abort("unsupported double immediate");
319
- }
320
- ASSERT(r.IsTagged());
321
- return Immediate(literal);
322
- }
323
-
324
-
325
- Operand LCodeGen::ToOperand(LOperand* op) const {
326
- if (op->IsRegister()) return Operand(ToRegister(op));
327
- if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
328
- ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
329
- int index = op->index();
330
- if (index >= 0) {
331
- // Local or spill slot. Skip the frame pointer, function, and
332
- // context in the fixed part of the frame.
333
- return Operand(ebp, -(index + 3) * kPointerSize);
334
- } else {
335
- // Incoming parameter. Skip the return address.
336
- return Operand(ebp, -(index - 1) * kPointerSize);
337
- }
338
- }
339
-
340
-
341
- Operand LCodeGen::HighOperand(LOperand* op) {
342
- ASSERT(op->IsDoubleStackSlot());
343
- int index = op->index();
344
- int offset = (index >= 0) ? index + 3 : index - 1;
345
- return Operand(ebp, -offset * kPointerSize);
346
- }
347
-
348
-
349
- void LCodeGen::WriteTranslation(LEnvironment* environment,
350
- Translation* translation) {
351
- if (environment == NULL) return;
352
-
353
- // The translation includes one command per value in the environment.
354
- int translation_size = environment->values()->length();
355
- // The output frame height does not include the parameters.
356
- int height = translation_size - environment->parameter_count();
357
-
358
- WriteTranslation(environment->outer(), translation);
359
- int closure_id = DefineDeoptimizationLiteral(environment->closure());
360
- translation->BeginFrame(environment->ast_id(), closure_id, height);
361
- for (int i = 0; i < translation_size; ++i) {
362
- LOperand* value = environment->values()->at(i);
363
- // spilled_registers_ and spilled_double_registers_ are either
364
- // both NULL or both set.
365
- if (environment->spilled_registers() != NULL && value != NULL) {
366
- if (value->IsRegister() &&
367
- environment->spilled_registers()[value->index()] != NULL) {
368
- translation->MarkDuplicate();
369
- AddToTranslation(translation,
370
- environment->spilled_registers()[value->index()],
371
- environment->HasTaggedValueAt(i));
372
- } else if (
373
- value->IsDoubleRegister() &&
374
- environment->spilled_double_registers()[value->index()] != NULL) {
375
- translation->MarkDuplicate();
376
- AddToTranslation(
377
- translation,
378
- environment->spilled_double_registers()[value->index()],
379
- false);
380
- }
381
- }
382
-
383
- AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
384
- }
385
- }
386
-
387
-
388
- void LCodeGen::EnsureRelocSpaceForDeoptimization() {
389
- // Since we patch the reloc info with RUNTIME_ENTRY calls every patch
390
- // site will take up 2 bytes + any pc-jumps.
391
- // We are conservative and always reserver 6 bytes in case where a
392
- // simple pc-jump is not enough.
393
- uint32_t pc_delta =
394
- masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset;
395
- if (is_uintn(pc_delta, 6)) {
396
- deoptimization_reloc_size.min_size += 2;
397
- } else {
398
- deoptimization_reloc_size.min_size += 6;
399
- }
400
- deoptimization_reloc_size.last_pc_offset = masm()->pc_offset();
401
- }
402
-
403
-
404
- void LCodeGen::AddToTranslation(Translation* translation,
405
- LOperand* op,
406
- bool is_tagged) {
407
- if (op == NULL) {
408
- // TODO(twuerthinger): Introduce marker operands to indicate that this value
409
- // is not present and must be reconstructed from the deoptimizer. Currently
410
- // this is only used for the arguments object.
411
- translation->StoreArgumentsObject();
412
- } else if (op->IsStackSlot()) {
413
- if (is_tagged) {
414
- translation->StoreStackSlot(op->index());
415
- } else {
416
- translation->StoreInt32StackSlot(op->index());
417
- }
418
- } else if (op->IsDoubleStackSlot()) {
419
- translation->StoreDoubleStackSlot(op->index());
420
- } else if (op->IsArgument()) {
421
- ASSERT(is_tagged);
422
- int src_index = StackSlotCount() + op->index();
423
- translation->StoreStackSlot(src_index);
424
- } else if (op->IsRegister()) {
425
- Register reg = ToRegister(op);
426
- if (is_tagged) {
427
- translation->StoreRegister(reg);
428
- } else {
429
- translation->StoreInt32Register(reg);
430
- }
431
- } else if (op->IsDoubleRegister()) {
432
- XMMRegister reg = ToDoubleRegister(op);
433
- translation->StoreDoubleRegister(reg);
434
- } else if (op->IsConstantOperand()) {
435
- Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
436
- int src_index = DefineDeoptimizationLiteral(literal);
437
- translation->StoreLiteral(src_index);
438
- } else {
439
- UNREACHABLE();
440
- }
441
- }
442
-
443
-
444
- void LCodeGen::CallCode(Handle<Code> code,
445
- RelocInfo::Mode mode,
446
- LInstruction* instr,
447
- bool adjusted) {
448
- ASSERT(instr != NULL);
449
- LPointerMap* pointers = instr->pointer_map();
450
- RecordPosition(pointers->position());
451
-
452
- if (!adjusted) {
453
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
454
- }
455
- __ call(code, mode);
456
-
457
- EnsureRelocSpaceForDeoptimization();
458
- RegisterLazyDeoptimization(instr);
459
-
460
- // Signal that we don't inline smi code before these stubs in the
461
- // optimizing code generator.
462
- if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
463
- code->kind() == Code::COMPARE_IC) {
464
- __ nop();
465
- }
466
- }
467
-
468
-
469
- void LCodeGen::CallRuntime(Runtime::Function* fun,
470
- int argc,
471
- LInstruction* instr,
472
- bool adjusted) {
473
- ASSERT(instr != NULL);
474
- ASSERT(instr->HasPointerMap());
475
- LPointerMap* pointers = instr->pointer_map();
476
- RecordPosition(pointers->position());
477
-
478
- if (!adjusted) {
479
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
480
- }
481
- __ CallRuntime(fun, argc);
482
- RegisterLazyDeoptimization(instr);
483
- }
484
-
485
-
486
- void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
487
- // Create the environment to bailout to. If the call has side effects
488
- // execution has to continue after the call otherwise execution can continue
489
- // from a previous bailout point repeating the call.
490
- LEnvironment* deoptimization_environment;
491
- if (instr->HasDeoptimizationEnvironment()) {
492
- deoptimization_environment = instr->deoptimization_environment();
493
- } else {
494
- deoptimization_environment = instr->environment();
495
- }
496
-
497
- RegisterEnvironmentForDeoptimization(deoptimization_environment);
498
- RecordSafepoint(instr->pointer_map(),
499
- deoptimization_environment->deoptimization_index());
500
- }
501
-
502
-
503
- void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
504
- if (!environment->HasBeenRegistered()) {
505
- // Physical stack frame layout:
506
- // -x ............. -4 0 ..................................... y
507
- // [incoming arguments] [spill slots] [pushed outgoing arguments]
508
-
509
- // Layout of the environment:
510
- // 0 ..................................................... size-1
511
- // [parameters] [locals] [expression stack including arguments]
512
-
513
- // Layout of the translation:
514
- // 0 ........................................................ size - 1 + 4
515
- // [expression stack including arguments] [locals] [4 words] [parameters]
516
- // |>------------ translation_size ------------<|
517
-
518
- int frame_count = 0;
519
- for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
520
- ++frame_count;
521
- }
522
- Translation translation(&translations_, frame_count);
523
- WriteTranslation(environment, &translation);
524
- int deoptimization_index = deoptimizations_.length();
525
- environment->Register(deoptimization_index, translation.index());
526
- deoptimizations_.Add(environment);
527
- }
528
- }
529
-
530
-
531
- void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
532
- RegisterEnvironmentForDeoptimization(environment);
533
- ASSERT(environment->HasBeenRegistered());
534
- int id = environment->deoptimization_index();
535
- Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
536
- ASSERT(entry != NULL);
537
- if (entry == NULL) {
538
- Abort("bailout was not prepared");
539
- return;
540
- }
541
-
542
- if (FLAG_deopt_every_n_times != 0) {
543
- Handle<SharedFunctionInfo> shared(info_->shared_info());
544
- Label no_deopt;
545
- __ pushfd();
546
- __ push(eax);
547
- __ push(ebx);
548
- __ mov(ebx, shared);
549
- __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
550
- __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
551
- __ j(not_zero, &no_deopt);
552
- if (FLAG_trap_on_deopt) __ int3();
553
- __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
554
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
555
- __ pop(ebx);
556
- __ pop(eax);
557
- __ popfd();
558
- __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
559
-
560
- __ bind(&no_deopt);
561
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
562
- __ pop(ebx);
563
- __ pop(eax);
564
- __ popfd();
565
- }
566
-
567
- if (cc == no_condition) {
568
- if (FLAG_trap_on_deopt) __ int3();
569
- __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
570
- } else {
571
- if (FLAG_trap_on_deopt) {
572
- NearLabel done;
573
- __ j(NegateCondition(cc), &done);
574
- __ int3();
575
- __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
576
- __ bind(&done);
577
- } else {
578
- __ j(cc, entry, RelocInfo::RUNTIME_ENTRY, not_taken);
579
- }
580
- }
581
- }
582
-
583
-
584
- void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
585
- int length = deoptimizations_.length();
586
- if (length == 0) return;
587
- ASSERT(FLAG_deopt);
588
- Handle<DeoptimizationInputData> data =
589
- Factory::NewDeoptimizationInputData(length, TENURED);
590
-
591
- Handle<ByteArray> translations = translations_.CreateByteArray();
592
- data->SetTranslationByteArray(*translations);
593
- data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
594
-
595
- Handle<FixedArray> literals =
596
- Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
597
- for (int i = 0; i < deoptimization_literals_.length(); i++) {
598
- literals->set(i, *deoptimization_literals_[i]);
599
- }
600
- data->SetLiteralArray(*literals);
601
-
602
- data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
603
- data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
604
-
605
- // Populate the deoptimization entries.
606
- for (int i = 0; i < length; i++) {
607
- LEnvironment* env = deoptimizations_[i];
608
- data->SetAstId(i, Smi::FromInt(env->ast_id()));
609
- data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
610
- data->SetArgumentsStackHeight(i,
611
- Smi::FromInt(env->arguments_stack_height()));
612
- }
613
- code->set_deoptimization_data(*data);
614
- }
615
-
616
-
617
- int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
618
- int result = deoptimization_literals_.length();
619
- for (int i = 0; i < deoptimization_literals_.length(); ++i) {
620
- if (deoptimization_literals_[i].is_identical_to(literal)) return i;
621
- }
622
- deoptimization_literals_.Add(literal);
623
- return result;
624
- }
625
-
626
-
627
- void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
628
- ASSERT(deoptimization_literals_.length() == 0);
629
-
630
- const ZoneList<Handle<JSFunction> >* inlined_closures =
631
- chunk()->inlined_closures();
632
-
633
- for (int i = 0, length = inlined_closures->length();
634
- i < length;
635
- i++) {
636
- DefineDeoptimizationLiteral(inlined_closures->at(i));
637
- }
638
-
639
- inlined_function_count_ = deoptimization_literals_.length();
640
- }
641
-
642
-
643
- void LCodeGen::RecordSafepoint(
644
- LPointerMap* pointers,
645
- Safepoint::Kind kind,
646
- int arguments,
647
- int deoptimization_index) {
648
- const ZoneList<LOperand*>* operands = pointers->operands();
649
- Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
650
- kind, arguments, deoptimization_index);
651
- for (int i = 0; i < operands->length(); i++) {
652
- LOperand* pointer = operands->at(i);
653
- if (pointer->IsStackSlot()) {
654
- safepoint.DefinePointerSlot(pointer->index());
655
- } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
656
- safepoint.DefinePointerRegister(ToRegister(pointer));
657
- }
658
- }
659
- }
660
-
661
-
662
- void LCodeGen::RecordSafepoint(LPointerMap* pointers,
663
- int deoptimization_index) {
664
- RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
665
- }
666
-
667
-
668
- void LCodeGen::RecordSafepoint(int deoptimization_index) {
669
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
670
- RecordSafepoint(&empty_pointers, deoptimization_index);
671
- }
672
-
673
-
674
- void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
675
- int arguments,
676
- int deoptimization_index) {
677
- RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
678
- deoptimization_index);
679
- }
680
-
681
-
682
- void LCodeGen::RecordPosition(int position) {
683
- if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
684
- masm()->positions_recorder()->RecordPosition(position);
685
- }
686
-
687
-
688
- void LCodeGen::DoLabel(LLabel* label) {
689
- if (label->is_loop_header()) {
690
- Comment(";;; B%d - LOOP entry", label->block_id());
691
- } else {
692
- Comment(";;; B%d", label->block_id());
693
- }
694
- __ bind(label->label());
695
- current_block_ = label->block_id();
696
- LCodeGen::DoGap(label);
697
- }
698
-
699
-
700
- void LCodeGen::DoParallelMove(LParallelMove* move) {
701
- resolver_.Resolve(move);
702
- }
703
-
704
-
705
- void LCodeGen::DoGap(LGap* gap) {
706
- for (int i = LGap::FIRST_INNER_POSITION;
707
- i <= LGap::LAST_INNER_POSITION;
708
- i++) {
709
- LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
710
- LParallelMove* move = gap->GetParallelMove(inner_pos);
711
- if (move != NULL) DoParallelMove(move);
712
- }
713
-
714
- LInstruction* next = GetNextInstruction();
715
- if (next != NULL && next->IsLazyBailout()) {
716
- int pc = masm()->pc_offset();
717
- safepoints_.SetPcAfterGap(pc);
718
- }
719
- }
720
-
721
-
722
- void LCodeGen::DoParameter(LParameter* instr) {
723
- // Nothing to do.
724
- }
725
-
726
-
727
- void LCodeGen::DoCallStub(LCallStub* instr) {
728
- ASSERT(ToRegister(instr->context()).is(esi));
729
- ASSERT(ToRegister(instr->result()).is(eax));
730
- switch (instr->hydrogen()->major_key()) {
731
- case CodeStub::RegExpConstructResult: {
732
- RegExpConstructResultStub stub;
733
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
734
- break;
735
- }
736
- case CodeStub::RegExpExec: {
737
- RegExpExecStub stub;
738
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
739
- break;
740
- }
741
- case CodeStub::SubString: {
742
- SubStringStub stub;
743
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
744
- break;
745
- }
746
- case CodeStub::StringCharAt: {
747
- StringCharAtStub stub;
748
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
749
- break;
750
- }
751
- case CodeStub::MathPow: {
752
- MathPowStub stub;
753
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
754
- break;
755
- }
756
- case CodeStub::NumberToString: {
757
- NumberToStringStub stub;
758
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
759
- break;
760
- }
761
- case CodeStub::StringAdd: {
762
- StringAddStub stub(NO_STRING_ADD_FLAGS);
763
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
764
- break;
765
- }
766
- case CodeStub::StringCompare: {
767
- StringCompareStub stub;
768
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
769
- break;
770
- }
771
- case CodeStub::TranscendentalCache: {
772
- TranscendentalCacheStub stub(instr->transcendental_type(),
773
- TranscendentalCacheStub::TAGGED);
774
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
775
- break;
776
- }
777
- default:
778
- UNREACHABLE();
779
- }
780
- }
781
-
782
-
783
- void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
784
- // Nothing to do.
785
- }
786
-
787
-
788
- void LCodeGen::DoModI(LModI* instr) {
789
- LOperand* right = instr->InputAt(1);
790
- ASSERT(ToRegister(instr->result()).is(edx));
791
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
792
- ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
793
- ASSERT(!ToRegister(instr->InputAt(1)).is(edx));
794
-
795
- Register right_reg = ToRegister(right);
796
-
797
- // Check for x % 0.
798
- if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
799
- __ test(right_reg, ToOperand(right));
800
- DeoptimizeIf(zero, instr->environment());
801
- }
802
-
803
- // Sign extend to edx.
804
- __ cdq();
805
-
806
- // Check for (0 % -x) that will produce negative zero.
807
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
808
- NearLabel positive_left;
809
- NearLabel done;
810
- __ test(eax, Operand(eax));
811
- __ j(not_sign, &positive_left);
812
- __ idiv(right_reg);
813
-
814
- // Test the remainder for 0, because then the result would be -0.
815
- __ test(edx, Operand(edx));
816
- __ j(not_zero, &done);
817
-
818
- DeoptimizeIf(no_condition, instr->environment());
819
- __ bind(&positive_left);
820
- __ idiv(right_reg);
821
- __ bind(&done);
822
- } else {
823
- __ idiv(right_reg);
824
- }
825
- }
826
-
827
-
828
- void LCodeGen::DoDivI(LDivI* instr) {
829
- LOperand* right = instr->InputAt(1);
830
- ASSERT(ToRegister(instr->result()).is(eax));
831
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
832
- ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
833
- ASSERT(!ToRegister(instr->InputAt(1)).is(edx));
834
-
835
- Register left_reg = eax;
836
-
837
- // Check for x / 0.
838
- Register right_reg = ToRegister(right);
839
- if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
840
- __ test(right_reg, ToOperand(right));
841
- DeoptimizeIf(zero, instr->environment());
842
- }
843
-
844
- // Check for (0 / -x) that will produce negative zero.
845
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
846
- NearLabel left_not_zero;
847
- __ test(left_reg, Operand(left_reg));
848
- __ j(not_zero, &left_not_zero);
849
- __ test(right_reg, ToOperand(right));
850
- DeoptimizeIf(sign, instr->environment());
851
- __ bind(&left_not_zero);
852
- }
853
-
854
- // Check for (-kMinInt / -1).
855
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
856
- NearLabel left_not_min_int;
857
- __ cmp(left_reg, kMinInt);
858
- __ j(not_zero, &left_not_min_int);
859
- __ cmp(right_reg, -1);
860
- DeoptimizeIf(zero, instr->environment());
861
- __ bind(&left_not_min_int);
862
- }
863
-
864
- // Sign extend to edx.
865
- __ cdq();
866
- __ idiv(right_reg);
867
-
868
- // Deoptimize if remainder is not 0.
869
- __ test(edx, Operand(edx));
870
- DeoptimizeIf(not_zero, instr->environment());
871
- }
872
-
873
-
874
- void LCodeGen::DoMulI(LMulI* instr) {
875
- Register left = ToRegister(instr->InputAt(0));
876
- LOperand* right = instr->InputAt(1);
877
-
878
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
879
- __ mov(ToRegister(instr->TempAt(0)), left);
880
- }
881
-
882
- if (right->IsConstantOperand()) {
883
- __ imul(left, left, ToInteger32(LConstantOperand::cast(right)));
884
- } else {
885
- __ imul(left, ToOperand(right));
886
- }
887
-
888
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
889
- DeoptimizeIf(overflow, instr->environment());
890
- }
891
-
892
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
893
- // Bail out if the result is supposed to be negative zero.
894
- NearLabel done;
895
- __ test(left, Operand(left));
896
- __ j(not_zero, &done);
897
- if (right->IsConstantOperand()) {
898
- if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
899
- DeoptimizeIf(no_condition, instr->environment());
900
- }
901
- } else {
902
- // Test the non-zero operand for negative sign.
903
- __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
904
- DeoptimizeIf(sign, instr->environment());
905
- }
906
- __ bind(&done);
907
- }
908
- }
909
-
910
-
911
- void LCodeGen::DoBitI(LBitI* instr) {
912
- LOperand* left = instr->InputAt(0);
913
- LOperand* right = instr->InputAt(1);
914
- ASSERT(left->Equals(instr->result()));
915
- ASSERT(left->IsRegister());
916
-
917
- if (right->IsConstantOperand()) {
918
- int right_operand = ToInteger32(LConstantOperand::cast(right));
919
- switch (instr->op()) {
920
- case Token::BIT_AND:
921
- __ and_(ToRegister(left), right_operand);
922
- break;
923
- case Token::BIT_OR:
924
- __ or_(ToRegister(left), right_operand);
925
- break;
926
- case Token::BIT_XOR:
927
- __ xor_(ToRegister(left), right_operand);
928
- break;
929
- default:
930
- UNREACHABLE();
931
- break;
932
- }
933
- } else {
934
- switch (instr->op()) {
935
- case Token::BIT_AND:
936
- __ and_(ToRegister(left), ToOperand(right));
937
- break;
938
- case Token::BIT_OR:
939
- __ or_(ToRegister(left), ToOperand(right));
940
- break;
941
- case Token::BIT_XOR:
942
- __ xor_(ToRegister(left), ToOperand(right));
943
- break;
944
- default:
945
- UNREACHABLE();
946
- break;
947
- }
948
- }
949
- }
950
-
951
-
952
- void LCodeGen::DoShiftI(LShiftI* instr) {
953
- LOperand* left = instr->InputAt(0);
954
- LOperand* right = instr->InputAt(1);
955
- ASSERT(left->Equals(instr->result()));
956
- ASSERT(left->IsRegister());
957
- if (right->IsRegister()) {
958
- ASSERT(ToRegister(right).is(ecx));
959
-
960
- switch (instr->op()) {
961
- case Token::SAR:
962
- __ sar_cl(ToRegister(left));
963
- break;
964
- case Token::SHR:
965
- __ shr_cl(ToRegister(left));
966
- if (instr->can_deopt()) {
967
- __ test(ToRegister(left), Immediate(0x80000000));
968
- DeoptimizeIf(not_zero, instr->environment());
969
- }
970
- break;
971
- case Token::SHL:
972
- __ shl_cl(ToRegister(left));
973
- break;
974
- default:
975
- UNREACHABLE();
976
- break;
977
- }
978
- } else {
979
- int value = ToInteger32(LConstantOperand::cast(right));
980
- uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
981
- switch (instr->op()) {
982
- case Token::SAR:
983
- if (shift_count != 0) {
984
- __ sar(ToRegister(left), shift_count);
985
- }
986
- break;
987
- case Token::SHR:
988
- if (shift_count == 0 && instr->can_deopt()) {
989
- __ test(ToRegister(left), Immediate(0x80000000));
990
- DeoptimizeIf(not_zero, instr->environment());
991
- } else {
992
- __ shr(ToRegister(left), shift_count);
993
- }
994
- break;
995
- case Token::SHL:
996
- if (shift_count != 0) {
997
- __ shl(ToRegister(left), shift_count);
998
- }
999
- break;
1000
- default:
1001
- UNREACHABLE();
1002
- break;
1003
- }
1004
- }
1005
- }
1006
-
1007
-
1008
- void LCodeGen::DoSubI(LSubI* instr) {
1009
- LOperand* left = instr->InputAt(0);
1010
- LOperand* right = instr->InputAt(1);
1011
- ASSERT(left->Equals(instr->result()));
1012
-
1013
- if (right->IsConstantOperand()) {
1014
- __ sub(ToOperand(left), ToImmediate(right));
1015
- } else {
1016
- __ sub(ToRegister(left), ToOperand(right));
1017
- }
1018
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1019
- DeoptimizeIf(overflow, instr->environment());
1020
- }
1021
- }
1022
-
1023
-
1024
- void LCodeGen::DoConstantI(LConstantI* instr) {
1025
- ASSERT(instr->result()->IsRegister());
1026
- __ Set(ToRegister(instr->result()), Immediate(instr->value()));
1027
- }
1028
-
1029
-
1030
- void LCodeGen::DoConstantD(LConstantD* instr) {
1031
- ASSERT(instr->result()->IsDoubleRegister());
1032
- XMMRegister res = ToDoubleRegister(instr->result());
1033
- double v = instr->value();
1034
- // Use xor to produce +0.0 in a fast and compact way, but avoid to
1035
- // do so if the constant is -0.0.
1036
- if (BitCast<uint64_t, double>(v) == 0) {
1037
- __ xorpd(res, res);
1038
- } else {
1039
- Register temp = ToRegister(instr->TempAt(0));
1040
- uint64_t int_val = BitCast<uint64_t, double>(v);
1041
- int32_t lower = static_cast<int32_t>(int_val);
1042
- int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
1043
- if (CpuFeatures::IsSupported(SSE4_1)) {
1044
- CpuFeatures::Scope scope(SSE4_1);
1045
- if (lower != 0) {
1046
- __ Set(temp, Immediate(lower));
1047
- __ movd(res, Operand(temp));
1048
- __ Set(temp, Immediate(upper));
1049
- __ pinsrd(res, Operand(temp), 1);
1050
- } else {
1051
- __ xorpd(res, res);
1052
- __ Set(temp, Immediate(upper));
1053
- __ pinsrd(res, Operand(temp), 1);
1054
- }
1055
- } else {
1056
- __ Set(temp, Immediate(upper));
1057
- __ movd(res, Operand(temp));
1058
- __ psllq(res, 32);
1059
- if (lower != 0) {
1060
- __ Set(temp, Immediate(lower));
1061
- __ movd(xmm0, Operand(temp));
1062
- __ por(res, xmm0);
1063
- }
1064
- }
1065
- }
1066
- }
1067
-
1068
-
1069
- void LCodeGen::DoConstantT(LConstantT* instr) {
1070
- ASSERT(instr->result()->IsRegister());
1071
- __ Set(ToRegister(instr->result()), Immediate(instr->value()));
1072
- }
1073
-
1074
-
1075
- void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1076
- Register result = ToRegister(instr->result());
1077
- Register array = ToRegister(instr->InputAt(0));
1078
- __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
1079
- }
1080
-
1081
-
1082
- void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
1083
- Register result = ToRegister(instr->result());
1084
- Register array = ToRegister(instr->InputAt(0));
1085
- __ mov(result, FieldOperand(array, FixedArray::kLengthOffset));
1086
- }
1087
-
1088
-
1089
- void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) {
1090
- Register result = ToRegister(instr->result());
1091
- Register array = ToRegister(instr->InputAt(0));
1092
- __ mov(result, FieldOperand(array, PixelArray::kLengthOffset));
1093
- }
1094
-
1095
-
1096
- void LCodeGen::DoValueOf(LValueOf* instr) {
1097
- Register input = ToRegister(instr->InputAt(0));
1098
- Register result = ToRegister(instr->result());
1099
- Register map = ToRegister(instr->TempAt(0));
1100
- ASSERT(input.is(result));
1101
- NearLabel done;
1102
- // If the object is a smi return the object.
1103
- __ test(input, Immediate(kSmiTagMask));
1104
- __ j(zero, &done);
1105
-
1106
- // If the object is not a value type, return the object.
1107
- __ CmpObjectType(input, JS_VALUE_TYPE, map);
1108
- __ j(not_equal, &done);
1109
- __ mov(result, FieldOperand(input, JSValue::kValueOffset));
1110
-
1111
- __ bind(&done);
1112
- }
1113
-
1114
-
1115
- void LCodeGen::DoBitNotI(LBitNotI* instr) {
1116
- LOperand* input = instr->InputAt(0);
1117
- ASSERT(input->Equals(instr->result()));
1118
- __ not_(ToRegister(input));
1119
- }
1120
-
1121
-
1122
- void LCodeGen::DoThrow(LThrow* instr) {
1123
- __ push(ToOperand(instr->InputAt(0)));
1124
- CallRuntime(Runtime::kThrow, 1, instr, false);
1125
-
1126
- if (FLAG_debug_code) {
1127
- Comment("Unreachable code.");
1128
- __ int3();
1129
- }
1130
- }
1131
-
1132
-
1133
- void LCodeGen::DoAddI(LAddI* instr) {
1134
- LOperand* left = instr->InputAt(0);
1135
- LOperand* right = instr->InputAt(1);
1136
- ASSERT(left->Equals(instr->result()));
1137
-
1138
- if (right->IsConstantOperand()) {
1139
- __ add(ToOperand(left), ToImmediate(right));
1140
- } else {
1141
- __ add(ToRegister(left), ToOperand(right));
1142
- }
1143
-
1144
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1145
- DeoptimizeIf(overflow, instr->environment());
1146
- }
1147
- }
1148
-
1149
-
1150
- void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1151
- LOperand* left = instr->InputAt(0);
1152
- LOperand* right = instr->InputAt(1);
1153
- // Modulo uses a fixed result register.
1154
- ASSERT(instr->op() == Token::MOD || left->Equals(instr->result()));
1155
- switch (instr->op()) {
1156
- case Token::ADD:
1157
- __ addsd(ToDoubleRegister(left), ToDoubleRegister(right));
1158
- break;
1159
- case Token::SUB:
1160
- __ subsd(ToDoubleRegister(left), ToDoubleRegister(right));
1161
- break;
1162
- case Token::MUL:
1163
- __ mulsd(ToDoubleRegister(left), ToDoubleRegister(right));
1164
- break;
1165
- case Token::DIV:
1166
- __ divsd(ToDoubleRegister(left), ToDoubleRegister(right));
1167
- break;
1168
- case Token::MOD: {
1169
- // Pass two doubles as arguments on the stack.
1170
- __ PrepareCallCFunction(4, eax);
1171
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
1172
- __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
1173
- __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4);
1174
-
1175
- // Return value is in st(0) on ia32.
1176
- // Store it into the (fixed) result register.
1177
- __ sub(Operand(esp), Immediate(kDoubleSize));
1178
- __ fstp_d(Operand(esp, 0));
1179
- __ movdbl(ToDoubleRegister(instr->result()), Operand(esp, 0));
1180
- __ add(Operand(esp), Immediate(kDoubleSize));
1181
- break;
1182
- }
1183
- default:
1184
- UNREACHABLE();
1185
- break;
1186
- }
1187
- }
1188
-
1189
-
1190
- void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1191
- ASSERT(ToRegister(instr->InputAt(0)).is(edx));
1192
- ASSERT(ToRegister(instr->InputAt(1)).is(eax));
1193
- ASSERT(ToRegister(instr->result()).is(eax));
1194
-
1195
- TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
1196
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
1197
- }
1198
-
1199
-
1200
- int LCodeGen::GetNextEmittedBlock(int block) {
1201
- for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1202
- LLabel* label = chunk_->GetLabel(i);
1203
- if (!label->HasReplacement()) return i;
1204
- }
1205
- return -1;
1206
- }
1207
-
1208
-
1209
- void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
1210
- int next_block = GetNextEmittedBlock(current_block_);
1211
- right_block = chunk_->LookupDestination(right_block);
1212
- left_block = chunk_->LookupDestination(left_block);
1213
-
1214
- if (right_block == left_block) {
1215
- EmitGoto(left_block);
1216
- } else if (left_block == next_block) {
1217
- __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
1218
- } else if (right_block == next_block) {
1219
- __ j(cc, chunk_->GetAssemblyLabel(left_block));
1220
- } else {
1221
- __ j(cc, chunk_->GetAssemblyLabel(left_block));
1222
- __ jmp(chunk_->GetAssemblyLabel(right_block));
1223
- }
1224
- }
1225
-
1226
-
1227
- void LCodeGen::DoBranch(LBranch* instr) {
1228
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1229
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1230
-
1231
- Representation r = instr->hydrogen()->representation();
1232
- if (r.IsInteger32()) {
1233
- Register reg = ToRegister(instr->InputAt(0));
1234
- __ test(reg, Operand(reg));
1235
- EmitBranch(true_block, false_block, not_zero);
1236
- } else if (r.IsDouble()) {
1237
- XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
1238
- __ xorpd(xmm0, xmm0);
1239
- __ ucomisd(reg, xmm0);
1240
- EmitBranch(true_block, false_block, not_equal);
1241
- } else {
1242
- ASSERT(r.IsTagged());
1243
- Register reg = ToRegister(instr->InputAt(0));
1244
- if (instr->hydrogen()->type().IsBoolean()) {
1245
- __ cmp(reg, Factory::true_value());
1246
- EmitBranch(true_block, false_block, equal);
1247
- } else {
1248
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
1249
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1250
-
1251
- __ cmp(reg, Factory::undefined_value());
1252
- __ j(equal, false_label);
1253
- __ cmp(reg, Factory::true_value());
1254
- __ j(equal, true_label);
1255
- __ cmp(reg, Factory::false_value());
1256
- __ j(equal, false_label);
1257
- __ test(reg, Operand(reg));
1258
- __ j(equal, false_label);
1259
- __ test(reg, Immediate(kSmiTagMask));
1260
- __ j(zero, true_label);
1261
-
1262
- // Test for double values. Zero is false.
1263
- NearLabel call_stub;
1264
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
1265
- Factory::heap_number_map());
1266
- __ j(not_equal, &call_stub);
1267
- __ fldz();
1268
- __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
1269
- __ FCmp();
1270
- __ j(zero, false_label);
1271
- __ jmp(true_label);
1272
-
1273
- // The conversion stub doesn't cause garbage collections so it's
1274
- // safe to not record a safepoint after the call.
1275
- __ bind(&call_stub);
1276
- ToBooleanStub stub;
1277
- __ pushad();
1278
- __ push(reg);
1279
- __ CallStub(&stub);
1280
- __ test(eax, Operand(eax));
1281
- __ popad();
1282
- EmitBranch(true_block, false_block, not_zero);
1283
- }
1284
- }
1285
- }
1286
-
1287
-
1288
- void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
1289
- block = chunk_->LookupDestination(block);
1290
- int next_block = GetNextEmittedBlock(current_block_);
1291
- if (block != next_block) {
1292
- // Perform stack overflow check if this goto needs it before jumping.
1293
- if (deferred_stack_check != NULL) {
1294
- ExternalReference stack_limit =
1295
- ExternalReference::address_of_stack_limit();
1296
- __ cmp(esp, Operand::StaticVariable(stack_limit));
1297
- __ j(above_equal, chunk_->GetAssemblyLabel(block));
1298
- __ jmp(deferred_stack_check->entry());
1299
- deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1300
- } else {
1301
- __ jmp(chunk_->GetAssemblyLabel(block));
1302
- }
1303
- }
1304
- }
1305
-
1306
-
1307
- void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
1308
- __ pushad();
1309
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1310
- __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
1311
- RecordSafepointWithRegisters(
1312
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1313
- __ popad();
1314
- }
1315
-
1316
- void LCodeGen::DoGoto(LGoto* instr) {
1317
- class DeferredStackCheck: public LDeferredCode {
1318
- public:
1319
- DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
1320
- : LDeferredCode(codegen), instr_(instr) { }
1321
- virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
1322
- private:
1323
- LGoto* instr_;
1324
- };
1325
-
1326
- DeferredStackCheck* deferred = NULL;
1327
- if (instr->include_stack_check()) {
1328
- deferred = new DeferredStackCheck(this, instr);
1329
- }
1330
- EmitGoto(instr->block_id(), deferred);
1331
- }
1332
-
1333
-
1334
- Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1335
- Condition cond = no_condition;
1336
- switch (op) {
1337
- case Token::EQ:
1338
- case Token::EQ_STRICT:
1339
- cond = equal;
1340
- break;
1341
- case Token::LT:
1342
- cond = is_unsigned ? below : less;
1343
- break;
1344
- case Token::GT:
1345
- cond = is_unsigned ? above : greater;
1346
- break;
1347
- case Token::LTE:
1348
- cond = is_unsigned ? below_equal : less_equal;
1349
- break;
1350
- case Token::GTE:
1351
- cond = is_unsigned ? above_equal : greater_equal;
1352
- break;
1353
- case Token::IN:
1354
- case Token::INSTANCEOF:
1355
- default:
1356
- UNREACHABLE();
1357
- }
1358
- return cond;
1359
- }
1360
-
1361
-
1362
- void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
1363
- if (right->IsConstantOperand()) {
1364
- __ cmp(ToOperand(left), ToImmediate(right));
1365
- } else {
1366
- __ cmp(ToRegister(left), ToOperand(right));
1367
- }
1368
- }
1369
-
1370
-
1371
- void LCodeGen::DoCmpID(LCmpID* instr) {
1372
- LOperand* left = instr->InputAt(0);
1373
- LOperand* right = instr->InputAt(1);
1374
- LOperand* result = instr->result();
1375
-
1376
- NearLabel unordered;
1377
- if (instr->is_double()) {
1378
- // Don't base result on EFLAGS when a NaN is involved. Instead
1379
- // jump to the unordered case, which produces a false value.
1380
- __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1381
- __ j(parity_even, &unordered, not_taken);
1382
- } else {
1383
- EmitCmpI(left, right);
1384
- }
1385
-
1386
- NearLabel done;
1387
- Condition cc = TokenToCondition(instr->op(), instr->is_double());
1388
- __ mov(ToRegister(result), Factory::true_value());
1389
- __ j(cc, &done);
1390
-
1391
- __ bind(&unordered);
1392
- __ mov(ToRegister(result), Factory::false_value());
1393
- __ bind(&done);
1394
- }
1395
-
1396
-
1397
- void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1398
- LOperand* left = instr->InputAt(0);
1399
- LOperand* right = instr->InputAt(1);
1400
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1401
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1402
-
1403
- if (instr->is_double()) {
1404
- // Don't base result on EFLAGS when a NaN is involved. Instead
1405
- // jump to the false block.
1406
- __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1407
- __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
1408
- } else {
1409
- EmitCmpI(left, right);
1410
- }
1411
-
1412
- Condition cc = TokenToCondition(instr->op(), instr->is_double());
1413
- EmitBranch(true_block, false_block, cc);
1414
- }
1415
-
1416
-
1417
- void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
1418
- Register left = ToRegister(instr->InputAt(0));
1419
- Register right = ToRegister(instr->InputAt(1));
1420
- Register result = ToRegister(instr->result());
1421
-
1422
- __ cmp(left, Operand(right));
1423
- __ mov(result, Factory::true_value());
1424
- NearLabel done;
1425
- __ j(equal, &done);
1426
- __ mov(result, Factory::false_value());
1427
- __ bind(&done);
1428
- }
1429
-
1430
-
1431
- void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1432
- Register left = ToRegister(instr->InputAt(0));
1433
- Register right = ToRegister(instr->InputAt(1));
1434
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1435
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1436
-
1437
- __ cmp(left, Operand(right));
1438
- EmitBranch(true_block, false_block, equal);
1439
- }
1440
-
1441
-
1442
- void LCodeGen::DoIsNull(LIsNull* instr) {
1443
- Register reg = ToRegister(instr->InputAt(0));
1444
- Register result = ToRegister(instr->result());
1445
-
1446
- // TODO(fsc): If the expression is known to be a smi, then it's
1447
- // definitely not null. Materialize false.
1448
-
1449
- __ cmp(reg, Factory::null_value());
1450
- if (instr->is_strict()) {
1451
- __ mov(result, Factory::true_value());
1452
- NearLabel done;
1453
- __ j(equal, &done);
1454
- __ mov(result, Factory::false_value());
1455
- __ bind(&done);
1456
- } else {
1457
- NearLabel true_value, false_value, done;
1458
- __ j(equal, &true_value);
1459
- __ cmp(reg, Factory::undefined_value());
1460
- __ j(equal, &true_value);
1461
- __ test(reg, Immediate(kSmiTagMask));
1462
- __ j(zero, &false_value);
1463
- // Check for undetectable objects by looking in the bit field in
1464
- // the map. The object has already been smi checked.
1465
- Register scratch = result;
1466
- __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1467
- __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
1468
- __ test(scratch, Immediate(1 << Map::kIsUndetectable));
1469
- __ j(not_zero, &true_value);
1470
- __ bind(&false_value);
1471
- __ mov(result, Factory::false_value());
1472
- __ jmp(&done);
1473
- __ bind(&true_value);
1474
- __ mov(result, Factory::true_value());
1475
- __ bind(&done);
1476
- }
1477
- }
1478
-
1479
-
1480
- void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
1481
- Register reg = ToRegister(instr->InputAt(0));
1482
-
1483
- // TODO(fsc): If the expression is known to be a smi, then it's
1484
- // definitely not null. Jump to the false block.
1485
-
1486
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1487
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1488
-
1489
- __ cmp(reg, Factory::null_value());
1490
- if (instr->is_strict()) {
1491
- EmitBranch(true_block, false_block, equal);
1492
- } else {
1493
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
1494
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1495
- __ j(equal, true_label);
1496
- __ cmp(reg, Factory::undefined_value());
1497
- __ j(equal, true_label);
1498
- __ test(reg, Immediate(kSmiTagMask));
1499
- __ j(zero, false_label);
1500
- // Check for undetectable objects by looking in the bit field in
1501
- // the map. The object has already been smi checked.
1502
- Register scratch = ToRegister(instr->TempAt(0));
1503
- __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1504
- __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
1505
- __ test(scratch, Immediate(1 << Map::kIsUndetectable));
1506
- EmitBranch(true_block, false_block, not_zero);
1507
- }
1508
- }
1509
-
1510
-
1511
- Condition LCodeGen::EmitIsObject(Register input,
1512
- Register temp1,
1513
- Register temp2,
1514
- Label* is_not_object,
1515
- Label* is_object) {
1516
- ASSERT(!input.is(temp1));
1517
- ASSERT(!input.is(temp2));
1518
- ASSERT(!temp1.is(temp2));
1519
-
1520
- __ test(input, Immediate(kSmiTagMask));
1521
- __ j(equal, is_not_object);
1522
-
1523
- __ cmp(input, Factory::null_value());
1524
- __ j(equal, is_object);
1525
-
1526
- __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
1527
- // Undetectable objects behave like undefined.
1528
- __ movzx_b(temp2, FieldOperand(temp1, Map::kBitFieldOffset));
1529
- __ test(temp2, Immediate(1 << Map::kIsUndetectable));
1530
- __ j(not_zero, is_not_object);
1531
-
1532
- __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset));
1533
- __ cmp(temp2, FIRST_JS_OBJECT_TYPE);
1534
- __ j(below, is_not_object);
1535
- __ cmp(temp2, LAST_JS_OBJECT_TYPE);
1536
- return below_equal;
1537
- }
1538
-
1539
-
1540
- void LCodeGen::DoIsObject(LIsObject* instr) {
1541
- Register reg = ToRegister(instr->InputAt(0));
1542
- Register result = ToRegister(instr->result());
1543
- Register temp = ToRegister(instr->TempAt(0));
1544
- Label is_false, is_true, done;
1545
-
1546
- Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
1547
- __ j(true_cond, &is_true);
1548
-
1549
- __ bind(&is_false);
1550
- __ mov(result, Factory::false_value());
1551
- __ jmp(&done);
1552
-
1553
- __ bind(&is_true);
1554
- __ mov(result, Factory::true_value());
1555
-
1556
- __ bind(&done);
1557
- }
1558
-
1559
-
1560
- void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1561
- Register reg = ToRegister(instr->InputAt(0));
1562
- Register temp = ToRegister(instr->TempAt(0));
1563
- Register temp2 = ToRegister(instr->TempAt(1));
1564
-
1565
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1566
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1567
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
1568
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1569
-
1570
- Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label);
1571
-
1572
- EmitBranch(true_block, false_block, true_cond);
1573
- }
1574
-
1575
-
1576
- void LCodeGen::DoIsSmi(LIsSmi* instr) {
1577
- Operand input = ToOperand(instr->InputAt(0));
1578
- Register result = ToRegister(instr->result());
1579
-
1580
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1581
- __ test(input, Immediate(kSmiTagMask));
1582
- __ mov(result, Factory::true_value());
1583
- NearLabel done;
1584
- __ j(zero, &done);
1585
- __ mov(result, Factory::false_value());
1586
- __ bind(&done);
1587
- }
1588
-
1589
-
1590
- void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1591
- Operand input = ToOperand(instr->InputAt(0));
1592
-
1593
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1594
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1595
-
1596
- __ test(input, Immediate(kSmiTagMask));
1597
- EmitBranch(true_block, false_block, zero);
1598
- }
1599
-
1600
-
1601
- static InstanceType TestType(HHasInstanceType* instr) {
1602
- InstanceType from = instr->from();
1603
- InstanceType to = instr->to();
1604
- if (from == FIRST_TYPE) return to;
1605
- ASSERT(from == to || to == LAST_TYPE);
1606
- return from;
1607
- }
1608
-
1609
-
1610
- static Condition BranchCondition(HHasInstanceType* instr) {
1611
- InstanceType from = instr->from();
1612
- InstanceType to = instr->to();
1613
- if (from == to) return equal;
1614
- if (to == LAST_TYPE) return above_equal;
1615
- if (from == FIRST_TYPE) return below_equal;
1616
- UNREACHABLE();
1617
- return equal;
1618
- }
1619
-
1620
-
1621
- void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
1622
- Register input = ToRegister(instr->InputAt(0));
1623
- Register result = ToRegister(instr->result());
1624
-
1625
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1626
- __ test(input, Immediate(kSmiTagMask));
1627
- NearLabel done, is_false;
1628
- __ j(zero, &is_false);
1629
- __ CmpObjectType(input, TestType(instr->hydrogen()), result);
1630
- __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
1631
- __ mov(result, Factory::true_value());
1632
- __ jmp(&done);
1633
- __ bind(&is_false);
1634
- __ mov(result, Factory::false_value());
1635
- __ bind(&done);
1636
- }
1637
-
1638
-
1639
- void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1640
- Register input = ToRegister(instr->InputAt(0));
1641
- Register temp = ToRegister(instr->TempAt(0));
1642
-
1643
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1644
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1645
-
1646
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1647
-
1648
- __ test(input, Immediate(kSmiTagMask));
1649
- __ j(zero, false_label);
1650
-
1651
- __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
1652
- EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
1653
- }
1654
-
1655
-
1656
- void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
1657
- Register input = ToRegister(instr->InputAt(0));
1658
- Register result = ToRegister(instr->result());
1659
-
1660
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1661
- __ mov(result, Factory::true_value());
1662
- __ test(FieldOperand(input, String::kHashFieldOffset),
1663
- Immediate(String::kContainsCachedArrayIndexMask));
1664
- NearLabel done;
1665
- __ j(not_zero, &done);
1666
- __ mov(result, Factory::false_value());
1667
- __ bind(&done);
1668
- }
1669
-
1670
-
1671
- void LCodeGen::DoHasCachedArrayIndexAndBranch(
1672
- LHasCachedArrayIndexAndBranch* instr) {
1673
- Register input = ToRegister(instr->InputAt(0));
1674
-
1675
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1676
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1677
-
1678
- __ test(FieldOperand(input, String::kHashFieldOffset),
1679
- Immediate(String::kContainsCachedArrayIndexMask));
1680
- EmitBranch(true_block, false_block, not_equal);
1681
- }
1682
-
1683
-
1684
- // Branches to a label or falls through with the answer in the z flag. Trashes
1685
- // the temp registers, but not the input. Only input and temp2 may alias.
1686
- void LCodeGen::EmitClassOfTest(Label* is_true,
1687
- Label* is_false,
1688
- Handle<String>class_name,
1689
- Register input,
1690
- Register temp,
1691
- Register temp2) {
1692
- ASSERT(!input.is(temp));
1693
- ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
1694
- __ test(input, Immediate(kSmiTagMask));
1695
- __ j(zero, is_false);
1696
- __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
1697
- __ j(below, is_false);
1698
-
1699
- // Map is now in temp.
1700
- // Functions have class 'Function'.
1701
- __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
1702
- if (class_name->IsEqualTo(CStrVector("Function"))) {
1703
- __ j(equal, is_true);
1704
- } else {
1705
- __ j(equal, is_false);
1706
- }
1707
-
1708
- // Check if the constructor in the map is a function.
1709
- __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
1710
-
1711
- // As long as JS_FUNCTION_TYPE is the last instance type and it is
1712
- // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
1713
- // LAST_JS_OBJECT_TYPE.
1714
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1715
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1716
-
1717
- // Objects with a non-function constructor have class 'Object'.
1718
- __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
1719
- if (class_name->IsEqualTo(CStrVector("Object"))) {
1720
- __ j(not_equal, is_true);
1721
- } else {
1722
- __ j(not_equal, is_false);
1723
- }
1724
-
1725
- // temp now contains the constructor function. Grab the
1726
- // instance class name from there.
1727
- __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1728
- __ mov(temp, FieldOperand(temp,
1729
- SharedFunctionInfo::kInstanceClassNameOffset));
1730
- // The class name we are testing against is a symbol because it's a literal.
1731
- // The name in the constructor is a symbol because of the way the context is
1732
- // booted. This routine isn't expected to work for random API-created
1733
- // classes and it doesn't have to because you can't access it with natives
1734
- // syntax. Since both sides are symbols it is sufficient to use an identity
1735
- // comparison.
1736
- __ cmp(temp, class_name);
1737
- // End with the answer in the z flag.
1738
- }
1739
-
1740
-
1741
- void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
1742
- Register input = ToRegister(instr->InputAt(0));
1743
- Register result = ToRegister(instr->result());
1744
- ASSERT(input.is(result));
1745
- Register temp = ToRegister(instr->TempAt(0));
1746
- Handle<String> class_name = instr->hydrogen()->class_name();
1747
- NearLabel done;
1748
- Label is_true, is_false;
1749
-
1750
- EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input);
1751
-
1752
- __ j(not_equal, &is_false);
1753
-
1754
- __ bind(&is_true);
1755
- __ mov(result, Factory::true_value());
1756
- __ jmp(&done);
1757
-
1758
- __ bind(&is_false);
1759
- __ mov(result, Factory::false_value());
1760
- __ bind(&done);
1761
- }
1762
-
1763
-
1764
- void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1765
- Register input = ToRegister(instr->InputAt(0));
1766
- Register temp = ToRegister(instr->TempAt(0));
1767
- Register temp2 = ToRegister(instr->TempAt(1));
1768
- if (input.is(temp)) {
1769
- // Swap.
1770
- Register swapper = temp;
1771
- temp = temp2;
1772
- temp2 = swapper;
1773
- }
1774
- Handle<String> class_name = instr->hydrogen()->class_name();
1775
-
1776
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1777
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1778
-
1779
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
1780
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1781
-
1782
- EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1783
-
1784
- EmitBranch(true_block, false_block, equal);
1785
- }
1786
-
1787
-
1788
- void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1789
- Register reg = ToRegister(instr->InputAt(0));
1790
- int true_block = instr->true_block_id();
1791
- int false_block = instr->false_block_id();
1792
-
1793
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1794
- EmitBranch(true_block, false_block, equal);
1795
- }
1796
-
1797
-
1798
- void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1799
- // Object and function are in fixed registers defined by the stub.
1800
- ASSERT(ToRegister(instr->context()).is(esi));
1801
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1802
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1803
-
1804
- NearLabel true_value, done;
1805
- __ test(eax, Operand(eax));
1806
- __ j(zero, &true_value);
1807
- __ mov(ToRegister(instr->result()), Factory::false_value());
1808
- __ jmp(&done);
1809
- __ bind(&true_value);
1810
- __ mov(ToRegister(instr->result()), Factory::true_value());
1811
- __ bind(&done);
1812
- }
1813
-
1814
-
1815
- void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1816
- ASSERT(ToRegister(instr->context()).is(esi));
1817
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1818
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1819
-
1820
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1821
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1822
- __ test(eax, Operand(eax));
1823
- EmitBranch(true_block, false_block, zero);
1824
- }
1825
-
1826
-
1827
- void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1828
- class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1829
- public:
1830
- DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1831
- LInstanceOfKnownGlobal* instr)
1832
- : LDeferredCode(codegen), instr_(instr) { }
1833
- virtual void Generate() {
1834
- codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
1835
- }
1836
-
1837
- Label* map_check() { return &map_check_; }
1838
-
1839
- private:
1840
- LInstanceOfKnownGlobal* instr_;
1841
- Label map_check_;
1842
- };
1843
-
1844
- DeferredInstanceOfKnownGlobal* deferred;
1845
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1846
-
1847
- Label done, false_result;
1848
- Register object = ToRegister(instr->InputAt(0));
1849
- Register temp = ToRegister(instr->TempAt(0));
1850
-
1851
- // A Smi is not an instance of anything.
1852
- __ test(object, Immediate(kSmiTagMask));
1853
- __ j(zero, &false_result, not_taken);
1854
-
1855
- // This is the inlined call site instanceof cache. The two occurences of the
1856
- // hole value will be patched to the last map/result pair generated by the
1857
- // instanceof stub.
1858
- NearLabel cache_miss;
1859
- Register map = ToRegister(instr->TempAt(0));
1860
- __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
1861
- __ bind(deferred->map_check()); // Label for calculating code patching.
1862
- __ cmp(map, Factory::the_hole_value()); // Patched to cached map.
1863
- __ j(not_equal, &cache_miss, not_taken);
1864
- __ mov(eax, Factory::the_hole_value()); // Patched to either true or false.
1865
- __ jmp(&done);
1866
-
1867
- // The inlined call site cache did not match. Check for null and string
1868
- // before calling the deferred code.
1869
- __ bind(&cache_miss);
1870
- // Null is not an instance of anything.
1871
- __ cmp(object, Factory::null_value());
1872
- __ j(equal, &false_result);
1873
-
1874
- // String values are not instances of anything.
1875
- Condition is_string = masm_->IsObjectStringType(object, temp, temp);
1876
- __ j(is_string, &false_result);
1877
-
1878
- // Go to the deferred code.
1879
- __ jmp(deferred->entry());
1880
-
1881
- __ bind(&false_result);
1882
- __ mov(ToRegister(instr->result()), Factory::false_value());
1883
-
1884
- // Here result has either true or false. Deferred code also produces true or
1885
- // false object.
1886
- __ bind(deferred->exit());
1887
- __ bind(&done);
1888
- }
1889
-
1890
-
1891
- void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1892
- Label* map_check) {
1893
- __ PushSafepointRegisters();
1894
-
1895
- InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
1896
- flags = static_cast<InstanceofStub::Flags>(
1897
- flags | InstanceofStub::kArgsInRegisters);
1898
- flags = static_cast<InstanceofStub::Flags>(
1899
- flags | InstanceofStub::kCallSiteInlineCheck);
1900
- flags = static_cast<InstanceofStub::Flags>(
1901
- flags | InstanceofStub::kReturnTrueFalseObject);
1902
- InstanceofStub stub(flags);
1903
-
1904
- // Get the temp register reserved by the instruction. This needs to be edi as
1905
- // its slot of the pushing of safepoint registers is used to communicate the
1906
- // offset to the location of the map check.
1907
- Register temp = ToRegister(instr->TempAt(0));
1908
- ASSERT(temp.is(edi));
1909
- __ mov(InstanceofStub::right(), Immediate(instr->function()));
1910
- static const int kAdditionalDelta = 16;
1911
- int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1912
- Label before_push_delta;
1913
- __ bind(&before_push_delta);
1914
- __ mov(temp, Immediate(delta));
1915
- __ StoreToSafepointRegisterSlot(temp, temp);
1916
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
1917
- // Put the result value into the eax slot and restore all registers.
1918
- __ StoreToSafepointRegisterSlot(eax, eax);
1919
- __ PopSafepointRegisters();
1920
- }
1921
-
1922
-
1923
- static Condition ComputeCompareCondition(Token::Value op) {
1924
- switch (op) {
1925
- case Token::EQ_STRICT:
1926
- case Token::EQ:
1927
- return equal;
1928
- case Token::LT:
1929
- return less;
1930
- case Token::GT:
1931
- return greater;
1932
- case Token::LTE:
1933
- return less_equal;
1934
- case Token::GTE:
1935
- return greater_equal;
1936
- default:
1937
- UNREACHABLE();
1938
- return no_condition;
1939
- }
1940
- }
1941
-
1942
-
1943
- void LCodeGen::DoCmpT(LCmpT* instr) {
1944
- Token::Value op = instr->op();
1945
-
1946
- Handle<Code> ic = CompareIC::GetUninitialized(op);
1947
- CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
1948
-
1949
- Condition condition = ComputeCompareCondition(op);
1950
- if (op == Token::GT || op == Token::LTE) {
1951
- condition = ReverseCondition(condition);
1952
- }
1953
- NearLabel true_value, done;
1954
- __ test(eax, Operand(eax));
1955
- __ j(condition, &true_value);
1956
- __ mov(ToRegister(instr->result()), Factory::false_value());
1957
- __ jmp(&done);
1958
- __ bind(&true_value);
1959
- __ mov(ToRegister(instr->result()), Factory::true_value());
1960
- __ bind(&done);
1961
- }
1962
-
1963
-
1964
- void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
1965
- Token::Value op = instr->op();
1966
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1967
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1968
-
1969
- Handle<Code> ic = CompareIC::GetUninitialized(op);
1970
- CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
1971
-
1972
- // The compare stub expects compare condition and the input operands
1973
- // reversed for GT and LTE.
1974
- Condition condition = ComputeCompareCondition(op);
1975
- if (op == Token::GT || op == Token::LTE) {
1976
- condition = ReverseCondition(condition);
1977
- }
1978
- __ test(eax, Operand(eax));
1979
- EmitBranch(true_block, false_block, condition);
1980
- }
1981
-
1982
-
1983
- void LCodeGen::DoReturn(LReturn* instr) {
1984
- if (FLAG_trace) {
1985
- // Preserve the return value on the stack and rely on the runtime call
1986
- // to return the value in the same register. We're leaving the code
1987
- // managed by the register allocator and tearing down the frame, it's
1988
- // safe to write to the context register.
1989
- __ push(eax);
1990
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1991
- __ CallRuntime(Runtime::kTraceExit, 1);
1992
- }
1993
- __ mov(esp, ebp);
1994
- __ pop(ebp);
1995
- __ Ret((ParameterCount() + 1) * kPointerSize, ecx);
1996
- }
1997
-
1998
-
1999
- void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
2000
- Register result = ToRegister(instr->result());
2001
- __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
2002
- if (instr->hydrogen()->check_hole_value()) {
2003
- __ cmp(result, Factory::the_hole_value());
2004
- DeoptimizeIf(equal, instr->environment());
2005
- }
2006
- }
2007
-
2008
-
2009
- void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
2010
- Register value = ToRegister(instr->InputAt(0));
2011
- Operand cell_operand = Operand::Cell(instr->hydrogen()->cell());
2012
-
2013
- // If the cell we are storing to contains the hole it could have
2014
- // been deleted from the property dictionary. In that case, we need
2015
- // to update the property details in the property dictionary to mark
2016
- // it as no longer deleted. We deoptimize in that case.
2017
- if (instr->hydrogen()->check_hole_value()) {
2018
- __ cmp(cell_operand, Factory::the_hole_value());
2019
- DeoptimizeIf(equal, instr->environment());
2020
- }
2021
-
2022
- // Store the value.
2023
- __ mov(cell_operand, value);
2024
- }
2025
-
2026
-
2027
- void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2028
- Register context = ToRegister(instr->context());
2029
- Register result = ToRegister(instr->result());
2030
- __ mov(result, ContextOperand(context, instr->slot_index()));
2031
- }
2032
-
2033
-
2034
- void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2035
- Register context = ToRegister(instr->context());
2036
- Register value = ToRegister(instr->value());
2037
- __ mov(ContextOperand(context, instr->slot_index()), value);
2038
- if (instr->needs_write_barrier()) {
2039
- Register temp = ToRegister(instr->TempAt(0));
2040
- int offset = Context::SlotOffset(instr->slot_index());
2041
- __ RecordWrite(context, offset, value, temp);
2042
- }
2043
- }
2044
-
2045
-
2046
- void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2047
- Register object = ToRegister(instr->InputAt(0));
2048
- Register result = ToRegister(instr->result());
2049
- if (instr->hydrogen()->is_in_object()) {
2050
- __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
2051
- } else {
2052
- __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2053
- __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
2054
- }
2055
- }
2056
-
2057
-
2058
- void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2059
- ASSERT(ToRegister(instr->context()).is(esi));
2060
- ASSERT(ToRegister(instr->object()).is(eax));
2061
- ASSERT(ToRegister(instr->result()).is(eax));
2062
-
2063
- __ mov(ecx, instr->name());
2064
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
2065
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2066
- }
2067
-
2068
-
2069
- void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2070
- Register function = ToRegister(instr->function());
2071
- Register temp = ToRegister(instr->TempAt(0));
2072
- Register result = ToRegister(instr->result());
2073
-
2074
- // Check that the function really is a function.
2075
- __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
2076
- DeoptimizeIf(not_equal, instr->environment());
2077
-
2078
- // Check whether the function has an instance prototype.
2079
- NearLabel non_instance;
2080
- __ test_b(FieldOperand(result, Map::kBitFieldOffset),
2081
- 1 << Map::kHasNonInstancePrototype);
2082
- __ j(not_zero, &non_instance);
2083
-
2084
- // Get the prototype or initial map from the function.
2085
- __ mov(result,
2086
- FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2087
-
2088
- // Check that the function has a prototype or an initial map.
2089
- __ cmp(Operand(result), Immediate(Factory::the_hole_value()));
2090
- DeoptimizeIf(equal, instr->environment());
2091
-
2092
- // If the function does not have an initial map, we're done.
2093
- NearLabel done;
2094
- __ CmpObjectType(result, MAP_TYPE, temp);
2095
- __ j(not_equal, &done);
2096
-
2097
- // Get the prototype from the initial map.
2098
- __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
2099
- __ jmp(&done);
2100
-
2101
- // Non-instance prototype: Fetch prototype from constructor field
2102
- // in the function's map.
2103
- __ bind(&non_instance);
2104
- __ mov(result, FieldOperand(result, Map::kConstructorOffset));
2105
-
2106
- // All done.
2107
- __ bind(&done);
2108
- }
2109
-
2110
-
2111
- void LCodeGen::DoLoadElements(LLoadElements* instr) {
2112
- Register result = ToRegister(instr->result());
2113
- Register input = ToRegister(instr->InputAt(0));
2114
- __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
2115
- if (FLAG_debug_code) {
2116
- NearLabel done;
2117
- __ cmp(FieldOperand(result, HeapObject::kMapOffset),
2118
- Immediate(Factory::fixed_array_map()));
2119
- __ j(equal, &done);
2120
- __ cmp(FieldOperand(result, HeapObject::kMapOffset),
2121
- Immediate(Factory::pixel_array_map()));
2122
- __ j(equal, &done);
2123
- __ cmp(FieldOperand(result, HeapObject::kMapOffset),
2124
- Immediate(Factory::fixed_cow_array_map()));
2125
- __ Check(equal, "Check for fast elements or pixel array failed.");
2126
- __ bind(&done);
2127
- }
2128
- }
2129
-
2130
-
2131
- void LCodeGen::DoLoadPixelArrayExternalPointer(
2132
- LLoadPixelArrayExternalPointer* instr) {
2133
- Register result = ToRegister(instr->result());
2134
- Register input = ToRegister(instr->InputAt(0));
2135
- __ mov(result, FieldOperand(input, PixelArray::kExternalPointerOffset));
2136
- }
2137
-
2138
-
2139
- void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2140
- Register arguments = ToRegister(instr->arguments());
2141
- Register length = ToRegister(instr->length());
2142
- Operand index = ToOperand(instr->index());
2143
- Register result = ToRegister(instr->result());
2144
-
2145
- __ sub(length, index);
2146
- DeoptimizeIf(below_equal, instr->environment());
2147
-
2148
- // There are two words between the frame pointer and the last argument.
2149
- // Subtracting from length accounts for one of them add one more.
2150
- __ mov(result, Operand(arguments, length, times_4, kPointerSize));
2151
- }
2152
-
2153
-
2154
- void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2155
- Register elements = ToRegister(instr->elements());
2156
- Register key = ToRegister(instr->key());
2157
- Register result = ToRegister(instr->result());
2158
- ASSERT(result.is(elements));
2159
-
2160
- // Load the result.
2161
- __ mov(result, FieldOperand(elements,
2162
- key,
2163
- times_pointer_size,
2164
- FixedArray::kHeaderSize));
2165
-
2166
- // Check for the hole value.
2167
- __ cmp(result, Factory::the_hole_value());
2168
- DeoptimizeIf(equal, instr->environment());
2169
- }
2170
-
2171
-
2172
- void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) {
2173
- Register external_pointer = ToRegister(instr->external_pointer());
2174
- Register key = ToRegister(instr->key());
2175
- Register result = ToRegister(instr->result());
2176
- ASSERT(result.is(external_pointer));
2177
-
2178
- // Load the result.
2179
- __ movzx_b(result, Operand(external_pointer, key, times_1, 0));
2180
- }
2181
-
2182
-
2183
- void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2184
- ASSERT(ToRegister(instr->context()).is(esi));
2185
- ASSERT(ToRegister(instr->object()).is(edx));
2186
- ASSERT(ToRegister(instr->key()).is(eax));
2187
-
2188
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2189
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2190
- }
2191
-
2192
-
2193
- void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2194
- Register result = ToRegister(instr->result());
2195
-
2196
- // Check for arguments adapter frame.
2197
- NearLabel done, adapted;
2198
- __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2199
- __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
2200
- __ cmp(Operand(result),
2201
- Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2202
- __ j(equal, &adapted);
2203
-
2204
- // No arguments adaptor frame.
2205
- __ mov(result, Operand(ebp));
2206
- __ jmp(&done);
2207
-
2208
- // Arguments adaptor frame present.
2209
- __ bind(&adapted);
2210
- __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2211
-
2212
- // Result is the frame pointer for the frame if not adapted and for the real
2213
- // frame below the adaptor frame if adapted.
2214
- __ bind(&done);
2215
- }
2216
-
2217
-
2218
- void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2219
- Operand elem = ToOperand(instr->InputAt(0));
2220
- Register result = ToRegister(instr->result());
2221
-
2222
- NearLabel done;
2223
-
2224
- // If no arguments adaptor frame the number of arguments is fixed.
2225
- __ cmp(ebp, elem);
2226
- __ mov(result, Immediate(scope()->num_parameters()));
2227
- __ j(equal, &done);
2228
-
2229
- // Arguments adaptor frame present. Get argument length from there.
2230
- __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2231
- __ mov(result, Operand(result,
2232
- ArgumentsAdaptorFrameConstants::kLengthOffset));
2233
- __ SmiUntag(result);
2234
-
2235
- // Argument length is in result register.
2236
- __ bind(&done);
2237
- }
2238
-
2239
-
2240
- void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2241
- Register receiver = ToRegister(instr->receiver());
2242
- Register function = ToRegister(instr->function());
2243
- Register length = ToRegister(instr->length());
2244
- Register elements = ToRegister(instr->elements());
2245
- Register scratch = ToRegister(instr->TempAt(0));
2246
- ASSERT(receiver.is(eax)); // Used for parameter count.
2247
- ASSERT(function.is(edi)); // Required by InvokeFunction.
2248
- ASSERT(ToRegister(instr->result()).is(eax));
2249
-
2250
- // If the receiver is null or undefined, we have to pass the global object
2251
- // as a receiver.
2252
- NearLabel global_object, receiver_ok;
2253
- __ cmp(receiver, Factory::null_value());
2254
- __ j(equal, &global_object);
2255
- __ cmp(receiver, Factory::undefined_value());
2256
- __ j(equal, &global_object);
2257
-
2258
- // The receiver should be a JS object.
2259
- __ test(receiver, Immediate(kSmiTagMask));
2260
- DeoptimizeIf(equal, instr->environment());
2261
- __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, scratch);
2262
- DeoptimizeIf(below, instr->environment());
2263
- __ jmp(&receiver_ok);
2264
-
2265
- __ bind(&global_object);
2266
- // TODO(kmillikin): We have a hydrogen value for the global object. See
2267
- // if it's better to use it than to explicitly fetch it from the context
2268
- // here.
2269
- __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
2270
- __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
2271
- __ bind(&receiver_ok);
2272
-
2273
- // Copy the arguments to this function possibly from the
2274
- // adaptor frame below it.
2275
- const uint32_t kArgumentsLimit = 1 * KB;
2276
- __ cmp(length, kArgumentsLimit);
2277
- DeoptimizeIf(above, instr->environment());
2278
-
2279
- __ push(receiver);
2280
- __ mov(receiver, length);
2281
-
2282
- // Loop through the arguments pushing them onto the execution
2283
- // stack.
2284
- NearLabel invoke, loop;
2285
- // length is a small non-negative integer, due to the test above.
2286
- __ test(length, Operand(length));
2287
- __ j(zero, &invoke);
2288
- __ bind(&loop);
2289
- __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
2290
- __ dec(length);
2291
- __ j(not_zero, &loop);
2292
-
2293
- // Invoke the function.
2294
- __ bind(&invoke);
2295
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2296
- LPointerMap* pointers = instr->pointer_map();
2297
- LEnvironment* env = instr->deoptimization_environment();
2298
- RecordPosition(pointers->position());
2299
- RegisterEnvironmentForDeoptimization(env);
2300
- SafepointGenerator safepoint_generator(this,
2301
- pointers,
2302
- env->deoptimization_index(),
2303
- true);
2304
- v8::internal::ParameterCount actual(eax);
2305
- __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
2306
- }
2307
-
2308
-
2309
- void LCodeGen::DoPushArgument(LPushArgument* instr) {
2310
- LOperand* argument = instr->InputAt(0);
2311
- if (argument->IsConstantOperand()) {
2312
- __ push(ToImmediate(argument));
2313
- } else {
2314
- __ push(ToOperand(argument));
2315
- }
2316
- }
2317
-
2318
-
2319
- void LCodeGen::DoContext(LContext* instr) {
2320
- Register result = ToRegister(instr->result());
2321
- __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
2322
- }
2323
-
2324
-
2325
- void LCodeGen::DoOuterContext(LOuterContext* instr) {
2326
- Register context = ToRegister(instr->context());
2327
- Register result = ToRegister(instr->result());
2328
- __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2329
- __ mov(result, FieldOperand(result, JSFunction::kContextOffset));
2330
- }
2331
-
2332
-
2333
- void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2334
- Register context = ToRegister(instr->context());
2335
- Register result = ToRegister(instr->result());
2336
- __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
2337
- }
2338
-
2339
-
2340
- void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
2341
- Register global = ToRegister(instr->global());
2342
- Register result = ToRegister(instr->result());
2343
- __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
2344
- }
2345
-
2346
-
2347
- void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2348
- int arity,
2349
- LInstruction* instr) {
2350
- // Change context if needed.
2351
- bool change_context =
2352
- (graph()->info()->closure()->context() != function->context()) ||
2353
- scope()->contains_with() ||
2354
- (scope()->num_heap_slots() > 0);
2355
- if (change_context) {
2356
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2357
- } else {
2358
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2359
- }
2360
-
2361
- // Set eax to arguments count if adaption is not needed. Assumes that eax
2362
- // is available to write to at this point.
2363
- if (!function->NeedsArgumentsAdaption()) {
2364
- __ mov(eax, arity);
2365
- }
2366
-
2367
- LPointerMap* pointers = instr->pointer_map();
2368
- RecordPosition(pointers->position());
2369
-
2370
- // Invoke function.
2371
- if (*function == *graph()->info()->closure()) {
2372
- __ CallSelf();
2373
- } else {
2374
- __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2375
- EnsureRelocSpaceForDeoptimization();
2376
- }
2377
-
2378
- // Setup deoptimization.
2379
- RegisterLazyDeoptimization(instr);
2380
- }
2381
-
2382
-
2383
- void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2384
- ASSERT(ToRegister(instr->result()).is(eax));
2385
- __ mov(edi, instr->function());
2386
- CallKnownFunction(instr->function(), instr->arity(), instr);
2387
- }
2388
-
2389
-
2390
- void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2391
- Register input_reg = ToRegister(instr->InputAt(0));
2392
- __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2393
- Factory::heap_number_map());
2394
- DeoptimizeIf(not_equal, instr->environment());
2395
-
2396
- Label done;
2397
- Register tmp = input_reg.is(eax) ? ecx : eax;
2398
- Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2399
-
2400
- // Preserve the value of all registers.
2401
- __ PushSafepointRegisters();
2402
-
2403
- Label negative;
2404
- __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2405
- // Check the sign of the argument. If the argument is positive, just
2406
- // return it. We do not need to patch the stack since |input| and
2407
- // |result| are the same register and |input| will be restored
2408
- // unchanged by popping safepoint registers.
2409
- __ test(tmp, Immediate(HeapNumber::kSignMask));
2410
- __ j(not_zero, &negative);
2411
- __ jmp(&done);
2412
-
2413
- __ bind(&negative);
2414
-
2415
- Label allocated, slow;
2416
- __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2417
- __ jmp(&allocated);
2418
-
2419
- // Slow case: Call the runtime system to do the number allocation.
2420
- __ bind(&slow);
2421
-
2422
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2423
- __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2424
- RecordSafepointWithRegisters(
2425
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2426
- // Set the pointer to the new heap number in tmp.
2427
- if (!tmp.is(eax)) __ mov(tmp, eax);
2428
-
2429
- // Restore input_reg after call to runtime.
2430
- __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
2431
-
2432
- __ bind(&allocated);
2433
- __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2434
- __ and_(tmp2, ~HeapNumber::kSignMask);
2435
- __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2436
- __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2437
- __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
2438
- __ StoreToSafepointRegisterSlot(input_reg, tmp);
2439
-
2440
- __ bind(&done);
2441
- __ PopSafepointRegisters();
2442
- }
2443
-
2444
-
2445
- void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2446
- Register input_reg = ToRegister(instr->InputAt(0));
2447
- __ test(input_reg, Operand(input_reg));
2448
- Label is_positive;
2449
- __ j(not_sign, &is_positive);
2450
- __ neg(input_reg);
2451
- __ test(input_reg, Operand(input_reg));
2452
- DeoptimizeIf(negative, instr->environment());
2453
- __ bind(&is_positive);
2454
- }
2455
-
2456
-
2457
- void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2458
- // Class for deferred case.
2459
- class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2460
- public:
2461
- DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2462
- LUnaryMathOperation* instr)
2463
- : LDeferredCode(codegen), instr_(instr) { }
2464
- virtual void Generate() {
2465
- codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2466
- }
2467
- private:
2468
- LUnaryMathOperation* instr_;
2469
- };
2470
-
2471
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
2472
- Representation r = instr->hydrogen()->value()->representation();
2473
-
2474
- if (r.IsDouble()) {
2475
- XMMRegister scratch = xmm0;
2476
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2477
- __ pxor(scratch, scratch);
2478
- __ subsd(scratch, input_reg);
2479
- __ pand(input_reg, scratch);
2480
- } else if (r.IsInteger32()) {
2481
- EmitIntegerMathAbs(instr);
2482
- } else { // Tagged case.
2483
- DeferredMathAbsTaggedHeapNumber* deferred =
2484
- new DeferredMathAbsTaggedHeapNumber(this, instr);
2485
- Register input_reg = ToRegister(instr->InputAt(0));
2486
- // Smi check.
2487
- __ test(input_reg, Immediate(kSmiTagMask));
2488
- __ j(not_zero, deferred->entry());
2489
- EmitIntegerMathAbs(instr);
2490
- __ bind(deferred->exit());
2491
- }
2492
- }
2493
-
2494
-
2495
- void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2496
- XMMRegister xmm_scratch = xmm0;
2497
- Register output_reg = ToRegister(instr->result());
2498
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2499
- __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2500
- __ ucomisd(input_reg, xmm_scratch);
2501
-
2502
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2503
- DeoptimizeIf(below_equal, instr->environment());
2504
- } else {
2505
- DeoptimizeIf(below, instr->environment());
2506
- }
2507
-
2508
- // Use truncating instruction (OK because input is positive).
2509
- __ cvttsd2si(output_reg, Operand(input_reg));
2510
-
2511
- // Overflow is signalled with minint.
2512
- __ cmp(output_reg, 0x80000000u);
2513
- DeoptimizeIf(equal, instr->environment());
2514
- }
2515
-
2516
-
2517
- void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2518
- XMMRegister xmm_scratch = xmm0;
2519
- Register output_reg = ToRegister(instr->result());
2520
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2521
-
2522
- // xmm_scratch = 0.5
2523
- ExternalReference one_half = ExternalReference::address_of_one_half();
2524
- __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
2525
-
2526
- // input = input + 0.5
2527
- __ addsd(input_reg, xmm_scratch);
2528
-
2529
- // We need to return -0 for the input range [-0.5, 0[, otherwise
2530
- // compute Math.floor(value + 0.5).
2531
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2532
- __ ucomisd(input_reg, xmm_scratch);
2533
- DeoptimizeIf(below_equal, instr->environment());
2534
- } else {
2535
- // If we don't need to bailout on -0, we check only bailout
2536
- // on negative inputs.
2537
- __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2538
- __ ucomisd(input_reg, xmm_scratch);
2539
- DeoptimizeIf(below, instr->environment());
2540
- }
2541
-
2542
- // Compute Math.floor(value + 0.5).
2543
- // Use truncating instruction (OK because input is positive).
2544
- __ cvttsd2si(output_reg, Operand(input_reg));
2545
-
2546
- // Overflow is signalled with minint.
2547
- __ cmp(output_reg, 0x80000000u);
2548
- DeoptimizeIf(equal, instr->environment());
2549
- }
2550
-
2551
-
2552
- void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
2553
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2554
- ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2555
- __ sqrtsd(input_reg, input_reg);
2556
- }
2557
-
2558
-
2559
- void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2560
- XMMRegister xmm_scratch = xmm0;
2561
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2562
- ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2563
- __ xorpd(xmm_scratch, xmm_scratch);
2564
- __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
2565
- __ sqrtsd(input_reg, input_reg);
2566
- }
2567
-
2568
-
2569
- void LCodeGen::DoPower(LPower* instr) {
2570
- LOperand* left = instr->InputAt(0);
2571
- LOperand* right = instr->InputAt(1);
2572
- DoubleRegister result_reg = ToDoubleRegister(instr->result());
2573
- Representation exponent_type = instr->hydrogen()->right()->representation();
2574
- if (exponent_type.IsDouble()) {
2575
- // It is safe to use ebx directly since the instruction is marked
2576
- // as a call.
2577
- __ PrepareCallCFunction(4, ebx);
2578
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2579
- __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
2580
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
2581
- } else if (exponent_type.IsInteger32()) {
2582
- // It is safe to use ebx directly since the instruction is marked
2583
- // as a call.
2584
- ASSERT(!ToRegister(right).is(ebx));
2585
- __ PrepareCallCFunction(4, ebx);
2586
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2587
- __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
2588
- __ CallCFunction(ExternalReference::power_double_int_function(), 4);
2589
- } else {
2590
- ASSERT(exponent_type.IsTagged());
2591
- CpuFeatures::Scope scope(SSE2);
2592
- Register right_reg = ToRegister(right);
2593
-
2594
- Label non_smi, call;
2595
- __ test(right_reg, Immediate(kSmiTagMask));
2596
- __ j(not_zero, &non_smi);
2597
- __ SmiUntag(right_reg);
2598
- __ cvtsi2sd(result_reg, Operand(right_reg));
2599
- __ jmp(&call);
2600
-
2601
- __ bind(&non_smi);
2602
- // It is safe to use ebx directly since the instruction is marked
2603
- // as a call.
2604
- ASSERT(!right_reg.is(ebx));
2605
- __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , ebx);
2606
- DeoptimizeIf(not_equal, instr->environment());
2607
- __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset));
2608
-
2609
- __ bind(&call);
2610
- __ PrepareCallCFunction(4, ebx);
2611
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2612
- __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
2613
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
2614
- }
2615
-
2616
- // Return value is in st(0) on ia32.
2617
- // Store it into the (fixed) result register.
2618
- __ sub(Operand(esp), Immediate(kDoubleSize));
2619
- __ fstp_d(Operand(esp, 0));
2620
- __ movdbl(result_reg, Operand(esp, 0));
2621
- __ add(Operand(esp), Immediate(kDoubleSize));
2622
- }
2623
-
2624
-
2625
- void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2626
- ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2627
- TranscendentalCacheStub stub(TranscendentalCache::LOG,
2628
- TranscendentalCacheStub::UNTAGGED);
2629
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
2630
- }
2631
-
2632
-
2633
- void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2634
- ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2635
- TranscendentalCacheStub stub(TranscendentalCache::COS,
2636
- TranscendentalCacheStub::UNTAGGED);
2637
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
2638
- }
2639
-
2640
-
2641
- void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2642
- ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2643
- TranscendentalCacheStub stub(TranscendentalCache::SIN,
2644
- TranscendentalCacheStub::UNTAGGED);
2645
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
2646
- }
2647
-
2648
-
2649
- void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2650
- switch (instr->op()) {
2651
- case kMathAbs:
2652
- DoMathAbs(instr);
2653
- break;
2654
- case kMathFloor:
2655
- DoMathFloor(instr);
2656
- break;
2657
- case kMathRound:
2658
- DoMathRound(instr);
2659
- break;
2660
- case kMathSqrt:
2661
- DoMathSqrt(instr);
2662
- break;
2663
- case kMathPowHalf:
2664
- DoMathPowHalf(instr);
2665
- break;
2666
- case kMathCos:
2667
- DoMathCos(instr);
2668
- break;
2669
- case kMathSin:
2670
- DoMathSin(instr);
2671
- break;
2672
- case kMathLog:
2673
- DoMathLog(instr);
2674
- break;
2675
-
2676
- default:
2677
- UNREACHABLE();
2678
- }
2679
- }
2680
-
2681
-
2682
- void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2683
- ASSERT(ToRegister(instr->context()).is(esi));
2684
- ASSERT(ToRegister(instr->key()).is(ecx));
2685
- ASSERT(ToRegister(instr->result()).is(eax));
2686
-
2687
- int arity = instr->arity();
2688
- Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2689
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2690
- }
2691
-
2692
-
2693
- void LCodeGen::DoCallNamed(LCallNamed* instr) {
2694
- ASSERT(ToRegister(instr->context()).is(esi));
2695
- ASSERT(ToRegister(instr->result()).is(eax));
2696
-
2697
- int arity = instr->arity();
2698
- Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2699
- __ mov(ecx, instr->name());
2700
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2701
- }
2702
-
2703
-
2704
- void LCodeGen::DoCallFunction(LCallFunction* instr) {
2705
- ASSERT(ToRegister(instr->context()).is(esi));
2706
- ASSERT(ToRegister(instr->result()).is(eax));
2707
-
2708
- int arity = instr->arity();
2709
- CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2710
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2711
- __ Drop(1);
2712
- }
2713
-
2714
-
2715
- void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2716
- ASSERT(ToRegister(instr->context()).is(esi));
2717
- ASSERT(ToRegister(instr->result()).is(eax));
2718
-
2719
- int arity = instr->arity();
2720
- Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2721
- __ mov(ecx, instr->name());
2722
- CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2723
- }
2724
-
2725
-
2726
- void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2727
- ASSERT(ToRegister(instr->result()).is(eax));
2728
- __ mov(edi, instr->target());
2729
- CallKnownFunction(instr->target(), instr->arity(), instr);
2730
- }
2731
-
2732
-
2733
- void LCodeGen::DoCallNew(LCallNew* instr) {
2734
- ASSERT(ToRegister(instr->context()).is(esi));
2735
- ASSERT(ToRegister(instr->constructor()).is(edi));
2736
- ASSERT(ToRegister(instr->result()).is(eax));
2737
-
2738
- Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2739
- __ Set(eax, Immediate(instr->arity()));
2740
- CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2741
- }
2742
-
2743
-
2744
- void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2745
- CallRuntime(instr->function(), instr->arity(), instr, false);
2746
- }
2747
-
2748
-
2749
- void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2750
- Register object = ToRegister(instr->object());
2751
- Register value = ToRegister(instr->value());
2752
- int offset = instr->offset();
2753
-
2754
- if (!instr->transition().is_null()) {
2755
- __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
2756
- }
2757
-
2758
- // Do the store.
2759
- if (instr->is_in_object()) {
2760
- __ mov(FieldOperand(object, offset), value);
2761
- if (instr->needs_write_barrier()) {
2762
- Register temp = ToRegister(instr->TempAt(0));
2763
- // Update the write barrier for the object for in-object properties.
2764
- __ RecordWrite(object, offset, value, temp);
2765
- }
2766
- } else {
2767
- Register temp = ToRegister(instr->TempAt(0));
2768
- __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
2769
- __ mov(FieldOperand(temp, offset), value);
2770
- if (instr->needs_write_barrier()) {
2771
- // Update the write barrier for the properties array.
2772
- // object is used as a scratch register.
2773
- __ RecordWrite(temp, offset, value, object);
2774
- }
2775
- }
2776
- }
2777
-
2778
-
2779
- void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2780
- ASSERT(ToRegister(instr->context()).is(esi));
2781
- ASSERT(ToRegister(instr->object()).is(edx));
2782
- ASSERT(ToRegister(instr->value()).is(eax));
2783
-
2784
- __ mov(ecx, instr->name());
2785
- Handle<Code> ic(Builtins::builtin(
2786
- info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
2787
- : Builtins::StoreIC_Initialize));
2788
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2789
- }
2790
-
2791
-
2792
- void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2793
- __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
2794
- DeoptimizeIf(above_equal, instr->environment());
2795
- }
2796
-
2797
-
2798
- void LCodeGen::DoStorePixelArrayElement(LStorePixelArrayElement* instr) {
2799
- Register external_pointer = ToRegister(instr->external_pointer());
2800
- Register key = ToRegister(instr->key());
2801
- Register value = ToRegister(instr->value());
2802
- ASSERT(ToRegister(instr->TempAt(0)).is(eax));
2803
-
2804
- __ mov(eax, value);
2805
- { // Clamp the value to [0..255].
2806
- NearLabel done;
2807
- __ test(eax, Immediate(0xFFFFFF00));
2808
- __ j(zero, &done);
2809
- __ setcc(negative, eax); // 1 if negative, 0 if positive.
2810
- __ dec_b(eax); // 0 if negative, 255 if positive.
2811
- __ bind(&done);
2812
- }
2813
- __ mov_b(Operand(external_pointer, key, times_1, 0), eax);
2814
- }
2815
-
2816
-
2817
- void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
2818
- Register value = ToRegister(instr->value());
2819
- Register elements = ToRegister(instr->object());
2820
- Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
2821
-
2822
- // Do the store.
2823
- if (instr->key()->IsConstantOperand()) {
2824
- ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
2825
- LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
2826
- int offset =
2827
- ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
2828
- __ mov(FieldOperand(elements, offset), value);
2829
- } else {
2830
- __ mov(FieldOperand(elements,
2831
- key,
2832
- times_pointer_size,
2833
- FixedArray::kHeaderSize),
2834
- value);
2835
- }
2836
-
2837
- if (instr->hydrogen()->NeedsWriteBarrier()) {
2838
- // Compute address of modified element and store it into key register.
2839
- __ lea(key,
2840
- FieldOperand(elements,
2841
- key,
2842
- times_pointer_size,
2843
- FixedArray::kHeaderSize));
2844
- __ RecordWrite(elements, key, value);
2845
- }
2846
- }
2847
-
2848
-
2849
- void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2850
- ASSERT(ToRegister(instr->context()).is(esi));
2851
- ASSERT(ToRegister(instr->object()).is(edx));
2852
- ASSERT(ToRegister(instr->key()).is(ecx));
2853
- ASSERT(ToRegister(instr->value()).is(eax));
2854
-
2855
- Handle<Code> ic(Builtins::builtin(
2856
- info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
2857
- : Builtins::KeyedStoreIC_Initialize));
2858
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2859
- }
2860
-
2861
-
2862
- void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
2863
- class DeferredStringCharCodeAt: public LDeferredCode {
2864
- public:
2865
- DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
2866
- : LDeferredCode(codegen), instr_(instr) { }
2867
- virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
2868
- private:
2869
- LStringCharCodeAt* instr_;
2870
- };
2871
-
2872
- Register string = ToRegister(instr->string());
2873
- Register index = no_reg;
2874
- int const_index = -1;
2875
- if (instr->index()->IsConstantOperand()) {
2876
- const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2877
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2878
- if (!Smi::IsValid(const_index)) {
2879
- // Guaranteed to be out of bounds because of the assert above.
2880
- // So the bounds check that must dominate this instruction must
2881
- // have deoptimized already.
2882
- if (FLAG_debug_code) {
2883
- __ Abort("StringCharCodeAt: out of bounds index.");
2884
- }
2885
- // No code needs to be generated.
2886
- return;
2887
- }
2888
- } else {
2889
- index = ToRegister(instr->index());
2890
- }
2891
- Register result = ToRegister(instr->result());
2892
-
2893
- DeferredStringCharCodeAt* deferred =
2894
- new DeferredStringCharCodeAt(this, instr);
2895
-
2896
- NearLabel flat_string, ascii_string, done;
2897
-
2898
- // Fetch the instance type of the receiver into result register.
2899
- __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
2900
- __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
2901
-
2902
- // We need special handling for non-flat strings.
2903
- STATIC_ASSERT(kSeqStringTag == 0);
2904
- __ test(result, Immediate(kStringRepresentationMask));
2905
- __ j(zero, &flat_string);
2906
-
2907
- // Handle non-flat strings.
2908
- __ test(result, Immediate(kIsConsStringMask));
2909
- __ j(zero, deferred->entry());
2910
-
2911
- // ConsString.
2912
- // Check whether the right hand side is the empty string (i.e. if
2913
- // this is really a flat string in a cons string). If that is not
2914
- // the case we would rather go to the runtime system now to flatten
2915
- // the string.
2916
- __ cmp(FieldOperand(string, ConsString::kSecondOffset),
2917
- Immediate(Factory::empty_string()));
2918
- __ j(not_equal, deferred->entry());
2919
- // Get the first of the two strings and load its instance type.
2920
- __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
2921
- __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
2922
- __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
2923
- // If the first cons component is also non-flat, then go to runtime.
2924
- STATIC_ASSERT(kSeqStringTag == 0);
2925
- __ test(result, Immediate(kStringRepresentationMask));
2926
- __ j(not_zero, deferred->entry());
2927
-
2928
- // Check for ASCII or two-byte string.
2929
- __ bind(&flat_string);
2930
- STATIC_ASSERT(kAsciiStringTag != 0);
2931
- __ test(result, Immediate(kStringEncodingMask));
2932
- __ j(not_zero, &ascii_string);
2933
-
2934
- // Two-byte string.
2935
- // Load the two-byte character code into the result register.
2936
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
2937
- if (instr->index()->IsConstantOperand()) {
2938
- __ movzx_w(result,
2939
- FieldOperand(string,
2940
- SeqTwoByteString::kHeaderSize +
2941
- (kUC16Size * const_index)));
2942
- } else {
2943
- __ movzx_w(result, FieldOperand(string,
2944
- index,
2945
- times_2,
2946
- SeqTwoByteString::kHeaderSize));
2947
- }
2948
- __ jmp(&done);
2949
-
2950
- // ASCII string.
2951
- // Load the byte into the result register.
2952
- __ bind(&ascii_string);
2953
- if (instr->index()->IsConstantOperand()) {
2954
- __ movzx_b(result, FieldOperand(string,
2955
- SeqAsciiString::kHeaderSize + const_index));
2956
- } else {
2957
- __ movzx_b(result, FieldOperand(string,
2958
- index,
2959
- times_1,
2960
- SeqAsciiString::kHeaderSize));
2961
- }
2962
- __ bind(&done);
2963
- __ bind(deferred->exit());
2964
- }
2965
-
2966
-
2967
- void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
2968
- Register string = ToRegister(instr->string());
2969
- Register result = ToRegister(instr->result());
2970
-
2971
- // TODO(3095996): Get rid of this. For now, we need to make the
2972
- // result register contain a valid pointer because it is already
2973
- // contained in the register pointer map.
2974
- __ Set(result, Immediate(0));
2975
-
2976
- __ PushSafepointRegisters();
2977
- __ push(string);
2978
- // Push the index as a smi. This is safe because of the checks in
2979
- // DoStringCharCodeAt above.
2980
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2981
- if (instr->index()->IsConstantOperand()) {
2982
- int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2983
- __ push(Immediate(Smi::FromInt(const_index)));
2984
- } else {
2985
- Register index = ToRegister(instr->index());
2986
- __ SmiTag(index);
2987
- __ push(index);
2988
- }
2989
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2990
- __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
2991
- RecordSafepointWithRegisters(
2992
- instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
2993
- if (FLAG_debug_code) {
2994
- __ AbortIfNotSmi(eax);
2995
- }
2996
- __ SmiUntag(eax);
2997
- __ StoreToSafepointRegisterSlot(result, eax);
2998
- __ PopSafepointRegisters();
2999
- }
3000
-
3001
-
3002
- void LCodeGen::DoStringLength(LStringLength* instr) {
3003
- Register string = ToRegister(instr->string());
3004
- Register result = ToRegister(instr->result());
3005
- __ mov(result, FieldOperand(string, String::kLengthOffset));
3006
- }
3007
-
3008
-
3009
- void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3010
- LOperand* input = instr->InputAt(0);
3011
- ASSERT(input->IsRegister() || input->IsStackSlot());
3012
- LOperand* output = instr->result();
3013
- ASSERT(output->IsDoubleRegister());
3014
- __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
3015
- }
3016
-
3017
-
3018
- void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3019
- class DeferredNumberTagI: public LDeferredCode {
3020
- public:
3021
- DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3022
- : LDeferredCode(codegen), instr_(instr) { }
3023
- virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3024
- private:
3025
- LNumberTagI* instr_;
3026
- };
3027
-
3028
- LOperand* input = instr->InputAt(0);
3029
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
3030
- Register reg = ToRegister(input);
3031
-
3032
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
3033
- __ SmiTag(reg);
3034
- __ j(overflow, deferred->entry());
3035
- __ bind(deferred->exit());
3036
- }
3037
-
3038
-
3039
- void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3040
- Label slow;
3041
- Register reg = ToRegister(instr->InputAt(0));
3042
- Register tmp = reg.is(eax) ? ecx : eax;
3043
-
3044
- // Preserve the value of all registers.
3045
- __ PushSafepointRegisters();
3046
-
3047
- // There was overflow, so bits 30 and 31 of the original integer
3048
- // disagree. Try to allocate a heap number in new space and store
3049
- // the value in there. If that fails, call the runtime system.
3050
- NearLabel done;
3051
- __ SmiUntag(reg);
3052
- __ xor_(reg, 0x80000000);
3053
- __ cvtsi2sd(xmm0, Operand(reg));
3054
- if (FLAG_inline_new) {
3055
- __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
3056
- __ jmp(&done);
3057
- }
3058
-
3059
- // Slow case: Call the runtime system to do the number allocation.
3060
- __ bind(&slow);
3061
-
3062
- // TODO(3095996): Put a valid pointer value in the stack slot where the result
3063
- // register is stored, as this register is in the pointer map, but contains an
3064
- // integer value.
3065
- __ StoreToSafepointRegisterSlot(reg, Immediate(0));
3066
-
3067
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3068
- __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3069
- RecordSafepointWithRegisters(
3070
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3071
- if (!reg.is(eax)) __ mov(reg, eax);
3072
-
3073
- // Done. Put the value in xmm0 into the value of the allocated heap
3074
- // number.
3075
- __ bind(&done);
3076
- __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
3077
- __ StoreToSafepointRegisterSlot(reg, reg);
3078
- __ PopSafepointRegisters();
3079
- }
3080
-
3081
-
3082
- void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3083
- class DeferredNumberTagD: public LDeferredCode {
3084
- public:
3085
- DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3086
- : LDeferredCode(codegen), instr_(instr) { }
3087
- virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3088
- private:
3089
- LNumberTagD* instr_;
3090
- };
3091
-
3092
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3093
- Register reg = ToRegister(instr->result());
3094
- Register tmp = ToRegister(instr->TempAt(0));
3095
-
3096
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
3097
- if (FLAG_inline_new) {
3098
- __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
3099
- } else {
3100
- __ jmp(deferred->entry());
3101
- }
3102
- __ bind(deferred->exit());
3103
- __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
3104
- }
3105
-
3106
-
3107
- void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3108
- // TODO(3095996): Get rid of this. For now, we need to make the
3109
- // result register contain a valid pointer because it is already
3110
- // contained in the register pointer map.
3111
- Register reg = ToRegister(instr->result());
3112
- __ Set(reg, Immediate(0));
3113
-
3114
- __ PushSafepointRegisters();
3115
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3116
- __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3117
- RecordSafepointWithRegisters(
3118
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3119
- __ StoreToSafepointRegisterSlot(reg, eax);
3120
- __ PopSafepointRegisters();
3121
- }
3122
-
3123
-
3124
- void LCodeGen::DoSmiTag(LSmiTag* instr) {
3125
- LOperand* input = instr->InputAt(0);
3126
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
3127
- ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3128
- __ SmiTag(ToRegister(input));
3129
- }
3130
-
3131
-
3132
- void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3133
- LOperand* input = instr->InputAt(0);
3134
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
3135
- if (instr->needs_check()) {
3136
- __ test(ToRegister(input), Immediate(kSmiTagMask));
3137
- DeoptimizeIf(not_zero, instr->environment());
3138
- }
3139
- __ SmiUntag(ToRegister(input));
3140
- }
3141
-
3142
-
3143
- void LCodeGen::EmitNumberUntagD(Register input_reg,
3144
- XMMRegister result_reg,
3145
- LEnvironment* env) {
3146
- NearLabel load_smi, heap_number, done;
3147
-
3148
- // Smi check.
3149
- __ test(input_reg, Immediate(kSmiTagMask));
3150
- __ j(zero, &load_smi, not_taken);
3151
-
3152
- // Heap number map check.
3153
- __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3154
- Factory::heap_number_map());
3155
- __ j(equal, &heap_number);
3156
-
3157
- __ cmp(input_reg, Factory::undefined_value());
3158
- DeoptimizeIf(not_equal, env);
3159
-
3160
- // Convert undefined to NaN.
3161
- __ push(input_reg);
3162
- __ mov(input_reg, Factory::nan_value());
3163
- __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3164
- __ pop(input_reg);
3165
- __ jmp(&done);
3166
-
3167
- // Heap number to XMM conversion.
3168
- __ bind(&heap_number);
3169
- __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3170
- __ jmp(&done);
3171
-
3172
- // Smi to XMM conversion
3173
- __ bind(&load_smi);
3174
- __ SmiUntag(input_reg); // Untag smi before converting to float.
3175
- __ cvtsi2sd(result_reg, Operand(input_reg));
3176
- __ SmiTag(input_reg); // Retag smi.
3177
- __ bind(&done);
3178
- }
3179
-
3180
-
3181
- class DeferredTaggedToI: public LDeferredCode {
3182
- public:
3183
- DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3184
- : LDeferredCode(codegen), instr_(instr) { }
3185
- virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3186
- private:
3187
- LTaggedToI* instr_;
3188
- };
3189
-
3190
-
3191
- void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3192
- NearLabel done, heap_number;
3193
- Register input_reg = ToRegister(instr->InputAt(0));
3194
-
3195
- // Heap number map check.
3196
- __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3197
- Factory::heap_number_map());
3198
-
3199
- if (instr->truncating()) {
3200
- __ j(equal, &heap_number);
3201
- // Check for undefined. Undefined is converted to zero for truncating
3202
- // conversions.
3203
- __ cmp(input_reg, Factory::undefined_value());
3204
- DeoptimizeIf(not_equal, instr->environment());
3205
- __ mov(input_reg, 0);
3206
- __ jmp(&done);
3207
-
3208
- __ bind(&heap_number);
3209
- if (CpuFeatures::IsSupported(SSE3)) {
3210
- CpuFeatures::Scope scope(SSE3);
3211
- NearLabel convert;
3212
- // Use more powerful conversion when sse3 is available.
3213
- // Load x87 register with heap number.
3214
- __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
3215
- // Get exponent alone and check for too-big exponent.
3216
- __ mov(input_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3217
- __ and_(input_reg, HeapNumber::kExponentMask);
3218
- const uint32_t kTooBigExponent =
3219
- (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3220
- __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
3221
- __ j(less, &convert);
3222
- // Pop FPU stack before deoptimizing.
3223
- __ ffree(0);
3224
- __ fincstp();
3225
- DeoptimizeIf(no_condition, instr->environment());
3226
-
3227
- // Reserve space for 64 bit answer.
3228
- __ bind(&convert);
3229
- __ sub(Operand(esp), Immediate(kDoubleSize));
3230
- // Do conversion, which cannot fail because we checked the exponent.
3231
- __ fisttp_d(Operand(esp, 0));
3232
- __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result.
3233
- __ add(Operand(esp), Immediate(kDoubleSize));
3234
- } else {
3235
- NearLabel deopt;
3236
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
3237
- __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3238
- __ cvttsd2si(input_reg, Operand(xmm0));
3239
- __ cmp(input_reg, 0x80000000u);
3240
- __ j(not_equal, &done);
3241
- // Check if the input was 0x8000000 (kMinInt).
3242
- // If no, then we got an overflow and we deoptimize.
3243
- ExternalReference min_int = ExternalReference::address_of_min_int();
3244
- __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
3245
- __ ucomisd(xmm_temp, xmm0);
3246
- DeoptimizeIf(not_equal, instr->environment());
3247
- DeoptimizeIf(parity_even, instr->environment()); // NaN.
3248
- }
3249
- } else {
3250
- // Deoptimize if we don't have a heap number.
3251
- DeoptimizeIf(not_equal, instr->environment());
3252
-
3253
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
3254
- __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3255
- __ cvttsd2si(input_reg, Operand(xmm0));
3256
- __ cvtsi2sd(xmm_temp, Operand(input_reg));
3257
- __ ucomisd(xmm0, xmm_temp);
3258
- DeoptimizeIf(not_equal, instr->environment());
3259
- DeoptimizeIf(parity_even, instr->environment()); // NaN.
3260
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3261
- __ test(input_reg, Operand(input_reg));
3262
- __ j(not_zero, &done);
3263
- __ movmskpd(input_reg, xmm0);
3264
- __ and_(input_reg, 1);
3265
- DeoptimizeIf(not_zero, instr->environment());
3266
- }
3267
- }
3268
- __ bind(&done);
3269
- }
3270
-
3271
-
3272
- void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3273
- LOperand* input = instr->InputAt(0);
3274
- ASSERT(input->IsRegister());
3275
- ASSERT(input->Equals(instr->result()));
3276
-
3277
- Register input_reg = ToRegister(input);
3278
-
3279
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3280
-
3281
- // Smi check.
3282
- __ test(input_reg, Immediate(kSmiTagMask));
3283
- __ j(not_zero, deferred->entry());
3284
-
3285
- // Smi to int32 conversion
3286
- __ SmiUntag(input_reg); // Untag smi.
3287
-
3288
- __ bind(deferred->exit());
3289
- }
3290
-
3291
-
3292
- void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3293
- LOperand* input = instr->InputAt(0);
3294
- ASSERT(input->IsRegister());
3295
- LOperand* result = instr->result();
3296
- ASSERT(result->IsDoubleRegister());
3297
-
3298
- Register input_reg = ToRegister(input);
3299
- XMMRegister result_reg = ToDoubleRegister(result);
3300
-
3301
- EmitNumberUntagD(input_reg, result_reg, instr->environment());
3302
- }
3303
-
3304
-
3305
- void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3306
- LOperand* input = instr->InputAt(0);
3307
- ASSERT(input->IsDoubleRegister());
3308
- LOperand* result = instr->result();
3309
- ASSERT(result->IsRegister());
3310
-
3311
- XMMRegister input_reg = ToDoubleRegister(input);
3312
- Register result_reg = ToRegister(result);
3313
-
3314
- if (instr->truncating()) {
3315
- // Performs a truncating conversion of a floating point number as used by
3316
- // the JS bitwise operations.
3317
- __ cvttsd2si(result_reg, Operand(input_reg));
3318
- __ cmp(result_reg, 0x80000000u);
3319
- if (CpuFeatures::IsSupported(SSE3)) {
3320
- // This will deoptimize if the exponent of the input in out of range.
3321
- CpuFeatures::Scope scope(SSE3);
3322
- NearLabel convert, done;
3323
- __ j(not_equal, &done);
3324
- __ sub(Operand(esp), Immediate(kDoubleSize));
3325
- __ movdbl(Operand(esp, 0), input_reg);
3326
- // Get exponent alone and check for too-big exponent.
3327
- __ mov(result_reg, Operand(esp, sizeof(int32_t)));
3328
- __ and_(result_reg, HeapNumber::kExponentMask);
3329
- const uint32_t kTooBigExponent =
3330
- (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3331
- __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
3332
- __ j(less, &convert);
3333
- __ add(Operand(esp), Immediate(kDoubleSize));
3334
- DeoptimizeIf(no_condition, instr->environment());
3335
- __ bind(&convert);
3336
- // Do conversion, which cannot fail because we checked the exponent.
3337
- __ fld_d(Operand(esp, 0));
3338
- __ fisttp_d(Operand(esp, 0));
3339
- __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
3340
- __ add(Operand(esp), Immediate(kDoubleSize));
3341
- __ bind(&done);
3342
- } else {
3343
- NearLabel done;
3344
- Register temp_reg = ToRegister(instr->TempAt(0));
3345
- XMMRegister xmm_scratch = xmm0;
3346
-
3347
- // If cvttsd2si succeeded, we're done. Otherwise, we attempt
3348
- // manual conversion.
3349
- __ j(not_equal, &done);
3350
-
3351
- // Get high 32 bits of the input in result_reg and temp_reg.
3352
- __ pshufd(xmm_scratch, input_reg, 1);
3353
- __ movd(Operand(temp_reg), xmm_scratch);
3354
- __ mov(result_reg, temp_reg);
3355
-
3356
- // Prepare negation mask in temp_reg.
3357
- __ sar(temp_reg, kBitsPerInt - 1);
3358
-
3359
- // Extract the exponent from result_reg and subtract adjusted
3360
- // bias from it. The adjustment is selected in a way such that
3361
- // when the difference is zero, the answer is in the low 32 bits
3362
- // of the input, otherwise a shift has to be performed.
3363
- __ shr(result_reg, HeapNumber::kExponentShift);
3364
- __ and_(result_reg,
3365
- HeapNumber::kExponentMask >> HeapNumber::kExponentShift);
3366
- __ sub(Operand(result_reg),
3367
- Immediate(HeapNumber::kExponentBias +
3368
- HeapNumber::kExponentBits +
3369
- HeapNumber::kMantissaBits));
3370
- // Don't handle big (> kMantissaBits + kExponentBits == 63) or
3371
- // special exponents.
3372
- DeoptimizeIf(greater, instr->environment());
3373
-
3374
- // Zero out the sign and the exponent in the input (by shifting
3375
- // it to the left) and restore the implicit mantissa bit,
3376
- // i.e. convert the input to unsigned int64 shifted left by
3377
- // kExponentBits.
3378
- ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
3379
- // Minus zero has the most significant bit set and the other
3380
- // bits cleared.
3381
- __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
3382
- __ psllq(input_reg, HeapNumber::kExponentBits);
3383
- __ por(input_reg, xmm_scratch);
3384
-
3385
- // Get the amount to shift the input right in xmm_scratch.
3386
- __ neg(result_reg);
3387
- __ movd(xmm_scratch, Operand(result_reg));
3388
-
3389
- // Shift the input right and extract low 32 bits.
3390
- __ psrlq(input_reg, xmm_scratch);
3391
- __ movd(Operand(result_reg), input_reg);
3392
-
3393
- // Use the prepared mask in temp_reg to negate the result if necessary.
3394
- __ xor_(result_reg, Operand(temp_reg));
3395
- __ sub(result_reg, Operand(temp_reg));
3396
- __ bind(&done);
3397
- }
3398
- } else {
3399
- NearLabel done;
3400
- __ cvttsd2si(result_reg, Operand(input_reg));
3401
- __ cvtsi2sd(xmm0, Operand(result_reg));
3402
- __ ucomisd(xmm0, input_reg);
3403
- DeoptimizeIf(not_equal, instr->environment());
3404
- DeoptimizeIf(parity_even, instr->environment()); // NaN.
3405
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3406
- // The integer converted back is equal to the original. We
3407
- // only have to test if we got -0 as an input.
3408
- __ test(result_reg, Operand(result_reg));
3409
- __ j(not_zero, &done);
3410
- __ movmskpd(result_reg, input_reg);
3411
- // Bit 0 contains the sign of the double in input_reg.
3412
- // If input was positive, we are ok and return 0, otherwise
3413
- // deoptimize.
3414
- __ and_(result_reg, 1);
3415
- DeoptimizeIf(not_zero, instr->environment());
3416
- }
3417
- __ bind(&done);
3418
- }
3419
- }
3420
-
3421
-
3422
- void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
3423
- LOperand* input = instr->InputAt(0);
3424
- ASSERT(input->IsRegister());
3425
- __ test(ToRegister(input), Immediate(kSmiTagMask));
3426
- DeoptimizeIf(instr->condition(), instr->environment());
3427
- }
3428
-
3429
-
3430
- void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
3431
- Register input = ToRegister(instr->InputAt(0));
3432
- Register temp = ToRegister(instr->TempAt(0));
3433
- InstanceType first = instr->hydrogen()->first();
3434
- InstanceType last = instr->hydrogen()->last();
3435
-
3436
- __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
3437
-
3438
- // If there is only one type in the interval check for equality.
3439
- if (first == last) {
3440
- __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3441
- static_cast<int8_t>(first));
3442
- DeoptimizeIf(not_equal, instr->environment());
3443
- } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
3444
- // String has a dedicated bit in instance type.
3445
- __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), kIsNotStringMask);
3446
- DeoptimizeIf(not_zero, instr->environment());
3447
- } else {
3448
- __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3449
- static_cast<int8_t>(first));
3450
- DeoptimizeIf(below, instr->environment());
3451
- // Omit check for the last type.
3452
- if (last != LAST_TYPE) {
3453
- __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3454
- static_cast<int8_t>(last));
3455
- DeoptimizeIf(above, instr->environment());
3456
- }
3457
- }
3458
- }
3459
-
3460
-
3461
- void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
3462
- ASSERT(instr->InputAt(0)->IsRegister());
3463
- Register reg = ToRegister(instr->InputAt(0));
3464
- __ cmp(reg, instr->hydrogen()->target());
3465
- DeoptimizeIf(not_equal, instr->environment());
3466
- }
3467
-
3468
-
3469
- void LCodeGen::DoCheckMap(LCheckMap* instr) {
3470
- LOperand* input = instr->InputAt(0);
3471
- ASSERT(input->IsRegister());
3472
- Register reg = ToRegister(input);
3473
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3474
- instr->hydrogen()->map());
3475
- DeoptimizeIf(not_equal, instr->environment());
3476
- }
3477
-
3478
-
3479
- void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
3480
- if (Heap::InNewSpace(*object)) {
3481
- Handle<JSGlobalPropertyCell> cell =
3482
- Factory::NewJSGlobalPropertyCell(object);
3483
- __ mov(result, Operand::Cell(cell));
3484
- } else {
3485
- __ mov(result, object);
3486
- }
3487
- }
3488
-
3489
-
3490
- void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
3491
- Register reg = ToRegister(instr->TempAt(0));
3492
-
3493
- Handle<JSObject> holder = instr->holder();
3494
- Handle<JSObject> current_prototype = instr->prototype();
3495
-
3496
- // Load prototype object.
3497
- LoadHeapObject(reg, current_prototype);
3498
-
3499
- // Check prototype maps up to the holder.
3500
- while (!current_prototype.is_identical_to(holder)) {
3501
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3502
- Handle<Map>(current_prototype->map()));
3503
- DeoptimizeIf(not_equal, instr->environment());
3504
- current_prototype =
3505
- Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
3506
- // Load next prototype object.
3507
- LoadHeapObject(reg, current_prototype);
3508
- }
3509
-
3510
- // Check the holder map.
3511
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3512
- Handle<Map>(current_prototype->map()));
3513
- DeoptimizeIf(not_equal, instr->environment());
3514
- }
3515
-
3516
-
3517
- void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
3518
- // Setup the parameters to the stub/runtime call.
3519
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3520
- __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3521
- __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3522
- __ push(Immediate(instr->hydrogen()->constant_elements()));
3523
-
3524
- // Pick the right runtime function or stub to call.
3525
- int length = instr->hydrogen()->length();
3526
- if (instr->hydrogen()->IsCopyOnWrite()) {
3527
- ASSERT(instr->hydrogen()->depth() == 1);
3528
- FastCloneShallowArrayStub::Mode mode =
3529
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3530
- FastCloneShallowArrayStub stub(mode, length);
3531
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3532
- } else if (instr->hydrogen()->depth() > 1) {
3533
- CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false);
3534
- } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3535
- CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false);
3536
- } else {
3537
- FastCloneShallowArrayStub::Mode mode =
3538
- FastCloneShallowArrayStub::CLONE_ELEMENTS;
3539
- FastCloneShallowArrayStub stub(mode, length);
3540
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3541
- }
3542
- }
3543
-
3544
-
3545
- void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3546
- ASSERT(ToRegister(instr->context()).is(esi));
3547
- // Setup the parameters to the stub/runtime call.
3548
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3549
- __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3550
- __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3551
- __ push(Immediate(instr->hydrogen()->constant_properties()));
3552
- __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
3553
-
3554
- // Pick the right runtime function to call.
3555
- if (instr->hydrogen()->depth() > 1) {
3556
- CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3557
- } else {
3558
- CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3559
- }
3560
- }
3561
-
3562
-
3563
- void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3564
- NearLabel materialized;
3565
- // Registers will be used as follows:
3566
- // edi = JS function.
3567
- // ecx = literals array.
3568
- // ebx = regexp literal.
3569
- // eax = regexp literal clone.
3570
- __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3571
- __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
3572
- int literal_offset = FixedArray::kHeaderSize +
3573
- instr->hydrogen()->literal_index() * kPointerSize;
3574
- __ mov(ebx, FieldOperand(ecx, literal_offset));
3575
- __ cmp(ebx, Factory::undefined_value());
3576
- __ j(not_equal, &materialized);
3577
-
3578
- // Create regexp literal using runtime function
3579
- // Result will be in eax.
3580
- __ push(ecx);
3581
- __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3582
- __ push(Immediate(instr->hydrogen()->pattern()));
3583
- __ push(Immediate(instr->hydrogen()->flags()));
3584
- CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false);
3585
- __ mov(ebx, eax);
3586
-
3587
- __ bind(&materialized);
3588
- int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3589
- Label allocated, runtime_allocate;
3590
- __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3591
- __ jmp(&allocated);
3592
-
3593
- __ bind(&runtime_allocate);
3594
- __ push(ebx);
3595
- __ push(Immediate(Smi::FromInt(size)));
3596
- CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false);
3597
- __ pop(ebx);
3598
-
3599
- __ bind(&allocated);
3600
- // Copy the content into the newly allocated memory.
3601
- // (Unroll copy loop once for better throughput).
3602
- for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3603
- __ mov(edx, FieldOperand(ebx, i));
3604
- __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3605
- __ mov(FieldOperand(eax, i), edx);
3606
- __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3607
- }
3608
- if ((size % (2 * kPointerSize)) != 0) {
3609
- __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3610
- __ mov(FieldOperand(eax, size - kPointerSize), edx);
3611
- }
3612
- }
3613
-
3614
-
3615
- void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3616
- // Use the fast case closure allocation code that allocates in new
3617
- // space for nested functions that don't need literals cloning.
3618
- Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3619
- bool pretenure = instr->hydrogen()->pretenure();
3620
- if (shared_info->num_literals() == 0 && !pretenure) {
3621
- FastNewClosureStub stub;
3622
- __ push(Immediate(shared_info));
3623
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3624
- } else {
3625
- __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
3626
- __ push(Immediate(shared_info));
3627
- __ push(Immediate(pretenure
3628
- ? Factory::true_value()
3629
- : Factory::false_value()));
3630
- CallRuntime(Runtime::kNewClosure, 3, instr, false);
3631
- }
3632
- }
3633
-
3634
-
3635
- void LCodeGen::DoTypeof(LTypeof* instr) {
3636
- LOperand* input = instr->InputAt(0);
3637
- if (input->IsConstantOperand()) {
3638
- __ push(ToImmediate(input));
3639
- } else {
3640
- __ push(ToOperand(input));
3641
- }
3642
- CallRuntime(Runtime::kTypeof, 1, instr, false);
3643
- }
3644
-
3645
-
3646
- void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
3647
- Register input = ToRegister(instr->InputAt(0));
3648
- Register result = ToRegister(instr->result());
3649
- Label true_label;
3650
- Label false_label;
3651
- NearLabel done;
3652
-
3653
- Condition final_branch_condition = EmitTypeofIs(&true_label,
3654
- &false_label,
3655
- input,
3656
- instr->type_literal());
3657
- __ j(final_branch_condition, &true_label);
3658
- __ bind(&false_label);
3659
- __ mov(result, Factory::false_value());
3660
- __ jmp(&done);
3661
-
3662
- __ bind(&true_label);
3663
- __ mov(result, Factory::true_value());
3664
-
3665
- __ bind(&done);
3666
- }
3667
-
3668
-
3669
- void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
3670
- Register input = ToRegister(instr->InputAt(0));
3671
- int true_block = chunk_->LookupDestination(instr->true_block_id());
3672
- int false_block = chunk_->LookupDestination(instr->false_block_id());
3673
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
3674
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
3675
-
3676
- Condition final_branch_condition = EmitTypeofIs(true_label,
3677
- false_label,
3678
- input,
3679
- instr->type_literal());
3680
-
3681
- EmitBranch(true_block, false_block, final_branch_condition);
3682
- }
3683
-
3684
-
3685
- Condition LCodeGen::EmitTypeofIs(Label* true_label,
3686
- Label* false_label,
3687
- Register input,
3688
- Handle<String> type_name) {
3689
- Condition final_branch_condition = no_condition;
3690
- if (type_name->Equals(Heap::number_symbol())) {
3691
- __ test(input, Immediate(kSmiTagMask));
3692
- __ j(zero, true_label);
3693
- __ cmp(FieldOperand(input, HeapObject::kMapOffset),
3694
- Factory::heap_number_map());
3695
- final_branch_condition = equal;
3696
-
3697
- } else if (type_name->Equals(Heap::string_symbol())) {
3698
- __ test(input, Immediate(kSmiTagMask));
3699
- __ j(zero, false_label);
3700
- __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
3701
- __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3702
- 1 << Map::kIsUndetectable);
3703
- __ j(not_zero, false_label);
3704
- __ CmpInstanceType(input, FIRST_NONSTRING_TYPE);
3705
- final_branch_condition = below;
3706
-
3707
- } else if (type_name->Equals(Heap::boolean_symbol())) {
3708
- __ cmp(input, Factory::true_value());
3709
- __ j(equal, true_label);
3710
- __ cmp(input, Factory::false_value());
3711
- final_branch_condition = equal;
3712
-
3713
- } else if (type_name->Equals(Heap::undefined_symbol())) {
3714
- __ cmp(input, Factory::undefined_value());
3715
- __ j(equal, true_label);
3716
- __ test(input, Immediate(kSmiTagMask));
3717
- __ j(zero, false_label);
3718
- // Check for undetectable objects => true.
3719
- __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
3720
- __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3721
- 1 << Map::kIsUndetectable);
3722
- final_branch_condition = not_zero;
3723
-
3724
- } else if (type_name->Equals(Heap::function_symbol())) {
3725
- __ test(input, Immediate(kSmiTagMask));
3726
- __ j(zero, false_label);
3727
- __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
3728
- __ j(equal, true_label);
3729
- // Regular expressions => 'function' (they are callable).
3730
- __ CmpInstanceType(input, JS_REGEXP_TYPE);
3731
- final_branch_condition = equal;
3732
-
3733
- } else if (type_name->Equals(Heap::object_symbol())) {
3734
- __ test(input, Immediate(kSmiTagMask));
3735
- __ j(zero, false_label);
3736
- __ cmp(input, Factory::null_value());
3737
- __ j(equal, true_label);
3738
- // Regular expressions => 'function', not 'object'.
3739
- __ CmpObjectType(input, JS_REGEXP_TYPE, input);
3740
- __ j(equal, false_label);
3741
- // Check for undetectable objects => false.
3742
- __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3743
- 1 << Map::kIsUndetectable);
3744
- __ j(not_zero, false_label);
3745
- // Check for JS objects => true.
3746
- __ CmpInstanceType(input, FIRST_JS_OBJECT_TYPE);
3747
- __ j(below, false_label);
3748
- __ CmpInstanceType(input, LAST_JS_OBJECT_TYPE);
3749
- final_branch_condition = below_equal;
3750
-
3751
- } else {
3752
- final_branch_condition = not_equal;
3753
- __ jmp(false_label);
3754
- // A dead branch instruction will be generated after this point.
3755
- }
3756
-
3757
- return final_branch_condition;
3758
- }
3759
-
3760
-
3761
- void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
3762
- Register result = ToRegister(instr->result());
3763
- NearLabel true_label;
3764
- NearLabel false_label;
3765
- NearLabel done;
3766
-
3767
- EmitIsConstructCall(result);
3768
- __ j(equal, &true_label);
3769
-
3770
- __ mov(result, Factory::false_value());
3771
- __ jmp(&done);
3772
-
3773
- __ bind(&true_label);
3774
- __ mov(result, Factory::true_value());
3775
-
3776
- __ bind(&done);
3777
- }
3778
-
3779
-
3780
- void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
3781
- Register temp = ToRegister(instr->TempAt(0));
3782
- int true_block = chunk_->LookupDestination(instr->true_block_id());
3783
- int false_block = chunk_->LookupDestination(instr->false_block_id());
3784
-
3785
- EmitIsConstructCall(temp);
3786
- EmitBranch(true_block, false_block, equal);
3787
- }
3788
-
3789
-
3790
- void LCodeGen::EmitIsConstructCall(Register temp) {
3791
- // Get the frame pointer for the calling frame.
3792
- __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3793
-
3794
- // Skip the arguments adaptor frame if it exists.
3795
- NearLabel check_frame_marker;
3796
- __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
3797
- Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3798
- __ j(not_equal, &check_frame_marker);
3799
- __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));
3800
-
3801
- // Check the marker in the calling frame.
3802
- __ bind(&check_frame_marker);
3803
- __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
3804
- Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3805
- }
3806
-
3807
-
3808
- void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
3809
- // No code for lazy bailout instruction. Used to capture environment after a
3810
- // call for populating the safepoint data with deoptimization data.
3811
- }
3812
-
3813
-
3814
- void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
3815
- DeoptimizeIf(no_condition, instr->environment());
3816
- }
3817
-
3818
-
3819
- void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
3820
- LOperand* obj = instr->object();
3821
- LOperand* key = instr->key();
3822
- __ push(ToOperand(obj));
3823
- if (key->IsConstantOperand()) {
3824
- __ push(ToImmediate(key));
3825
- } else {
3826
- __ push(ToOperand(key));
3827
- }
3828
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
3829
- LPointerMap* pointers = instr->pointer_map();
3830
- LEnvironment* env = instr->deoptimization_environment();
3831
- RecordPosition(pointers->position());
3832
- RegisterEnvironmentForDeoptimization(env);
3833
- // Create safepoint generator that will also ensure enough space in the
3834
- // reloc info for patching in deoptimization (since this is invoking a
3835
- // builtin)
3836
- SafepointGenerator safepoint_generator(this,
3837
- pointers,
3838
- env->deoptimization_index(),
3839
- true);
3840
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3841
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
3842
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
3843
- }
3844
-
3845
-
3846
- void LCodeGen::DoStackCheck(LStackCheck* instr) {
3847
- // Perform stack overflow check.
3848
- NearLabel done;
3849
- ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
3850
- __ cmp(esp, Operand::StaticVariable(stack_limit));
3851
- __ j(above_equal, &done);
3852
-
3853
- StackCheckStub stub;
3854
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3855
- __ bind(&done);
3856
- }
3857
-
3858
-
3859
- void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
3860
- // This is a pseudo-instruction that ensures that the environment here is
3861
- // properly registered for deoptimization and records the assembler's PC
3862
- // offset.
3863
- LEnvironment* environment = instr->environment();
3864
- environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
3865
- instr->SpilledDoubleRegisterArray());
3866
-
3867
- // If the environment were already registered, we would have no way of
3868
- // backpatching it with the spill slot operands.
3869
- ASSERT(!environment->HasBeenRegistered());
3870
- RegisterEnvironmentForDeoptimization(environment);
3871
- ASSERT(osr_pc_offset_ == -1);
3872
- osr_pc_offset_ = masm()->pc_offset();
3873
- }
3874
-
3875
-
3876
- #undef __
3877
-
3878
- } } // namespace v8::internal
3879
-
3880
- #endif // V8_TARGET_ARCH_IA32